From 57545d5506a9233548f02d951549e246a73d6fda Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Wed, 3 Apr 2024 22:01:01 +0900 Subject: [PATCH 01/19] Implement Polygon Hermez type2 SMT (#34) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Working with dummy hash * Use keccak * Add account * Clippy * Support storage trie * Use remaining key and clean * Write pointer to leaf data in serialization * Minor * Comments * Cleaning * Cleaning * readme * Fix plonky2 dep * Fix dep * minor * Remove dbg * Squashed 'evm_arithmetization/' changes from d547f8d5..5ec32fcb 5ec32fcb FMT c1728d4e Merge remote-tracking branch 'mir/main' into tmp-type2/hermez 01bb7974 Add Poseidon constraints e695ae18 Tests passing 207c54df Poseidon STARK 02d6435f Poseidon files 710225c9 Simulate jumpdest data with the interpreter (#1489) 3ec1bfdd Update `starky` and leverage it as dependency for `plonky2_evm` (#1503) f0ffb811 Fix tests 9f68d71d Fix tests 5a385392 Fix interpreter b6fec06c Fix nightly build (ahash issue) (#1524) b600142c Cleanup `alloc` / `std` imports for plonky2 (#1518) 6b39fc90 Remove risk of panics in interpreter (#1519) d4213223 Fix dep 756d3a27 Clippy e46112e3 Cleaning 061e70ce All tests passing d2598bde Revert "Remove StarkProofWithMetadata (#1497)" (#1502) 4ea4807c Passing erc20 test 1a08e783 Fix no-std tests and add corresponding jobs in the CI (#1501) 06444eaa Switch permutation argument for logUp in `starky` (#1496) fb8452de Merge pull request #1500 from topos-protocol/ci 246c2b62 Fix workflow 8f919133 Fix nightly version 212f29cf Add missing constraints mentioned by auditors (#1499) af0259c5 Remove StarkProofWithMetadata (#1497) f3f7433c Some cleanup (#1498) 63579636 Reorganize lookup / ctl modules (#1495) e502a0df Make CTLs more generic (#1493) 1dc22b76 Fix BaseSumGenerator and BaseSplitGenerator Ids (#1494) f76245e2 Cleanup imports (#1492) a9060e61 Add LA audit report 36e62a13 Use 
usize::BITS and wrapping_shr in reverse_index_bits_in_place_small (#1478) 14bb5bdb Use web_time crate instead of std::time (#1481) eff7cc0f Fix circuit sizes (#1484) ca2e56e2 Fix bugs in jumpdest analysis (#1474) c0700065 Improve `BIGNUM` operations (#1482) acc59c35 Speed-up `bn254` pairing operation (#1476) ae0907ff Merge pull request #1462 from 0xPolygonZero/pg/evm-licensing 8cb80e09 Improve `blake2f` call (#1477) 319fc6a2 Improve SHA2 precompile (#1480) 8e1969db Fix interpreter jumps (#1471) b8a16b39 Fix typos (#1479) 265d46a9 chore: update ci workflow (#1475) 39a2d62d Fix touched_addresses removal (#1473) 10fc9660 Remove some more CPU cycles (#1472) 990eb34d Remove some CPU cycles (#1469) 30b47998 Fix simulation for jumpdest analysis (#1467) 5c1ec524 proofreading (#1466) c4319dce fix: make add_generators public (#1463) bb48cabd Add math rendering with Katex (#1459) 70483050 Fix fill_gaps (#1465) c2a73ad8 Fix clippy (#1464) 219365d6 Packed rlp prover inputs (#1460) ccd4ff87 Update Cargo.toml de2709d8 Update README.md 7f5fae84 Add files via upload b119e96f Merge pull request #1461 from 0xPolygonZero/eth_trie_utils_bug_fix ab05d181 Bumped `eth_trie_utils` f80ebe77 Remove full memory channel (#1450) a78a29a6 Merge pull request #1423 from topos-protocol/jumpdest_nd b05e84dd Interpreter GenerationInputs (#1455) 3b1ed824 Merge pull request #1447 from topos-protocol/plonky2_doc cda30847 Apply review ac9f704f Fix comment f9c3ad66 Update empty_txn_list fdedf3e3 Merge remote-tracking branch 'public/main' into jumpdest_nd bead1d60 Adress review comments 233ddd4e Constrain syscall/exceptions filter to be boolean (#1458) 6ef0a3c7 Apply suggestions from code review 606732a8 Free up some CPU cycles (#1457) 99a1eb5c Missing review comments ae4a720a Address comments 92aaa404 Apply suggestions from code review 0bf9cd2f Use current context in ecrecover (#1456) c329b368 chore(evm,field,plonky2):fix typos (#1454) 85524cfa Intra doc link 33def084 Merge branch 'main' into plonky2_doc 
aedfe5df Implement CTL bundling (#1439) c8430dac Add Boolean constraints for `ArithmeticStark` (#1453) 54a13135 Improve some calls to `%mstore_rlp` (#1452) 1715573c Fix problems after address bundling 95c83add Merge pull request #1399 from topos-protocol/refactor_encode_funcs 5b71eb4e Address review comments 3c699be7 Merge remote-tracking branch 'public/main' into refactor_encode_funcs 22e267c3 Address bundling bd02117c Fix `after_mpt_delete_extension_branch` (#1449) f4be34dc Some more dcbfef6d chore: fix typos (#1451) 82804e42 Add some more + module doc 80917cbe Merge remote-tracking branch 'public/main' into refactor_encode_funcs 7fc6b86d Minor 77f51095 Adress reviewer comments cb19f219 Add crate-level documentation (#1444) 47b42856 Remove unused macro 2dacbfe2 Address bundling (#1426) 3e61f06a Remove gas check in sys_stop (#1448) ed2e1bc7 Add comment 3c8b150f Rustdoc 20db596e Add some more explicit doc on plonky2 crate 18a14bf2 Remove assertion 897ba585 Remove assertion in packed verif 1c994737 Address comments f4713c44 Apply suggestions from code review 247d655b Minor 8f1efa15 Fix minor error ab4508fc Add packed verification f46cf4ef Prevent some lints from being allowed (#1443) 6cf4df7d Add initial constraint z polynomial (#1440) ef07eabf Pacify latest clippy (#1442) 1a95f7aa Clippy 9c573a07 Restore simple_transfer and Clippy a85f9872 Fix bug in jumpdest proof generation and check that jumpdest addr < code_len 48b9769e Remove duplicated label a8340496 Rebase to main ae3003a9 Add alternative method to prove txs without pre-loaded table circuits (#1438) c3d707c1 Constrain partial_channel (#1436) dfcf276d Refactor encode_empty_node and encode_branch_node cb3f91a0 add Debug trait to PartitionWitness to enable trace information output (#1437) 24ae0d9d Clippy 3e78865d Remove aborts for invalid jumps and Rebase 0ae56db0 Reabse to main 11d668f5 Remove aborts for invalid jumps 4e569484 Improve proof generation c4025063 Clippy aaa38b33 Fix fmt 08982498 Remove 
U256::as_u8 in comment 5acabad7 Eliminate nested simulations 0bec6278 Apply suggestions from code review ed260980 Fix jumpdest analisys test ff3dc2e5 Refactor run_next_jumpdest_table_proof 9e39d88a Rebase to main 6ababc96 Remove aborts for invalid jumps 7eff4e27 Constrain first offset of a segment (#1397) 829ae64f Improve proof generation a291d92c Merge pull request #1392 from 0xPolygonZero/dp-from_values-take-ref 7cb04884 Minor cleanup (#1435) 096c7456 Constrain new top to loaded value in MLOAD_GENERAL (#1434) 18e08f4f Filter range checks (#1433) f67ee258 Add exceptions handling to the interpreter (#1393) 536cd1c8 Regenerate tries upon Kernel failure during `hash_final_tries` (#1424) ee91b67c Merge pull request #1432 from 0xPolygonZero/discord_badge 0b56ab75 Added a Discord badge to `README.md` f8f6b07a Change context to current context for BN precompiles (#1428) 68b9f0ad Add ERC721 test (#1425) a64311cf Add aborting signal (#1429) 77f1cd34 Clippy 5a0c1ad8 Fix fmt ad8c2df8 Remove U256::as_u8 in comment 81f13f3f Eliminate nested simulations fdd7ee46 fix: make `from_noncanonical_biguint` work for zero (#1427) 2c5347c4 Apply suggestions from code review 71dff6e9 Constrain MSTORE_32BYTES new offset limbs (#1415) 746e1344 Fix jumpdest analisys test f76ab777 Refactor run_next_jumpdest_table_proof 3e8ad086 Rebase to main bc1a3c48 Merge `push` and `prover_input` flags (#1417) 51ff8c5b Merge pull request #1420 from dzizazda/main 837434cf Fix a minor typo in evm/spec/cpulogic.tex 4e4e61c2 typo fix 00ed16fc minor typo fix 724437d0 typo fix 942e43ab typo fix bfcfcdb4 Add `Checkpoint` heights (#1418) 5607faf3 Check that limbs after the length are 0 (#1419) 3195c205 Merge MSTORE_32BYTES and MLOAD_32BYTES columns (#1414) 56e83956 Merge pull request #1416 from AdventureSeeker987/main 43ecf1df chore: fix some comment typos 7efd147e Use mstore_32bytes to optimize decode_int_given_len (#1413) edfc86c3 Remove is_keccak_sponge (#1410) 170ce5f2 Preinitialize all code segments (#1409) 
a90aa40b Implement MPT preinitialization (#1406) 4ba7718e Optimize asserts (#1411) 47e24306 Remove GenerationOutputs (#1408) 46b6aa10 Implement degree 2 filters (#1404) 2d36559d Make some functions const (#1407) 7ac6bf2c Implement `PublicValues` retrieval from public inputs (#1405) 6c3e3c0e Use logUp for CTLs (#1398) cb2a22a5 Update stack op cost (#1402) d28ba240 Pacify clippy (#1403) d682769b Fix set_context constraints (#1401) 2d0df393 Merge pull request #1391 from succinctlabs/chris/recursion 32d00967 Fix kernel codehash discrepancy (#1400) 5572da30 Remove intermediary block bloom filters (#1395) 30c944f7 Remove bootstrapping (#1390) e68195fc chore: Remove TODOs about `from_values` taking a reference 37918ccc Revert "chore: from_values takes ref" 471ff68d Optimize `num_bytes` and `hex_prefix_rlp` (#1384) 7cc123e0 chore: from_values takes ref b598e6ff VerifierCircuitData Clone,PartialEq,Eq 64cc1000 Move stack_len_bounds_aux to general columns (#1360) 96f3faf2 Changes in interpreter and implement interpreter version for add11 (#1359) 6dd2e313 Add upgradeability to `AllRecursiveCircuits` and output verifier data (#1387) 3440ba94 Remove extra rows in BytePackingStark (#1388) 2a6065b4 comment ab70bc53 Fix run_syscall in interpreter. 
(#1351) 2039e18f Fix genesis block number in `prove_block` (#1382) acd3b1ad Merge pull request #1383 from topos-protocol/mpt_specs 110a8eff Update evm/spec/mpts.tex ee450d6d Remove redundant sect about MPT 36e31c53 Address comment 08e0afe4 Fix typo in evm/spec/mpts.tex c7951fcc Update evm/spec/mpts.tex 12b522cb Update evm/spec/mpts.tex 98bed16a Update evm/spec/mpts.tex 0c0818c7 Update evm/spec/mpts.tex c6178a45 Update evm/spec/mpts.tex a3b5e13e Update evm/spec/mpts.tex 3af316f3 Add MPT specs d4b05f37 Add specs for stack handling (#1381) a7364586 Backporting gas handling to the specs (#1379) 2d5a84a1 Add specs for the CPU table (#1375) bec1073c Explain difference between simple opcodes and syscalls (#1378) 48e2b24b Add range check constraints for the looked table (#1380) 8d473168 Add specs for BytePackingStark (#1373) fe311c7f Check is_kernel_mode when halting (#1369) 98b5e5be Initialize blockhashes (#1370) 06933b1d Starting the specs for the CPU logic (#1377) 1f65a8a8 Add exceptions to specs (#1372) 0c4d9a8e CTL and range-check documentation (#1368) f1be8409 Update README.md (#1371) 7a50292a Update README.md f2b75fa5 Update README.md edeb2c76 Merge pull request #1367 from 0xPolygonZero/avm-readme 29762c85 Update README.md d98305b3 Create README.md 398b15c1 wip 79c6de14 Update Keccak-f specs. 
(#1365) b44fc0d6 Add specs for KeccakSponge (#1366) b9328815 Reduce visibility (#1364) ddecf8bd Update specs for Logic and Arithmetic Tables (#1363) f879d925 Add doc for privileged instructions (#1355) 2c951de4 Update Memory in specs (#1362) 24aa9668 Revert "Make gas fit in 2 limbs (#1261)" (#1361) 40d3c6dd Merge pull request #1294 from matthiasgoergens/matthias/make-clippy-happy eda7fd65 Constrain is_keccak_sponge (#1357) 4dc42c51 Merge public values inside prove_aggreg (#1358) 0e63e661 Implement out of gas exception (#1328) 01f229a8 Add push constraints (#1352) a0876d73 Refactor JUMPDEST analysis (#1347) 6f2b3349 Fix parsing of non-legacy receipts (#1356) f7d5e7c4 Fix MSTORE_32BYTES in interpreter (#1354) 6d751b13 Remove values of last memory channel (#1291) 75ae0eb5 Merge pull request #1346 from 0xPolygonZero/dp-unwrap-hunting 88fcc329 Reduce visibility for a bunch of structs and methods in EVM crate (#1289) 5800e6ad Add run_syscall and tests for sload and sstore (#1344) cc0cdd07 Remove unnecessary code duplication (#1349) 3810bd1a Charge gas for native instructions in interpreter (#1348) 5c41dc4d Range-check keccak sponge inputs to bytes (#1342) d2b5882a Root out some unwraps ec41b754 Fix ranges in AllRecursiveCircuits initialization for log_opcode aggregation test (#1345) 01bbf1a0 Constrain clock (#1343) 954d1a77 Remove logic for multiple txns at once (#1341) 5d5628b5 Move empty_check inside final iteration 605932d1 restore `no-std` support (#1335) e41435e9 Add memory checks for prover_input, as well as range_checks for prover_input, syscalls/exceptions (#1168) fa93454c Add withdrawals (#1322) 19178072 Remove `len` column in `KeccakSpongeStark` (#1334) 099994ab Add test for ERC20 transfer (#1331) d8f6de25 Merge pull request #1329 from shuoer86/main d941539b Fix typos in comments a0ea26f4 Fix typos in comments d2b549af Constrain uninitialized memory to 0 (#1318) f71f227d Add test for selfdestruct (#1321) 1e8ed78f Merge pull request #1320 from 
topos-protocol/fix-combine-keccak-jumpdest 85b38bec Fix merging of jumpdest and keccak_general. 41362075 Combine JUMPDEST and KECCAK_GENERAL flags. (#1259) 20501d9b Add context constraints (#1260) 3ca16620 Merge pull request #1317 from topos-protocol/more_memcpy_bytes 6332900d Combine PUSH0 and PC flags. (#1256) 0d97b93a Add some documentation in EVM crate (#1295) afd357f4 More of memcpy_bytes 0f299d4c Merge pull request #1316 from topos-protocol/memcpy_tiny af4935cd Merge NOT and POP flags. (#1257) 4b40bc03 Remerge context flags (#1292) c96a8700 Move empty check inside final iteration 0258ad4a Merge pull request #1314 from topos-protocol/refactor_wcopy 6f52b76d Review 7447959f Fix kexit_info in test 666a155d Remove new_stack_top_channel from StackBehavior (#1296) 15a9e992 Fix test on interpreter side 75fdd342 Fix calldatacopy ade5b8c3 Fix a9e47afc Refactor codecopy a1b178f6 Refactor memcpy 3feb04d2 Refactor wcopy syscalls 3aeec83a Add missing constraints for DUP/SWAP (#1310) 44af80f2 Merge pull request #1309 from topos-protocol/fix_padding_in_wcopy c1c1ab6d Fix wcopy and extcodecopy for ranges over code limit ed5ec3ca Merge pull request #1307 from topos-protocol/fmt b31f7061 Apply rustfmt with latest nightly b212fff7 Merge pull request #1306 from topos-protocol/fix_empty_last_chunk 8326db60 refactor: remove usage of unstable `generic_const_exprs` in starky (#1300) 60811d08 Also for memset 83054b0f Handle empty case for memcpy_bytes 8af189b9 Merge pull request #1305 from topos-protocol/memset 385ab3c6 Remove redundant d185d30e Speed-up memset and fix it to write 0 values 0300a322 Merge pull request #1304 from topos-protocol/memcpy_bytes 4140eb54 Fix 503a31b6 Reviews e2b66206 Merge pull request #1302 from topos-protocol/remove_kernel_memory_zeroing 9607a41b Merge pull request #1303 from topos-protocol/amortize_receipt_reset 595dfa6b Merge pull request #1301 from topos-protocol/submod_kernel ba61d15c Add macro for copying sequences of bytes 6d2586ef Amortize bloom 
reset 40de5059 Remove outdated code 07ffe4f5 Combine stack macros for fewer operations 6ca9970a Alter stack to remove SWAPs for SUBMOD 29005dc5 Use SUBMOD in Kernel 05006deb Pad according to RATE rather than WIDTH (#1299) 49976ea2 Check gas in sys_stop (#1297) 817e3e78 Combine DUP and SWAP (#1254) 29fdd3e3 minor: use explicit builder.assert_zero for readability (#1293) c9391be0 Update check_ctls with extra looking values (#1290) f9242702 Make clippy happy d89966b0 Merge pull request #1288 from 0xPolygonZero/git_dep_org_update a02a2ecb Updated `mir-protocol` --> `0xPolygonZero` 1d604319 Store top of the stack in memory channel 0 (#1215) 762e6f07 Fix hash node case in `mpt_delete_branch` (#1278) d7990ee1 Add journal entry for logs (#1286) 9fd0425f Fix journal order in `sys_selfdestruct` (#1287) 49ca63ee Fix sys_blockhash (#1285) 2aeecc3d Fix failed receipt. (#1284) b4203c3d Make sure success is 0 in contract failure (#1283) 41a29f06 Remove some dead_code in EVM crate (#1281) 8a5eed9d Fix shift constraint (#1280) e58d7795 Remove reg_preimage columns in KeccakStark (#1279) 0de6f949 Remove extra SHL/SHR CTL. (#1270) 51eb7c0b Merge pull request #1276 from topos-protocol/fix_empty_to_encoding 571dc14f Fix encoding for empty recipient 3ac0c4ae Fix genesis state trie root when calling `prove_root` (#1271) cd36e96c Derive clone for txn RLP structs (#1264) 0f19cd0d Make gas fit in 2 limbs (#1261) 4e2cba56 Merge pull request #1262 from topos-protocol/fix_rc_doc 8afd06cf Fix description of Range-Check columns in STARK modules 30005957 Optimize lookup builder (#1258) 1ff6d4a2 Merge pull request #1235 from topos-protocol/new-logup f49fbc8e Transactions trie support (#1232) acc659da Add type 1 and 2 txn for RLP encoding support (#1255) 916ce0dd Merge pull request #1228 from topos-protocol/constrain-genesis-state 5694af79 Merge remote-tracking branch 'mir-plonky2/main' into constrain-genesis-state 75c0e47a Apply comments. 
03a95581 Handle additional panics (#1250) 72241ca7 Connect block_gas_used (#1253) a24cd4f3 Merge pull request #1251 from topos-protocol/fix-observe-challenges 043d12c2 Fix observe_block_metadata 8c78271f Add `random` value to block metadata and fix `sys_prevrandao` (#1207) bbc6fe76 Merge branch 'main' into 'new-logup' 3983969c Use function for genesis block connection. 9d0101d6 Merge branch 'main' into 'constrain-genesis-state' 0abc3b92 Apply comments (#1248) d6be2b98 Remove `generic_const_exprs` feature from EVM crate (#1246) 70d6dd97 Merge branch 'main' into new-logup f438d45f Merge branch 'main' into 'new-logup'. 6618cfad Remove SEQUENCE_LEN in BytePackingStark (#1241) 1b7207ee Merge pull request #1244 from topos-protocol/block_metadata_doc 459e9b3d Merge pull request #1245 from topos-protocol/indexing_tables d8874c83 Update ranges indices c468465c Merge pull request #1243 from tamirhemo/main edd3f383 Add some doc for BlockMetadata / ExtraBlockData 8a19f436 Merge pull request #1242 from topos-protocol/fix_multi_row_ctl 8839285f add trait bound a44379b5 fmt and clippy 7d7f01da refactor prove method 4eb6a3b5 Fix eval_table ca441872 Merge branch 'main' into new-logup b60a3d4b Merge pull request #1239 from topos-protocol/mload_mstore_with_packing 053553d4 Reuse new packing instructions for MLOAD and MSTORE 696377ba Merge pull request #1231 from topos-protocol/error_vs_panic b5c28bd6 Rename utility methods for U256 conversion f07351fc Merge pull request #1238 from topos-protocol/cleanup f3ea95ca Merge branch 'main' into error_vs_panic c4be838a Typo d1c00767 Cleanup lookup_test module and reduce module visibility 15064b3a Merge pull request #1229 from topos-protocol/next_row_ctls 1a4caaa0 Move next row logic inside Column ffa71787 Merge remote-tracking branch 'mir/main' into new-logup a9b7b5a6 Revert "Remove where clauses: [(); CpuStark::::COLUMNS]" 8903aec1 Change padding rule for CPU (#1234) 865f185b Merge branch 'main' of github.com:mir-protocol/plonky2 into 
new-logup 1afcafad Merge pull request #1236 from topos-protocol/ci 5db6abf0 Update clippy in CI 4f0330ad Update clippy in CI ec9e6196 Fix range 66f935a7 Remove where clauses: [(); CpuStark::::COLUMNS] 91000591 Merge branch 'main' of github.com:mir-protocol/plonky2 into new-logup 9697c906 Clippy 7dc2a774 Cleanup c5af894e Add assert with char(F). Cleanup. Fix recursive challenges. 9ab8a118 Remove one helper function 17f661f9 Fix BytePacking range-check. Fix lookup challenges c9c0f8b7 Use CTL challenges for logUP + change comments + add assert f65ad58a Implement logUp d4a8026b Combine mstore_general and mload_general into one flag (#1188) 27d9113f Merge branch 'main' into next_row_ctls 0b5ac312 Merge pull request #1203 from topos-protocol/constrain_nv_stack_len 19220b21 Remove redundant Keccak sponge cols (#1233) 06bc73f7 Combine arithmetic flags on the CPU side (#1187) 61a1c246 Fix CTLs c27fc96a Merge branch 'main' into next_row_ctls f944a08b Fix self_balance_gas_cost and basic_smart_contract. (#1227) 7ebbb47f Swap ordering in stack macro (#1230) 5a1b05ac Remove risks of panic 9508b490 Move byte packing / unpacking to a distinct table (#1212) 3c4f938f Make next row available to CTLs 4d7d9ffa Constrain genesis block's state trie. 
d1c395ef Merge pull request #1202 from mir-protocol/keccak-preimage 3571f097 Merge pull request #1224 from mir-protocol/latest-nightly 9a8a769d more clippy suggestions 55d05147 clippy suggestions a4e6c6ae clippy suggestions 7415810f clippy suggestions 8af3b0fe clippy suggestions ed8bcf9d clippy suggestions 1dd77d6d fmt e947a624 suppress incorrect Clippy error 967f7b12 latest nightly in CI and rust-toolchain 65917f5f Merge pull request #1222 from mir-protocol/internal_crate_path_stablization 2f1ed951 Merge pull request #1220 from mir-protocol/latest_nightly_fix 90ea0318 Merge pull request #1223 from succinctlabs/uma/change-witness-visibility faa70e07 Merge pull request #1219 from succinctlabs/uma/add-mock-feature-flag a184f09b Made visibilities outside of crate to allow for forking partial witness gen outside of crate 1be1ca4d clippy a6433071 Fixes 5936c67f Now refers to sub-crates using paths (and removed `patch` section) 180c2094 Merge pull request #1208 from topos-protocol/blockhash_opcode 71b2ece1 Merge pull request #1216 from topos-protocol/checkpoint_lengths 0b7c4082 Merge pull request #1218 from topos-protocol/keccak_col 05e9fc0b Apply Nick's comment d0379e94 Apply Nick's comment 4716fe7d Also included clippy fixes introduced by new nightly 6d3d2cb2 Now builds on the latest nightly 5a3c8b26 clippy 0ca796e1 Removed mock feature flag and added mock_build 170f7d83 Fix Clippy 9a06fc9b Fix memop reads, from_prover_inputs and cleanup. ddf2b817 Clippy 1c01d682 Fix overflow check and test. Remove [..8] when using h256_limbs. 
c30b1834 Change h256_ulimbs 4e0fe74a Apply comments 42f70380 Add blockhash sys opcode 4782519d remove spurious 18d31412 Added mock feature flag and test 258b075f Remove filter column for KeccakStark e6ca4606 Merge pull request #1214 from jtguibas/jtguibas/serde-target 5690b951 Merge pull request #1217 from topos-protocol/cleanup_duplicates fa9aae1f Remove duplicate code 6207f446 Merge pull request #1206 from topos-protocol/missing-public-value-links 8dcb29e5 Display actual trace lengths instead of number of ops 800603d6 feat: serde for targets a7096546 Merge pull request #1209 from topos-protocol/receipts-all-types d4b71c56 Replace genesis state trie check with TODO 6bd17e29 Apply comments dd3b61a3 Merge pull request #1211 from mir-protocol/comment-fix ac89c7cd Fix comment in `proof.rs` bf21b278 Apply comments 9ba2b895 Implement receipts of types 1 and 2 b0764436 Add missing links between public values 8beba569 Constrain next row's stack length ea03e418 Keccak STARK: constraint preimage to equal A on first round 760f09a8 Merge pull request #1201 from shuklaayush/fix/keccak-stark-reg-preimage 301aedf0 fix: constrain higher bits of reg_preimage a0b2b489 Merge pull request #1200 from topos-protocol/fix_empty_txn_list 71967147 Update range from ReceiptTrie PR 6e7fcc9e Merge pull request #1199 from jtguibas/john/make-generate-partial-witness-pub d3f33bae make generate partial_witness pub 62f271a8 Merge pull request #1198 from mir-protocol/public_values_serde 975fd451 Made `PublicValues` serializable 86fb6aa0 Merge pull request #1097 from topos-protocol/receipts_and_logs 6a2e2423 Clippy caae038c Cleanup 5b962f3c Change receipts_trie in basic_smart_contract and self_balance_gas_cost ad9796cb Fix tests and address comments 925cdd53 Cleanup c0b4f155 Implement receipts and logs 44115de7 Merge pull request #1174 from topos-protocol/merge-context-flags a881c70e Merge pull request #1191 from mir-protocol/eth_trie_utils_patch 18ca89f0 Patched plonky2 to use a patch for 
eth_trie_utils 4e5f6e7e Apply comment 10bbda03 Remove unnecessary changes in the Operation enum c3cb2278 Combine get_context and set_context into one flag 74212a29 Merge pull request #1192 from topos-protocol/misc_constraints f6f9fa31 Merge pull request #1190 from topos-protocol/mpt-remove-cow 06e20f87 Apply comment a94d9282 Merge pull request #1194 from topos-protocol/block_basefee 8476fdcd Refactor 9a450068 Update BlockBaseFee to fit in 2 limbs c138f2d6 Merge pull request #1193 from topos-protocol/observe_pv 68bb4967 Update tests to have a blockgaslimit fitting u32s 976d7521 Observe public values 0b78c43f Remove filtering in membus 91e8d52d Reduce overconstraining in decode module b711e527 Combine a few constraints d96c6491 Merge pull request #1165 from topos-protocol/ci-test d70d67fa Remove copy on write for mpt_insert and mpt_delete 1997bf24 Implement inverse from Fermat little theorem (#1176) eb7bb461 Merge pull request #1189 from topos-protocol/remove_is_bootstrap_kernel_flag 49d92cb8 Remove is_bootstrap_kernel column 683501cc Merge pull request #1183 from topos-protocol/remove_is_cpu_cycle_flag 815a02ab Remove is_cpu_cycle 89e62e55 Use Keccak config in simple tests 7b07229b Add guidance for external contributors to README.md 830fdf53 Merge pull request #1184 from topos-protocol/combine_jump_flags 12f379f9 Combine jump flags 470788d0 Merge pull request #1185 from topos-protocol/combine_simple_logic_flags 7cdb6baf Merge pull request #1177 from topos-protocol/alloc 7829dccf Combine EQ and ISZERO flags dc7e0aa7 Merge pull request #1181 from topos-protocol/combine_logic_flags 654f7cac Comment e10eaad0 Combine all logic flags together 437f57a8 Fix logic CTL 5100e032 Revert changes in cyclic_subgroup_unknown_order 8541a04b Apply Nicholas comment 56ebda49 Address review 12a687d3 Reduce reallocations ee9ce4c5 Combine AND and OR flags in CpuStark 6f98fd76 Merge pull request #1147 from metacraft-labs/gate_make_public_parameter 5f4b15af Connect SHL/SHR operations to the 
Arithmetic table (#1166) df07ae09 Write trie roots to memory before kernel bootstrapping (#1172) c9eed2bb Connect public values in aggregation circuit (#1169) 397ee266 Merge pull request #1171 from topos-protocol/exception-flag 017e6217 Set exception flag to 1. e6407089 Error instead of panicking for missing preprocessed circuits (#1159) b2626fdc Merge pull request #1162 from topos-protocol/cleanup_attributes 9eeb69f0 Merge pull request #1105 from topos-protocol/poseidon_warning bf1ed783 Merge pull request #1161 from topos-protocol/fix_recursive_ctl c9bd32d5 Fix trait import. (#1163) 9f8c1522 Remove unused attributes 4a762e33 Merge pull request #1160 from topos-protocol/keccak_general 5b9e8d85 Merge branch 'main' into poseidon_warning bd3834c4 Silence Poseidon warnings for ARM targets 5316f890 Clippy 8365608b Convert to u32 instead of u64 c93f9d5f Fix endianness in benefiary limbs bca3e09b Reuse set_public_value_targets f01098a7 Constrain keccak general 9e0719e6 Better document constraints on addcy carries (#1139) d8e314bc Merge pull request #1155 from 0xmozak/matthias/generalise-transpose 8c6e8d63 Merge pull request #1158 from mir-protocol/jacqui/gas-check-spec c52ed29e Gas handling brain dump eebf7eb0 Merge pull request #1157 from mir-protocol/update-versions b414b8e9 fmt f574effe make imports conditional on config 84321955 update versions in cross-crate references 3a556029 update versions for crates.io updates 7537193d Generalise transpose 5b8740a7 Merge pull request #1026 from topos-protocol/memory-ctl-verifier-bus 3b21b87d Merge pull request #1151 from mir-protocol/jacqui/dead-memtable-cols 7a882d0a Clippy 6253a68e Change public values into public inputs 59b73c84 Apply comments 1590c1d0 Fix indices in CTL functions f97deab8 Remove non-passing debug assert 06037f81 Fix the memory CTL and implement the verifier memory bus b3f00d4a Merge pull request #1146 from topos-protocol/overlap-cpu-syscalls 831fe862 Cut 5 Columns From The Memory Table With This One Weird 
Trick! bfd6b988 Merge pull request #1148 from topos-protocol/lookup_check ee9cd80c Change arg to non-mutable reference 0276446e Add additional lookup unit tests dc70902f Remove always true condition bc246780 Fix name in ID 6ca3f1a9 Make GateRef value public bfa7ab36 Merge pull request #1111 from topos-protocol/lookup_serial 16227f90 Merge syscall and exceptions constraints. 0f52c889 Merge pull request #1145 from mir-protocol/npwardberkeley-patch-1 535fb7d8 Update prover.rs e047676e Merge pull request #1114 from onsen-egg/onsen-egg/lookup-opt a67cfdcb Precompute RE poly evals for challenges 03d90f30 Faster multiplicity counting for lookup tables 167518ed Merge pull request #1143 from succinctlabs/build_issue_on_mac_M2 6a772879 Fix negative quotient issue (#1140) 25678f46 Merge pull request #1144 from mir-protocol/build-in-subdirectories f3e87ec4 CI: build in subdirectories 8a86e195 fix: add itertools/use_std feature flag for [std] 2d8c02bf Merge pull request #1138 from 0xmozak/bing/dep-serde-rc 7ba051f4 Fix failing byte constraint (#1135) 152e3959 Merge pull request #1137 from topos-protocol/fix-kernel-panic e28b484a deps(serde): use rc 1af1afcf Change current context in bignum_modmul cf278eac Merge pull request #1136 from topos-protocol/div_by_zero f116c855 Fix risk of division by zero b27389df Merge pull request #1134 from topos-protocol/avx_tests 04657d24 Fix import 1d6ca589 Add LUT hash to remove CircuitBuilder overhead 4893a860 Merge pull request #1116 from topos-protocol/recursive_ranges 00579850 Merge pull request #1132 from mir-protocol/dependabot/cargo/itertools-0.11.0 8b35fefb Rename cd to common_data for consistency Cf review cbb3da15 Reduce number of lookup accesses b32345cd Update lookup serialization c8020126 Provide methods for ProverOnlyCircuitData serialization 544aff27 Also provide CommonCircuitData in serialization of gates and generators 47781e47 Add CommonCircuitData to gates deserialization method b43d6c1d Add CommonCircuitData to generators 
deserialization method d684ee2d Switch Field type of generators to be F: RichField + Extendable 5d513207 Update itertools requirement from 0.10.3 to 0.11.0 4400757f Merge pull request #1128 from mir-protocol/dependabot/cargo/hex-literal-0.4.1 dc170915 Merge pull request #1131 from mir-protocol/dependabot/cargo/criterion-0.5.1 c202f4bc Merge pull request #1129 from mir-protocol/dependabot/cargo/ahash-0.8.3 0f284ca6 Merge pull request #1130 from mir-protocol/dependabot/cargo/hashbrown-0.14.0 63b8ceba Merge pull request #1124 from 0xmozak/matthias/remove_unused_deps e3f12709 Merge pull request #1123 from 0xmozak/matthias/fix-readme b0c5ddc0 Update criterion requirement from 0.4.0 to 0.5.1 fc70f36c Update hashbrown requirement from 0.12.3 to 0.14.0 0d9208a6 Update ahash requirement from 0.7.6 to 0.8.3 413f589e Update hex-literal requirement from 0.3.4 to 0.4.1 cc45ac9a Merge pull request #1125 from 0xmozak/matthias/fix-resolver-warning 1f561771 Merge pull request #1126 from 0xmozak/matthias/add-dependabot 7437fe2b Fill modulus in cpu row for Fp254 operations. 
(#1122) 9e748a47 Enable github's Dependabot 6c2f76d5 Fix resolver warning 4b0fc861 Remove unused dependency `blake2` from `evm` crate f6b2e742 Fix spaces and wording in README ee5d1aa6 Merge pull request #1033 from 0x0ece/transpose 398f86af Merge pull request #1092 from matthiasgoergens/matthias/move_to_field 2d7a94de formatting 94f880b6 Merge pull request #1104 from topos-protocol/serializer dca50adf Merge pull request #1119 from mir-protocol/jacqui/topos-protocol/stack_len_bounds_aux_error 6b493d6f Remove redundant case (error in kernel mode) 1664ab44 Merge pull request #1112 from topos-protocol/fix-generate-jump 7aa5ed3b Merge pull request #1117 from topos-protocol/fix_set_context b9b227c8 Merge pull request #1118 from mir-protocol/revert-1109-new-clippy-fixes 14c40115 Revert "clippy fixes" f08afec6 Merge pull request #1109 from mir-protocol/new-clippy-fixes 5bff02a1 Fix generate_set_context 0a59aa6e Remove need for matching start ranges 325cd2f7 Compute stack_len_bounds_aux correctly in generate_error 224064bf Fix jump operation generation c982826e Add feature "rc" to serde crate import 3870524a Merge pull request #1113 from topos-protocol/ci 6bd575d1 Fix nightly version in CI dbb23587 Merge pull request #964 from topos-protocol/lookup 96fbecd9 ignoring where appropriate (for izip), fixing elsewhere b0568a79 remove useless vec 0fec1124 update itertools 08a6e66d fix 5b08ac58 fix 3c776a8d clippy fixes 4df4d865 No default implementation 91c55d15 Add wrapper types for Lookup and LookupTable 43512371 Review c0fc349c Fix lookup serialization and update with latest serialization changes 7e80b42a Serialize Lookup gates and generators 35abffd5 Implement lookups with logarithmic derivatives in Plonk 6122dccb Move operations to Field 56a127eb Make Buffer available in no-std d960bfe2 Make serializer work with slices instead of Vec 3de92d9e Merge pull request #1102 from mir-protocol/modexp-memory-context-change 605ea47f reset Cargo.toml 897e2e99 fix 54cf74ac addressed 
comments 975e9a49 fmt 8eeca9be undo dummy change 2fa43121 dummy change to get tests to rerun :P 9bb6da04 fmt afd4bd04 cleanup bc53ddc5 fix 264192aa modexp uses current_general 39d2237d Merge pull request #1101 from mir-protocol/blake_fix ee452fc0 Merge pull request #1099 from mir-protocol/blake_fix_fix ef8ea64d Minor 7559bb2f Minor 83ee5fd6 Minor c8ff80ca Fix blake2 fix 7ca56768 fix 246eb8d8 blake fix 23bc390a Merge pull request #1095 from mir-protocol/jacqui/push0-opcode 3eb41edb William comments 564864ea Remove parts of the copy-on-write logic (#1096) cedeff52 PUSH0 9cc35360 Merge pull request #1082 from mir-protocol/jacqui/simplify-stack-bounds 8ded9e84 Minor: William comment ec07255f Fix halt loop (#1094) 01efa013 Fix account touch in calls (#1093) ba705703 Use current context for pairing memory (#1091) 9838a367 Check call depth in create (#1089) e51c4d0d Set returndata size to 0 in some create errors (#1088) d37c5455 Increment call depth in precompiles (#1087) 56e7ad00 Fix LOG* gas (#1086) 68b15ea5 Fix CALLDATALOAD for large offsets (#1085) f852984e Implement PREVRANDAO as if it was DIFFICULTY (#1084) 6920992e Simplify stack bounds constraints 01175419 Merge pull request #1071 from mir-protocol/jacqui/bad-opcode-witness-generation ae290dbf William PR comments 0f7e1c0b Call stack depth (#1081) 0f874317 Minor fix to REVERT (#1080) 90bb4741 RIPEMD doesn't get untouched (#1079) fd48e5d1 Contract creation fixes (#1078) 77f0d8b5 Don't revert state in CREATE in case of OOF or nonce overflow (#1077) 63a6e706 Fill BLOCKHASH and PREVRANDAO syscalls with dummy code (#1076) 0e23606e Revert #1074 (#1075) 5a13b62d Don't overwrite existing account (#1074) 2cf31f5f Prevent shift ops from panicking (#1073) d3387172 Commit missing file c773476c Minor docs 55b29cac Remove bootloader.asm (#1072) 3ecf5309 Minor bugfixes 448bc719 Lints 1d804e46 Fix stack after precompiles (#1061) 7ab0bba5 Merge branch 'main' into jacqui/bad-opcode-witness-generation b7220428 Error handling 973624f1 
Minor fixes to RETURN and RETURNDATACOPY (#1060) 720faa67 Fix create OOG because of code deposit cost (#1062) fbf6591b Warm precompiles earlier (#1065) f605d912 Propagate static flag (#1066) 73079796 Fix pairing invalid input (#1067) 49bbe4e0 Fix arithmetic stark padding (#1069) 0d819cf8 Implement EVM `BYTE` operation (#1059) 8153dc78 Remove `-C prefer-dynamic=y` from CI build. a492d3e1 Fix revert gas bug 7dfdacf2 Fix return and revert gas (#1058) 42f33017 Fix ecrecover edge case (#1057) c0abefda Fix DUP in call gas e6a7b8c5 Add contract creation flag (#1056) 30b97b29 Fix DelegateCall bug 9727eaf1 Fix extcodehash when account is empty (#1055) 08a061bc Implement LOG* gas and remove panic (#1054) 354664c8 Fix ecmul (#1053) 6e303601 Support for type-2 transactions (#1052) 9b0092ab Support for type-1 transactions (#1051) 15dec6fa Encode `to` as B160. (#1011) beefc91d Pop checkpoint in the right place 5a7c176c Fix issues related to CREATE2 collisions (#1050) e720090e Merge pull request #1041 from mir-protocol/storage_addr_h160_to_h256 d57b62ff Perform jumpdest analysis whenever entering a new context (#1049) 971bfba6 EIP-2681: Limit account nonce to 2^64-1 (#1048) 8faea881 Don't add an event for account creation for pre-existing account (#1047) 29fac4ca Check balance in create (#1046) 1616c0ba Fix extcodecopy 3a9e5cc0 More fixes to contract creation (#1045) 49979df9 Fixed failing test a294c7e2 Some fixes to contract creation (#1044) 84c15606 Minor fixes to returndata and create (#1043) 10e6c768 `TrieInputs` now uses `H256` for storage account addresses ce6ac9f8 Merge pull request #1038 from mir-protocol/tests-memory-context-fix c36ed15e Merge pull request #941 from mir-protocol/bls-fp2 6292d8d7 redundant d3986e6b merge successful 59ae7103 merge 244d5e9b Add refund journal event and checkpoint after access address event (#1040) bde7fb50 Various fixes to checkpoint logic (#1039) e5b0fce6 revert testing changes 3a77c5a0 fix 14f92f7b Cargo.toml change for testing b116929f 
Delete touched recipient in EOA -> EOA (#1037) 6ebee38e fix d05db497 Don't touch contract address in DELEGATECALL or CALLCODE (#1036) bfd6834d Journal of state changes + state reversion (#1028) 74ba3032 MPT deletion (#1025) 202985b2 Fix CALL gas (#1030) 944d4a24 SSTORE refund (#1018) f1cc284d Optimize transpose c134b597 Cross-table lookup for arithmetic stark (#905) 779456c2 Merge pull request #1029 from mir-protocol/precompile-memory-context-change 6f4f00c6 Merge pull request #1027 from mir-protocol/memory-refactor 2c5f6fd6 Fix compile time problems and generic hash implementation (#1024) 76fb3160 Merge branch 'memory-refactor' into precompile-memory-context-change 0d9e3216 fix (mstore_unpacking returns offset) 97aedd11 Merge branch 'memory-refactor' into precompile-memory-context-change 6e7fa6da fix 675d6440 Merge branch 'memory-refactor' into precompile-memory-context-change 57bcb451 use mstore_unpacking and mload_packing 6669f73a use mstore_unpacking and mload_packing af12368a addressed comments & cleanup 98a75774 cleanup 5dc043aa Merge pull request #1012 from honeywest/transpose a4a4fbb3 fmt 057b650f fix a076da75 fix 24159886 precompile memory context change d9694d95 fix 446c3b71 fix b566dbd7 refactor memory/core.asm to make code more reusable 46c7903b Merge pull request #1023 from topos-network/clippy 92c2378c Fix clippy f11921c9 Fix doubly_encode_rlp_scalar in the 0 case. (#1022) 653a6b15 Remove `generic_const_exprs` dependency from field crate. (#1020) 4380395e Merge pull request #1017 from mir-protocol/expmod-fix 91067e58 expmod edge case fix b159c9e7 Merge pull request #1013 from topos-network/overflow-check f0df03f6 Merge pull request #1009 from mir-protocol/expmod_precompile 099e7946 fixes 17a7c57d Change add_or_fault macro 1f39c555 Address overflow-related TODOs in ASM code, using a macro add_or_fault. 
This is related to https://github.com/mir-protocol/plonky2/pull/930/files/a4ea0965d79561c345e2f77836c07949c7e0bc69 40515dc6 Merge pull request #1014 from toposware/bootstrap_constraint f3de2afc remove test file cae5a2cf fix 050c2e65 fix: calculate gas properly ab8ebdfb Merge pull request #1016 from mir-protocol/remove-proof-challenges-serialization 8d738729 Merge pull request #1015 from mir-protocol/clippy-fix ade5b433 fix 30e58ad2 remove ProofChallenges serialization 8358b85d remove unneeded mut 08e6c352 addressed comments 037c2f5e addressed comments 841c5829 Fix todo in kernel bootstrapping debe65f9 addressed comments 1d8f71f8 optimize transpose_in_place_square_small code ae21ef8f Merge pull request #997 from mir-protocol/pairing-test 84f17699 comments f9aad433 neutral input 397d5953 fix 031fe6ed Merge branch 'main' into expmod_precompile 9e4056e2 cleanup badbf010 store and unpack at end 998cd8ab addressed comments a638ebe0 fix 7ede443e Merge pull request #1006 from mir-protocol/blake_precompile dc076df5 addressed comments 96742f29 addressed comments e40b9edb addressed comments 9b18b3ae fix unit 16928fd0 peculiar... 
b37e049a fmt f6a49e88 fair naming 4a42ddb2 on stack 57113905 redundant 2aa83d9a Merge branch 'pairing-test' of github.com:mir-protocol/plonky2 into pairing-test ae4b5091 neutral name 503cb8a9 random inp 4ad8520e SNARKV precompile (#1010) b28e3e0d minor ab721fa3 SSTORE gas (#1007) efd5a81b Merge pull request #980 from mir-protocol/serialize_common_circuit_data 13c653bc mul works 6599c90a abstraction 78a368cf fix 8df0c743 remove build_without_randomizing (no longer needed) b640bf63 serialize ProofChallenges 537debdc return bool 89122a3d it works 479e919c fmt 6dc094a8 test 4c235e5a Merge branch 'main' of github.com:mir-protocol/plonky2 into pairing-test b4e06271 fix d31c60a0 clean up d928a70b clean 1e9db292 fixes 67a3edb2 Precompiles exist (#1008) 0d98e4b8 formatting e642b824 move serialization to separate example dc91554d expmod precompile 3b7ad771 cleanup 46d9cee0 charge gas! 9460acc1 rename blake2b ce033410 fix 6a239c4f fix a41cf018 fixed blake tests 11a03c5e Merge pull request #1005 from mir-protocol/precompile-fixes 621c63de clippy fix c083cc63 fix 29a8367b fmt 2d98dd3c commented out unused functions 5dc44916 Merge branch 'main' into blake_precompile 454e0add fixed blake2_f, and testing 137a9966 Merge pull request #998 from mir-protocol/even-smaller-bignum-modexp-test f225ea49 add comment 93398472 Merge branch 'precompile-fixes' into blake_precompile 1eba893e mload_packing macro ffc5d6d6 Merge branch 'main' into precompile-fixes 43f4d2b8 clean more 14ee46c8 cleanup 0f662ed0 fixes b7e93511 New contract hook (#1002) a4b714e6 EIP-3541: Reject new contract code starting with the 0xEF byte (#1003) 472face2 EIP-3860: Limit and meter initcode (#999) f3f3641f rename 975a35c3 fmt f718f857 cleanup 8d50806b fix b9f1c1c5 cleanup 858c59a2 cleanup 45c0d894 cleanup 905c5eb5 deterministic build function 146e6605 don't serialize challenges 66763c7d cleanup 993ed149 seralizing 34a03545 Serialize impls, and use in Fibonacci example ea82d680 Merge pull request #981 from 
toposware/serialization 86acc15f blake precompile progress b288ff5f Merge branch 'precompile-fixes' into blake_precompile d5060ecd precompile optimizations 26204461 it works 9f0c2f47 blake precompile progress dd58b9b5 dont panic 26d99a9b memory compress 0c55aa04 clean 50752246 clean ff0695d7 renumber memory f1bbf66c it works c01b2bf2 minor 5f564b67 initial work on blake precompile 44a623d4 initialize out in asm 4e0be664 Merge branch 'main' of github.com:mir-protocol/plonky2 into pairing-test b35d2524 Merge pull request #1001 from mir-protocol/eth_trie_utils_0_6_0_bump db93bada Bumped `eth_trie_utils` to `0.6.0` 1a0a6300 EIP170 (#1000) 723f197d Cleanup 099c4b97 msg 82bca7fa error b661a709 twisted check 2a9c5cfd Add serialization check in square_root example bf02a3e8 Make generators public 5de5bfb5 Move serialization files into dedicated module 0e465c1c Customize range specification for AllRecursiveCircuits f71139d9 Add serialisation support for gates, generators, and various structs f7f5fb4e Change display for GoldilocksField 6edd5891 Gas and more for `CREATE(2)` (#995) a8e5613b EOA to precompiles logic (#993) df4a6f01 fix for full modexp test 6d84b988 fmt cb23bfca check for special cases and align with yellow paper bbe64674 tests passing 3628021a fmt d6584dcb restructure tate test 3e437a0c oops remove more debug stuff bf5dc256 undo debug commenting 0df18d5e tests de94ac25 missing file 3444e810 even smaller bignum modexp test, and fixes 21a1a98a reorg ca3a7f8a Merge branch 'main' of github.com:mir-protocol/plonky2 into pairing-test 9b54ee43 refactor c0ced26f Merge pull request #992 from mir-protocol/smaller-bignum-modexp-test 0e082432 reorg ada250f3 Merge branch 'main' into smaller-bignum-modexp-test d8fef87a Only print warning or errors from the log. (#996) 2e16ab04 Replace %stack calls with equivalent opcodes. 
(#994) 3a3ff87a fmt 7dda0eff works 1f077628 new api c9b09936 compiles d112c716 fmt a704b152 even less thorough :P a4f60a04 less thorough bignum modexp test 049a258b Merge pull request #991 from mir-protocol/disable_couple_tests 8562abe0 Disable a couple tests 690fd100 Merge pull request #990 from mir-protocol/mpt_failure_labels 67593f16 Labels for failed MPT read/insert e6864e98 Merge pull request #985 from toposware/kernel_serial 5dfac715 Fix generic const expressions warning (#984) 9037ceb0 Merge pull request #986 from mir-protocol/incremental_release eb7468e7 Incremental release builds df5a90cc Provide methods for serializing Kernel 6b2503f7 Merge pull request #970 from toposware/env 3cc39fa4 wip 9d60191d Implement returndatasize/returndatacopy for interpreter 191ca102 comment 0b85c8bb getting there 0b9ef768 nl 2106ae07 Merge branch 'bls-fp2' of github.com:mir-protocol/plonky2 into bls-fp2 a5c6b14e Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 9b9cd735 Update evm/src/extension_tower.rs 6946eaca Implement codesize/codecopy for interpreter 0f3285c3 Implement gasprice on the interpreter e9cc5632 Impl caller/address/origin opcodes for interpreter b721236e Precompiles interface (#983) b896f9b2 Merge pull request #974 from toposware/stack_bound c8637635 Remove dummy_yield_constr 4946c3d5 Merge branch 'main' into stack_bound 5fce67d1 Merge pull request #978 from toposware/stack_constraints 32a6bdf1 Merge pull request #971 from toposware/keccak_sponge_is_final_block 58f4568e Merge pull request #982 from toposware/sys_chainid 92d94dc6 Use Block chain id for sys_chainid ba844a24 Change shl/shr behavior as well as BASIC_TERNARY_OP 475b2ba0 Fix copy_returndata_to_mem (#976) c7e60073 Check if context is static for state-changing opcodes (#973) 142be4e1 Implement rest of *CALL opcodes (#972) b202196b switch f2650418 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 1e57ef96 Remove unnecessary constraint 29726f92 Apply review f424bd36 
Merge pull request #966 from toposware/interpreter 938e3bd5 Set stack_len_bounds_aux properly 18d27d2f Remove is_final_block column in KeccakSpongeStark 011ea8e4 Fix from review 5b1fd5f2 CALL gas (#969) d79d2c49 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 8130a8a6 Merge pull request #950 from toposware/keccak_sponge 0529fa06 Change endianness within generate_keccak_general 3da8efa6 Implement sar in interpreter b943ddb0 Implement signextend in interpreter 4db00441 Implement sgt in interpreter ac2ccc1e Implement slt in interpreter 232832e3 Implement smod in interpreter 18d317b9 Implement sdiv in interpreter. 0e5e28f6 Merge pull request #968 from toposware/block_interpreter c0fae77c Merge pull request #951 from toposware/prove_block 6124e4d6 Fix BlockCircuitData proofs 0146f48a Cleanup 524b39e2 Reactivate CTL for keccak sponge d1379ac1 Fix hash output writing to memory 5f6098ff Add test for keccakf_u8s 99b0d009 Implement KeccakSpongeStark constraints 2fae2fbc Impl gaslimit opcode for interpreter 9e6f284b Impl chain_id opcode for interpreter ae8ee27e Impl coinbase opcode for interpreter e3572f1d Impl basefee opcode for interpreter 86bd055b Impl difficulty opcode for interpreter 60fed608 Impl number opcode for interpreter a17c6231 Impl timestamp opcode for interpreter 042c0042 Merge pull request #965 from mir-protocol/fix_run_constructor 31e134f0 Delete %set_new_ctx_parent_ctx 1a9f0104 Fix call logic (#963) ab692252 Minor fixes to context creation (#961) 7a65b1d4 Merge pull request #967 from toposware/fix_decode 310107f2 Fix decode constraint cfc54f95 Fix run_constructor d1c9277d Merge pull request #962 from mir-protocol/range-check-example d6bb5d5d range check example af3fa142 Implement sys_return and sys_revert (#959) f24c3537 Update README.md c11f4f41 Merge pull request #960 from mir-protocol/readme-updates fb24b200 README updates: examples and external tutorial 5ac12de9 Fix sys_exp (#958) 923722b1 Fix copy opcodes when offset is large (#957) 
d59fa59a Merge pull request #925 from mir-protocol/bignum-modexp 6a4e9ab6 fix 90f7ba9a addressed final comments 9690b60b Merge pull request #956 from mir-protocol/doubly_encode_storage_values e70e4fca Doubly RLP-encode storage values 889911e8 redundancy 93dd25a1 fmt 33dc8eae better names 251d7e34 systematize names 4e48fc43 all Stacks 0e3b86de frob 26da6dc7 rev stack d52c15e8 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 6fa59d20 Fix MSTORE8 (#955) f9217272 Fix signed syscalls stack (#954) 3b607bde Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 a061c3cf Merge pull request #952 from mir-protocol/extra_where_clauses 209c1ff1 Remove extra conditions e2f33dd3 Merge pull request #873 from toposware/hashconfig 9ee47ab7 Move HashConfig into GenericConfig associated types 46d5e62a Copy txn data to calldata (#935) 8e04cbfe Self-destruct list (#948) a926dcad Transaction validity checks (#949) f1a99e69 Add patch section to workspace config file e857c020 Make hash functions generic 2ca00a9a Selfdestruct gas and set (#947) 786a71d6 Merge pull request #946 from mir-protocol/selfBalanceGasCost 31cd0f64 Remove dbg 9ae69a7c Add an integration test for the `selfBalanceGasCost` case 56bf0892 Charge gas for extcodecopy (#942) f71d3642 Merge pull request #945 from mir-protocol/remove_CONSUME_GAS 9480cbed Signed operations as syscalls (#933) 2d87c5d6 Remove `CONSUME_GAS` 834522a6 Merge pull request #939 from mir-protocol/termination_fixes be0cccdf Merge pull request #938 from mir-protocol/rework_create_create2 39fdad8c Feedback a0d04ca3 Fix Wcopy when size=0 (#944) 9f1a5f97 Charge gas for keccak (#943) cdaabfe9 Merge branch 'main' into bignum-modexp b667c074 Merge pull request #940 from mir-protocol/eth_trie_utils_bump 3c7bc883 Removed a type alias 0c87a57f addressed comment e4f2e864 fix 1a348eed check for x < m bce25720 documentation 1e5677c4 comments a6ccd350 cleanup fb73e889 uncommented c18377d1 Merge branch 'main' into bignum-modexp 91fb4fc0 
fix modexp test 823b06ac fp2 works 3b95e013 bls method 5783a374 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 7b93b81a Merge pull request #931 from mir-protocol/fp381-opcodes cf5a4edc prover input minor improvements 60ad9e03 Bumped `eth_trie_utils` to `0.5.0` c3a5fd86 merge 1f14ae98 skeleton 911dfedd Rework CREATE, CREATE2 syscalls b4eb8373 A few fixes for terminal instructions ce22d945 Access lists (#937) f9fa38d3 Fix new account insert key 7028b6ba comment 7ff2122e Merge branch 'main' of github.com:mir-protocol/plonky2 into fp381-opcodes 1ce47ceb Merge pull request #906 from mir-protocol/fp318 0650d263 remove .scale 3f4d970f Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 c8d2769c fmt 74afec70 remove imports f4e65feb Fix bugs in `wcopy` and `update_mem_words` (#934) 15bafce5 Implement CREATE2 address generation (#936) 874805cc Merge branch 'fp318' of github.com:mir-protocol/plonky2 into fp381-opcodes 645ef664 comment 63ec13e2 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 3425391a more comments d0b2b81e More MemoryError (#932) 1f3e3de7 clean and generalize 1627a9a0 tests pass e471818c comments 1fbe3050 Merge branch 'main' into bignum-modexp 06936c76 Implement various syscalls (#930) 84a0bcf8 cleanup 373062b2 on stack 0e8f6a2f test skeleton 392c29f4 compiles 9ea0ebd7 skeleton 1437affc fmt b847d16e redundancy 13d2ed90 merge 54b8ce74 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 143225f4 finish d928423c cleanup d59501e6 fixes, testing, and in-progress debugging fc72ce46 fp6 works 4d83c58d frob works 2df1439d Return error instead of panic in memory operation (#928) a79271a8 Minor account code fixes (#929) caaf3b4a merge fields cf1e6a76 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 24705e1e addressed comments ff81a565 Merge pull request #927 from mir-protocol/creation_fixes e1ae5392 Fix test afded168 Contract creation fixes 893b88c3 Implement syscalls for BALANCE and 
SELFBALANCE (#922) fc6487ca Merge pull request #926 from mir-protocol/fix_gas 3c4bc1d8 Fix GAS and implement storage value parsing be309a38 cleanup from comments 42d65839 addressed comments d340ff8c addressed comments 2e0b7992 addressed comments 9803581d fix 1a78f400 restored neq macro be9cbd5a fmt 902bc66a fmt 511f450a resolved conflicts 4aa212ab modexp fix e06f84dd modmul fix 76e70ac4 fixes ad85d61e fix 4cef5aaa modmul and modexp 1e019356 basic bignum b16b8261 Merge pull request #881 from mir-protocol/bignum-basic 6fe8554f Merge pull request #924 from mir-protocol/empty-stack-replacement 652b2bed allow empty stack replacement a5fad9eb addressed comments f6b9d6ee addressed comments 72b5bb0e fmt 2752456e addressed comments 5e98a5f9 adj trait 50388073 rename 25575df5 cleanup ec0f3ce7 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 692575a2 Bump eth_trie_utils version. (#923) 2ab16344 Merge pull request #921 from mir-protocol/dlubarov_misc bdf35374 Misc b80a28db Misc f13d603a Merge pull request #920 from mir-protocol/dlubarov_misc 47fac8e3 Couple fixes & minor refactor 64c76e76 Merge branch 'main' into bignum-basic c491a989 Merge pull request #919 from mir-protocol/mem_expansion f717a40b Charge for memory expansion e8405eff Merge branch 'main' into bignum-basic de246e22 Merge pull request #918 from mir-protocol/fix_read_ext 7ed53142 Fix reads from not-found ext nodes 7853656e Merge pull request #917 from mir-protocol/fix_clobbering a05ed9fc Fix clobbering of RLP data memory c3ba7a89 Merge branch 'main' into bignum-basic 2ac4fcdf Merge pull request #915 from mir-protocol/fix_clone_account 8c692b72 Fix account cloning f514d966 Merge branch 'main' into bignum-basic 994c54ab Merge pull request #912 from mir-protocol/stack_on_panic 5720cf8a updated function name cda31b5e Merge branch 'main' into bignum-basic 9f75132f Merge pull request #889 from mir-protocol/hash-asm-optimization 38f79e49 optimizations with rep 92ee7786 Merge branch 'main' into 
hash-asm-optimization 69b4a21c Merge branch 'main' into bignum-basic da07a7a8 Merge pull request #914 from mir-protocol/return_post_state 373421a1 Fix tests - need to supply empty code 44c77f55 Input addresses c8d591f6 Add a `prove_with_outputs` method 95347621 div instead of shr cecbfa9b fit c59b979c addmul fix a0a23147 Merge branch 'main' into bignum-basic f518a8b4 Merge branch 'main' into hash-asm-optimization b62bc35d fixes 2d7d2ac3 Merge pull request #886 from toposware/poseidon-native f1ad3da8 fix 062eb82a cleanup e0a4bc31 cleanup fda64475 fmt fa3443a5 new testing interface, and test data bb2233cb Override from_noncanonical_u96() for Goldilocks field 10e7329a Add FFT-based specification for Poseidon MDS layer on x86 targets ee9bfb08 fix 4e736b63 fixes 534395ee fmt 73633354 test data e6027142 cleanup 54eb29e7 fix 6f6c808d more efficient divmod 202990ed Merge branch 'main' into hash-asm-optimization 2195bdd4 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 459d2929 folder 1c71fb34 Merge branch 'main' into bignum-basic 1576a300 Merge pull request #817 from mir-protocol/non-inv e97e8188 fixed iszero and cleanup 12e6527b fixed messed up merge 2a0df523 Merge branch 'main' into hash-asm-optimization 44a0596f fmt 930ebafd Merge branch 'main' into bignum-basic 35fb1499 Merge pull request #904 from mir-protocol/optimize-blake2b 6f8a5100 interface changes b0ed6ae0 cleanup 4ef981e4 initial test data ad38f957 TODO for possible future mul optimization 06276334 carry -> carry_limb e57358bc ge -> cmp and returns 0, 1, -1 d4a485ec Log stack on panic 7fad9eb8 Merge branch 'main' into optimize-blake2b a8956b94 flip limbs 9ec97744 run_ops dd7948e7 merge 5cf8028e Merge branch 'main' into bignum-basic de6f01f4 small optimizations 424d8d22 more optimizations 29df451d optimizations 97cb5c75 bug fix 8f231bd0 optimization 265d39a5 cleanup 85411ac4 fixes 7351a166 fix 684b668b fix 63301d6b refactor sha2 compression 2236f30a more small optimizations e5f8632b small 
optimizations 213ba8ff optimized initial hash value generation 7c8026e8 cleanup 2020202e optimize hash generation further further df7ea93a optimize hash generation further 3a0d86e2 hash function optimization ef377c0b cleanup 4e8af821 fixes 9ad25b2a optimizations eebdd029 Merge pull request #910 from mir-protocol/optimize-asm d23e4e20 deal with and test zero-len case 4b6a5146 fix 05788a99 compiles d4c7bfd5 addressed comments 725b5a08 cleanup 4a762553 name change c4b511ba addressed comments 2000d308 addressed comments a738afce Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into fp318 24e0b291 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 676a483c fix 476a554a OR -> ADD 87ad5714 addressed comments 04f44ef4 addressed comments 9e7dc7ca addressed comments 6f05a144 Merge branch 'main' into bignum-basic 7b2c4c61 Merge branch 'main' into optimize-blake2b ac068845 Merge pull request #909 from mir-protocol/gas_to_coinbase d5003b7c Gas fees go to coinbase 181e4409 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 625bdb68 skeleton 6dd99e43 Merge pull request #908 from mir-protocol/fix_call_field 84fbbbf4 Couple minor fixes 54f8dcf4 Merge branch 'main' into optimize-blake2b ce25cc84 Merge pull request #907 from toposware/wasm b3e93e91 Fix plonky2 compilation with wasm32-unknown-unknown target a96418b3 unused test 33ccf898 small optimizations fda2e190 restored blake2b_g_function and call_blake2b_g_function macros 4a378bce Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into fp318 18c83e77 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 6d997a65 more tests f2538fff cleanup bac38f82 fix 3a019f99 fix 3662e41d fixes 1100445d cleanup 4f412182 cleanup 0fdd93b8 cleanup f46694e7 more thorough tests 2aff3e10 cleanup 1d7c28ee bug fixes c98bfb0b cleanup af3dc287 cleanup 10893fe0 addmul test: use carry efd5e6ec cleanup 5477c7dd fixes 119eae95 fix 9976a4b0 addmul initial aa605b67 flag functions used only in 
tests fa605d7b basic bignum 0f55956a optimized initial hash value generation 5994f4d9 cleanup 9d8d81b4 optimize hash generation further further c37d1e25 optimize hash generation further 40f90d83 hash function optimization 70475a5a cleanup 5f592e60 fixes 93abd35f optimizations eea8ab62 Merge pull request #903 from mir-protocol/misc_evm_fixes a6ac0519 Misc EVM fixes 2eed209a Merge pull request #902 from mir-protocol/debug_tries_2 e6aa62f3 Some tooling for debugging tests where the updated tries are not correct 21db4a1b Merge pull request #900 from mir-protocol/sys_gas f117d76b sys_gas f19b7553 Merge pull request #899 from mir-protocol/evm_fixes c558eedd Misc EVM fixes ec216d28 Merge pull request #898 from mir-protocol/move-out-ecdsa 77fb333a Move ecdsa to its own repo 2621d582 Merge pull request #897 from mir-protocol/move-out-u32 18733f11 Move u32 to its own repo b08e7a08 Merge pull request #896 from mir-protocol/move-out-insertion bf8780b2 Move insertion to its own repo 64296bcc Merge pull request #895 from mir-protocol/move-out-waksman b95bc90b moved waksman to outside repo 1ee39b51 fmt ab32f03b fixed multiplication bde5c557 correct mul impl 2c73d5d7 bls field arithmetic 95e5fb59 cleaner rand 6ac59f16 arithmetic skeleton 8ace54dc Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 57ea64e3 Merge pull request #894 from mir-protocol/fix_storage_trie_lookup 1e1e75c9 Fix code that looks for an account's storage trie e8c94632 comment 1d94756e add inverse doc 5aafbaad Merge pull request #893 from mir-protocol/move_out_system_zero 801fa641 link bfaa80a3 Move system-zero to its own repo 13a8d670 loop test 2ea3e5e3 minor changes e3e5c678 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 7781dd36 Plonky2 to 0.1.3 db893831 Merge pull request #892 from mir-protocol/bump_plonky2 2133c7f3 Use new plonky2 52e34265 Bump plonky2 to 0.1.2 79084719 Merge pull request #891 from mir-protocol/fix_hash_or_noop e52b75b0 Fix `hash_or_noop` for general 
hash sizes d17f3aa4 Merge pull request #890 from mir-protocol/test_fixes 29f0692e Fix a few issues found by EVM tests 2a9d4b1a minor 5e3e40a0 more general kernel peek b89e668b minor f5b45ee4 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 33185476 Merge pull request #885 from mir-protocol/skip_log 745bec8d Skip log_kernel_instruction if debug logs disabled e8865130 put extract in interpreter 63f1fbfa fmt be351110 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 6f2d99c7 Merge pull request #884 from mir-protocol/evm-kernel-tests ea9846de format da7a8879 make hash functions take a location pointer a6ffb4b3 simplify byte extraction 4e4cfb06 function API / remove redundancy 53ab0ada remove blake storage 77a7af76 remove sha2 storage abc762f7 cleaner arithmetic 731c29c4 abstract c6cf1dc5 remove custom bce86718 simplify ripe md test e2cac0bb Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv f3946f75 Gas constraints (#880) 444da8f7 better comments 80e49caa segment virts d320fbfb update curve add with ops 787cc890 change segment 71243fd7 fix pairing code after big BN PR merge 2158c1d2 merge 9e60ee25 segment ac40bd5f Optimize `ecMul` precompile (scalar multiplication on BN254) (#852) 3332fbb4 Merge pull request #882 from mir-protocol/back_to_nightly ff80f28b Revert "Set CI to use an older version of nightly" 85b33093 Merge pull request #879 from 0x0ece/patch-1 614c4ae6 Make le_sum public 40866e77 Refactor arithmetic operation traits (#876) c6492bc5 merge fix 7b367f5c merge 83c0292b Move SHL and SHR generation to the CPU. 
(#878) b585b6a7 remove macro 31095e1b stack macro a061b88a naming cecad598 stack macro 361d6d72 tests and stacks cb7c638c more comments 69afed92 refactor 57146c83 miller loop test e63cc2aa Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 9f808fc7 align 69228491 Unify generation and verification of ADD/SUB/LT/GT operations (#872) 1c73e238 fp -> fp254 61ac0eff fmt 0f030fae naming for global labels c107c505 comments e1dca870 name 962754be rand impl a950a262 add comments cd5c92b5 merge ca002aea Optimize `ecrecover` ASM (#840) 9990632f Merge pull request #870 from mir-protocol/prep_for_publish 137bc785 Prep for publishing to crates.io 81511380 TODO 6c4ef29f Add range checks to the arithmetic Stark (#866) aed617c1 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv ea01e19c comment 5b124fb1 minor 6e8d4a57 fix 0eef28af bools 3ea8ad90 fmt 6958d46c names and comments 56be7317 comments b46af11f move consts f70243e7 better comments f0a6ec95 clean asm 136cdd05 Remove InterpolationGate trait (#868) 9c8f1166 ocd d98c69f0 better comments 0b81258a stack macros 3bdb2907 Optimized interpolation gate (#861) c9b005d2 new power works 5deb1648 refactor power 60cbdde8 clean 8ca6ba7b clean c13cf972 tate test 75c5938c rewrite w methods ec4cddb7 inv as method 7b524381 en route to ownership 17cfae66 reorg f34b35ed extra comments 94d99cca extra comments 8b670d54 meh 769c615c cleanup 530fb65b cleanup 155e973d slight refactor d2aa937a improved prover input and test api e06a2f2d duh a5c292c7 space 4d783da8 fmt d99cadeb stack macro b2f9d885 remove redundant macros and improve comments 8e62d994 fmt 922d3ebc add module and fix errors 3fcb5591 redundant macro c74a0c25 test inv from memory abab6bf1 test frob from memory 20fb2cb7 read output from memory 5f2baea0 mul test from memory 7f135fc0 reorg b44d9e2d Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 8ae36474 Merge pull request #865 from mir-protocol/increment_nonce eb7d18da fix clippy 54676487 
cleaner description d6167a63 complete description 23698b74 more comments fda4b4c1 more comments 985e8160 transmute + comments 6e215386 comments f2e40541 Increment sender nonce + buy gas 0daaa3bf org bc9c431e remove comments 9977ae03 new inverse fe91e119 frob format 37ad3407 frob format ecde3d13 frob tests 9cd1f8a1 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv be19cb81 struct impl style arithmetic 31c5db91 rename module ccd4a38a remove make_stack b753836a Merge pull request #864 from mir-protocol/block_circuit b6f6c210 Block circuit 3a6d693f Merge pull request #863 from mir-protocol/smart_contract_test a2f4a58d log df2ba7a3 Basic smart contract test 07e02f2d Merge pull request #862 from mir-protocol/prover_inputs_error_handling a158effe Use error instead of panicking in FromStr 3fbc8bff move comment ea8cfc95 name 2a2880b7 name 800ceb60 zero name e6bcad6c Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into non-inv 446a0d3f name 81861095 Update evm/src/cpu/kernel/asm/curve/bn254/field_arithmetic/inverse.asm 4f38c3a7 name 70d7fb13 cleaner inv 32f24819 Update evm/src/cpu/kernel/asm/curve/bn254/curve_arithmetic/curve_add.asm 49db35d3 Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into non-inv 42f98a09 Update evm/src/bn254.rs 82ce8153 \n 93a363c1 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 068f7485 Update evm/src/witness/util.rs 698ab6e7 Update evm/src/bn254.rs be7a489c Fix stack overflow 8ba8bb62 Merge pull request #860 from mir-protocol/agg_circuit_2 87be6097 Feedback ae212cfb Merge pull request #859 from mir-protocol/remove_older_evm_recursion_logic 14e6e7e9 Merge pull request #858 from mir-protocol/remove_ctl_defaults e4a5c2c9 Merge pull request #857 from mir-protocol/non_tight_degree_bound f4ac2d4f Fix vk 5df78441 Add aggregation circuit 76b3eb30 more fbb72e16 warning e12c6ad5 Remove some older EVM recursion logic 6655e776 Remove CTL defaults 0ca30840 Merge pull request #855 from 
mir-protocol/fixed_stark_recursion 5719c0b7 feedback 1ecdb96a Power of two length 2e59cecc import 40aecc8e Allow non-tight degree bound 18ce7ea5 Disable slow test on CI 595e751a Shrink STARK proofs to a constant degree 5cd86b66 names and format 2b91a1a6 simplify miller loop de494dcf remove prints 77798f88 remove loop endpoint de8637ce name 053a0206 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 32cda213 Merge pull request #854 from Sladuca/bool-or-gadget 403e2392 use doc comment 350b9029 add or gadget cca75c77 remove redundant definition c0744d76 TATE TEST PASSES 1f176734 better vec to fp12 9beca707 clean 84fab8d6 clean d4d80f35 rearrange 826702a7 clean f1d5c6bf tuck const e35644e9 miller test passes ef824110 miller in rust f2787a06 more clean 31ee8987 clippy b1f31caf more cleaning 89093b4d clean up 7af11f43 clean up prover code d5cec0e6 clean up code org 3c566e98 tangent and cords work bde569a2 it runs bf7da1c2 POP 41476ce4 fix cee6c653 hex a99b7d51 setup miller 6a93a6be rename e88e28a1 POWER WORKS e9e5528c space 5aab8ac0 first part works bc3adc16 debug pow 05e83526 test 7cd0dbae setup pow 32758829 refactor 950771a6 clean up inverse 95383db4 inverse edits c4e512ef Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv c2462971 inverse works 7788a29f skeleton inv d90a0559 Merge pull request #853 from mir-protocol/ctl_fixes b8e97aaa Fix logic and Keccak CTLs a503b058 fix 15ee75f2 all but inv d6c7… * Bump deps * Make smt_trie a local dependency * fmt * Add temporary bindings to old dependencies * Implement recursive table chain for Poseidon * Bring latest fixes * More missing fixes * More * PR feedback * DUP 0xffffffff * PR feedback * PR feedback * AND -> MOD --------- Co-authored-by: wborgeaud --- Cargo.toml | 8 +- evm_arithmetization/Cargo.toml | 1 + evm_arithmetization/src/all_stark.rs | 19 +- evm_arithmetization/src/cpu/columns/ops.rs | 1 + evm_arithmetization/src/cpu/contextops.rs | 1 + 
evm_arithmetization/src/cpu/control_flow.rs | 3 +- evm_arithmetization/src/cpu/cpu_stark.rs | 22 + evm_arithmetization/src/cpu/decode.rs | 3 +- evm_arithmetization/src/cpu/gas.rs | 5 +- .../src/cpu/kernel/aggregator.rs | 6 + .../src/cpu/kernel/asm/account_code.asm | 184 ++- .../src/cpu/kernel/asm/balance.asm | 7 +- .../src/cpu/kernel/asm/core/create.asm | 27 +- .../asm/core/create_contract_account.asm | 55 +- .../src/cpu/kernel/asm/core/nonce.asm | 26 +- .../src/cpu/kernel/asm/core/process_txn.asm | 3 +- .../src/cpu/kernel/asm/core/terminate.asm | 12 +- .../src/cpu/kernel/asm/core/transfer.asm | 93 +- .../src/cpu/kernel/asm/core/util.asm | 25 +- .../kernel/asm/journal/account_destroyed.asm | 18 +- .../cpu/kernel/asm/journal/code_change.asm | 15 +- .../cpu/kernel/asm/journal/nonce_change.asm | 5 +- .../cpu/kernel/asm/journal/storage_change.asm | 45 +- .../src/cpu/kernel/asm/main.asm | 6 +- .../src/cpu/kernel/asm/mpt/delete/delete.asm | 21 - .../kernel/asm/mpt/storage/storage_read.asm | 27 +- .../kernel/asm/mpt/storage/storage_write.asm | 48 +- .../src/cpu/kernel/asm/smt/delete.asm | 255 ++++ .../src/cpu/kernel/asm/smt/hash.asm | 85 ++ .../src/cpu/kernel/asm/smt/insert.asm | 175 +++ .../src/cpu/kernel/asm/smt/keys.asm | 131 ++ .../src/cpu/kernel/asm/smt/read.asm | 110 ++ .../src/cpu/kernel/asm/smt/utils.asm | 129 ++ .../asm/transactions/common_decoding.asm | 27 +- .../cpu/kernel/asm/transactions/type_0.asm | 1 + .../cpu/kernel/asm/transactions/type_1.asm | 1 + .../cpu/kernel/asm/transactions/type_2.asm | 1 + .../src/cpu/kernel/constants/mod.rs | 22 +- .../src/cpu/kernel/constants/smt_type.rs | 23 + .../src/cpu/kernel/interpreter.rs | 25 +- evm_arithmetization/src/cpu/kernel/opcodes.rs | 1 + .../src/cpu/kernel/tests/account_code.rs | 199 +-- .../src/cpu/kernel/tests/add11.rs | 633 ++++---- .../src/cpu/kernel/tests/balance.rs | 237 +-- .../src/cpu/kernel/tests/mpt/delete.rs | 159 +-- .../src/cpu/kernel/tests/mpt/hash.rs | 133 +- .../src/cpu/kernel/tests/mpt/insert.rs | 
229 +-- .../src/cpu/kernel/tests/mpt/load.rs | 380 +++-- .../src/cpu/kernel/tests/mpt/mod.rs | 8 +- .../src/cpu/kernel/tests/mpt/read.rs | 40 +- .../src/cpu/kernel/tests/receipt.rs | 1268 +++++++++-------- evm_arithmetization/src/cpu/stack.rs | 6 + .../src/fixed_recursive_verifier.rs | 8 + evm_arithmetization/src/generation/mod.rs | 35 +- evm_arithmetization/src/generation/mpt.rs | 149 +- .../src/generation/prover_input.rs | 38 +- evm_arithmetization/src/generation/state.rs | 23 +- .../src/generation/trie_extractor.rs | 13 +- evm_arithmetization/src/lib.rs | 1 + evm_arithmetization/src/poseidon/columns.rs | 155 ++ evm_arithmetization/src/poseidon/mod.rs | 2 + .../src/poseidon/poseidon_stark.rs | 562 ++++++++ evm_arithmetization/src/prover.rs | 16 + evm_arithmetization/src/verifier.rs | 1 + evm_arithmetization/src/witness/gas.rs | 1 + evm_arithmetization/src/witness/operation.rs | 84 +- evm_arithmetization/src/witness/traces.rs | 23 +- evm_arithmetization/src/witness/transition.rs | 14 +- evm_arithmetization/src/witness/util.rs | 40 +- evm_arithmetization/tests/add11_yml.rs | 104 +- .../tests/basic_smart_contract.rs | 124 +- evm_arithmetization/tests/empty_txn_list.rs | 25 +- evm_arithmetization/tests/erc20.rs | 169 ++- evm_arithmetization/tests/erc721.rs | 160 +-- evm_arithmetization/tests/log_opcode.rs | 1173 ++++++++------- .../tests/self_balance_gas_cost.rs | 108 +- evm_arithmetization/tests/selfdestruct.rs | 80 +- evm_arithmetization/tests/simple_transfer.rs | 86 +- evm_arithmetization/tests/withdrawals.rs | 43 +- proof_gen/Cargo.toml | 4 +- smt_trie/Cargo.toml | 39 + smt_trie/README.md | 2 + smt_trie/src/bits.rs | 103 ++ smt_trie/src/code.rs | 75 + smt_trie/src/db.rs | 23 + smt_trie/src/keys.rs | 99 ++ smt_trie/src/lib.rs | 8 + smt_trie/src/smt.rs | 423 ++++++ smt_trie/src/smt_test.rs | 274 ++++ smt_trie/src/utils.rs | 89 ++ trace_decoder/Cargo.toml | 6 +- 91 files changed, 6156 insertions(+), 3190 deletions(-) create mode 100644 
evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm create mode 100644 evm_arithmetization/src/cpu/kernel/asm/smt/hash.asm create mode 100644 evm_arithmetization/src/cpu/kernel/asm/smt/insert.asm create mode 100644 evm_arithmetization/src/cpu/kernel/asm/smt/keys.asm create mode 100644 evm_arithmetization/src/cpu/kernel/asm/smt/read.asm create mode 100644 evm_arithmetization/src/cpu/kernel/asm/smt/utils.asm create mode 100644 evm_arithmetization/src/cpu/kernel/constants/smt_type.rs create mode 100644 evm_arithmetization/src/poseidon/columns.rs create mode 100644 evm_arithmetization/src/poseidon/mod.rs create mode 100644 evm_arithmetization/src/poseidon/poseidon_stark.rs create mode 100644 smt_trie/Cargo.toml create mode 100644 smt_trie/README.md create mode 100644 smt_trie/src/bits.rs create mode 100644 smt_trie/src/code.rs create mode 100644 smt_trie/src/db.rs create mode 100644 smt_trie/src/keys.rs create mode 100644 smt_trie/src/lib.rs create mode 100644 smt_trie/src/smt.rs create mode 100644 smt_trie/src/smt_test.rs create mode 100644 smt_trie/src/utils.rs diff --git a/Cargo.toml b/Cargo.toml index ed9b1da16..6119293e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,11 @@ [workspace] -members = ["mpt_trie", "proof_gen", "trace_decoder", "evm_arithmetization"] +members = [ + "evm_arithmetization", + "mpt_trie", + "proof_gen", + "smt_trie", + "trace_decoder" +] resolver = "2" [workspace.dependencies] diff --git a/evm_arithmetization/Cargo.toml b/evm_arithmetization/Cargo.toml index 12a3438a8..1d671938d 100644 --- a/evm_arithmetization/Cargo.toml +++ b/evm_arithmetization/Cargo.toml @@ -42,6 +42,7 @@ serde_json = { workspace = true } # Local dependencies mpt_trie = { version = "0.2.0", path = "../mpt_trie" } +smt_trie = { version = "0.1.0", path = "../smt_trie" } [target.'cfg(not(target_env = "msvc"))'.dependencies] jemallocator = "0.5.0" diff --git a/evm_arithmetization/src/all_stark.rs b/evm_arithmetization/src/all_stark.rs index 942b5cd2f..43e471db1 100644 
--- a/evm_arithmetization/src/all_stark.rs +++ b/evm_arithmetization/src/all_stark.rs @@ -23,6 +23,8 @@ use crate::logic; use crate::logic::LogicStark; use crate::memory::memory_stark; use crate::memory::memory_stark::MemoryStark; +use crate::poseidon::poseidon_stark; +use crate::poseidon::poseidon_stark::PoseidonStark; /// Structure containing all STARKs and the cross-table lookups. #[derive(Clone)] @@ -34,6 +36,7 @@ pub struct AllStark, const D: usize> { pub(crate) keccak_sponge_stark: KeccakSpongeStark, pub(crate) logic_stark: LogicStark, pub(crate) memory_stark: MemoryStark, + pub(crate) poseidon_stark: PoseidonStark, pub(crate) cross_table_lookups: Vec>, } @@ -49,6 +52,7 @@ impl, const D: usize> Default for AllStark { keccak_sponge_stark: KeccakSpongeStark::default(), logic_stark: LogicStark::default(), memory_stark: MemoryStark::default(), + poseidon_stark: PoseidonStark::default(), cross_table_lookups: all_cross_table_lookups(), } } @@ -64,6 +68,7 @@ impl, const D: usize> AllStark { self.keccak_sponge_stark.num_lookup_helper_columns(config), self.logic_stark.num_lookup_helper_columns(config), self.memory_stark.num_lookup_helper_columns(config), + 0, ] } } @@ -80,6 +85,7 @@ pub enum Table { KeccakSponge = 4, Logic = 5, Memory = 6, + Poseidon = 7, } impl Deref for Table { @@ -88,12 +94,12 @@ impl Deref for Table { fn deref(&self) -> &Self::Target { // Hacky way to implement `Deref` for `Table` so that we don't have to // call `Table::Foo as usize`, but perhaps too ugly to be worth it. - [&0, &1, &2, &3, &4, &5, &6][*self as TableIdx] + [&0, &1, &2, &3, &4, &5, &6, &7][*self as TableIdx] } } /// Number of STARK tables. -pub(crate) const NUM_TABLES: usize = Table::Memory as usize + 1; +pub(crate) const NUM_TABLES: usize = Table::Poseidon as usize + 1; impl Table { /// Returns all STARK table indices. 
@@ -106,6 +112,7 @@ impl Table { Self::KeccakSponge, Self::Logic, Self::Memory, + Self::Poseidon, ] } } @@ -120,6 +127,7 @@ pub(crate) fn all_cross_table_lookups() -> Vec> { ctl_keccak_outputs(), ctl_logic(), ctl_memory(), + ctl_poseidon(), ] } @@ -306,3 +314,10 @@ fn ctl_memory() -> CrossTableLookup { ); CrossTableLookup::new(all_lookers, memory_looked) } + +fn ctl_poseidon() -> CrossTableLookup { + CrossTableLookup::new( + vec![cpu_stark::ctl_poseidon()], + poseidon_stark::ctl_looked(), + ) +} diff --git a/evm_arithmetization/src/cpu/columns/ops.rs b/evm_arithmetization/src/cpu/columns/ops.rs index c15d65722..266354fd8 100644 --- a/evm_arithmetization/src/cpu/columns/ops.rs +++ b/evm_arithmetization/src/cpu/columns/ops.rs @@ -24,6 +24,7 @@ pub(crate) struct OpsColumnsView { pub shift: T, /// Combines JUMPDEST and KECCAK_GENERAL flags. pub jumpdest_keccak_general: T, + pub poseidon: T, /// Combines JUMP and JUMPI flags. pub jumps: T, /// Combines PUSH and PROVER_INPUT flags. diff --git a/evm_arithmetization/src/cpu/contextops.rs b/evm_arithmetization/src/cpu/contextops.rs index 6a7abed89..c3d4640af 100644 --- a/evm_arithmetization/src/cpu/contextops.rs +++ b/evm_arithmetization/src/cpu/contextops.rs @@ -23,6 +23,7 @@ const KEEPS_CONTEXT: OpsColumnsView = OpsColumnsView { not_pop: true, shift: true, jumpdest_keccak_general: true, + poseidon: true, push_prover_input: true, jumps: true, pc_push0: true, diff --git a/evm_arithmetization/src/cpu/control_flow.rs b/evm_arithmetization/src/cpu/control_flow.rs index 832db1961..12899baf7 100644 --- a/evm_arithmetization/src/cpu/control_flow.rs +++ b/evm_arithmetization/src/cpu/control_flow.rs @@ -8,7 +8,7 @@ use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsume use crate::cpu::columns::{CpuColumnsView, COL_MAP}; use crate::cpu::kernel::aggregator::KERNEL; -const NATIVE_INSTRUCTIONS: [usize; 12] = [ +const NATIVE_INSTRUCTIONS: [usize; 13] = [ COL_MAP.op.binary_op, COL_MAP.op.ternary_op, 
COL_MAP.op.fp254_op, @@ -17,6 +17,7 @@ const NATIVE_INSTRUCTIONS: [usize; 12] = [ COL_MAP.op.not_pop, COL_MAP.op.shift, COL_MAP.op.jumpdest_keccak_general, + COL_MAP.op.poseidon, // Not PROVER_INPUT: it is dealt with manually below. // not JUMPS (possible need to jump) COL_MAP.op.pc_push0, diff --git a/evm_arithmetization/src/cpu/cpu_stark.rs b/evm_arithmetization/src/cpu/cpu_stark.rs index 4e60694b0..9c6c43249 100644 --- a/evm_arithmetization/src/cpu/cpu_stark.rs +++ b/evm_arithmetization/src/cpu/cpu_stark.rs @@ -429,6 +429,28 @@ pub(crate) fn ctl_filter_set_context() -> Filter { ) } +/// Returns the `TableWithColumns` for the CPU rows calling POSEIDON. +pub(crate) fn ctl_poseidon() -> TableWithColumns { + let mut columns = Vec::new(); + for channel in 0..3 { + for i in 0..VALUE_LIMBS / 2 { + columns.push(Column::linear_combination([ + (COL_MAP.mem_channels[channel].value[2 * i], F::ONE), + ( + COL_MAP.mem_channels[channel].value[2 * i + 1], + F::from_canonical_u64(1 << 32), + ), + ])); + } + } + columns.extend(Column::singles_next_row(COL_MAP.mem_channels[0].value)); + TableWithColumns::new( + *Table::Cpu, + columns, + Some(Filter::new_simple(Column::single(COL_MAP.op.poseidon))), + ) +} + /// Disable the specified memory channels. /// Since channel 0 contains the top of the stack and is handled specially, /// channels to disable are 1, 2 or both. All cases can be expressed as a vec. diff --git a/evm_arithmetization/src/cpu/decode.rs b/evm_arithmetization/src/cpu/decode.rs index 081e3862c..8bbb5730b 100644 --- a/evm_arithmetization/src/cpu/decode.rs +++ b/evm_arithmetization/src/cpu/decode.rs @@ -25,13 +25,14 @@ use crate::cpu::columns::{CpuColumnsView, COL_MAP}; /// Note: invalid opcodes are not represented here. _Any_ opcode is permitted to /// decode to `is_invalid`. The kernel then verifies that the opcode was /// _actually_ invalid. 
-const OPCODES: [(u8, usize, bool, usize); 5] = [ +const OPCODES: [(u8, usize, bool, usize); 6] = [ // (start index of block, number of top bits to check (log2), kernel-only, flag column) // ADD, MUL, SUB, DIV, MOD, LT, GT and BYTE flags are handled partly manually here, and partly // through the Arithmetic table CTL. ADDMOD, MULMOD and SUBMOD flags are handled partly // manually here, and partly through the Arithmetic table CTL. FP254 operation flags are // handled partly manually here, and partly through the Arithmetic table CTL. (0x14, 1, false, COL_MAP.op.eq_iszero), + (0x22, 0, true, COL_MAP.op.poseidon), // AND, OR and XOR flags are handled partly manually here, and partly through the Logic table // CTL. NOT and POP are handled manually here. // SHL and SHR flags are handled partly manually here, and partly through the Logic table CTL. diff --git a/evm_arithmetization/src/cpu/gas.rs b/evm_arithmetization/src/cpu/gas.rs index 69ebf2c51..d7338655f 100644 --- a/evm_arithmetization/src/cpu/gas.rs +++ b/evm_arithmetization/src/cpu/gas.rs @@ -27,8 +27,9 @@ const SIMPLE_OPCODES: OpsColumnsView> = OpsColumnsView { not_pop: None, // This is handled manually below shift: G_VERYLOW, jumpdest_keccak_general: None, // This is handled manually below. - push_prover_input: None, // This is handled manually below. - jumps: None, // Combined flag handled separately. + poseidon: KERNEL_ONLY_INSTR, + push_prover_input: None, // This is handled manually below. + jumps: None, // Combined flag handled separately. 
pc_push0: G_BASE, dup_swap: G_VERYLOW, context_op: KERNEL_ONLY_INSTR, diff --git a/evm_arithmetization/src/cpu/kernel/aggregator.rs b/evm_arithmetization/src/cpu/kernel/aggregator.rs index 637655255..0915f31a4 100644 --- a/evm_arithmetization/src/cpu/kernel/aggregator.rs +++ b/evm_arithmetization/src/cpu/kernel/aggregator.rs @@ -126,6 +126,12 @@ pub(crate) fn combined_kernel() -> Kernel { include_str!("asm/mpt/storage/storage_read.asm"), include_str!("asm/mpt/storage/storage_write.asm"), include_str!("asm/mpt/util.asm"), + include_str!("asm/smt/delete.asm"), + include_str!("asm/smt/hash.asm"), + include_str!("asm/smt/insert.asm"), + include_str!("asm/smt/keys.asm"), + include_str!("asm/smt/read.asm"), + include_str!("asm/smt/utils.asm"), include_str!("asm/rlp/decode.asm"), include_str!("asm/rlp/encode.asm"), include_str!("asm/rlp/encode_rlp_scalar.asm"), diff --git a/evm_arithmetization/src/cpu/kernel/asm/account_code.asm b/evm_arithmetization/src/cpu/kernel/asm/account_code.asm index 2654bedc7..0bdabe2e9 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/account_code.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/account_code.asm @@ -24,17 +24,9 @@ extcodehash_dead: global extcodehash: // stack: address, retdest - %mpt_read_state_trie - // stack: account_ptr, retdest - DUP1 ISZERO %jumpi(retzero) - %add_const(3) - // stack: codehash_ptr, retdest - %mload_trie_data + %key_code %smt_read_state %mload_trie_data // stack: codehash, retdest SWAP1 JUMP -retzero: - %stack (account_ptr, retdest) -> (retdest, 0) - JUMP %macro extcodehash %stack (address) -> (address, %%after) @@ -44,7 +36,7 @@ retzero: %macro ext_code_empty %extcodehash - %eq_const(@EMPTY_STRING_HASH) + %eq_const(@EMPTY_STRING_POSEIDON_HASH) %endmacro %macro extcodesize @@ -76,11 +68,9 @@ global sys_extcodesize: global extcodesize: // stack: address, retdest - %next_context_id - // stack: codesize_ctx, address, retdest - SWAP1 - // stack: address, codesize_ctx, retdest - %jump(load_code) + 
%key_code_length %smt_read_state %mload_trie_data + // stack: codesize, retdest + SWAP1 JUMP // Loads the code at `address` into memory, in the code segment of the given context, starting at offset 0. // Checks that the hash of the loaded code corresponds to the `codehash` in the state trie. @@ -96,14 +86,8 @@ load_code_ctd: DUP1 ISZERO %jumpi(load_code_non_existent_account) // Load the code non-deterministically in memory and return the length. PROVER_INPUT(account_code) - %stack (code_size, codehash, ctx, retdest) -> (ctx, code_size, codehash, retdest, code_size) - // Check that the hash of the loaded code equals `codehash`. - // ctx == DST, as SEGMENT_CODE == offset == 0. - KECCAK_GENERAL - // stack: shouldbecodehash, codehash, retdest, code_size - %assert_eq - // stack: retdest, code_size - JUMP + // stack: padded_code_size, codehash, ctx, retdest + %jump(poseidon_hash_code) load_code_non_existent_account: // Write 0 at address 0 for soundness: SEGMENT_CODE == 0, hence ctx == addr. @@ -134,3 +118,157 @@ load_code_padded_ctd: MSTORE_GENERAL // stack: retdest, code_size JUMP + +// TODO: This could certainly be optimized, or implemented directly in the Poseidon Stark. 
+global poseidon_hash_code: + // stack: padded_code_size, codehash, ctx, retdest + %stack (padded_code_size, codehash, ctx) -> (0, 0, padded_code_size, ctx, codehash) +poseidon_hash_code_loop: + // stack: i, capacity, padded_code_size, ctx, codehash, retdest + DUP3 DUP2 EQ %jumpi(poseidon_hash_code_after) + %stack (i, capacity, code_size, ctx) -> (i, ctx, i, capacity, code_size, ctx) + ADD MLOAD_GENERAL + %stack (b, i, capacity, code_size, ctx) -> (1, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(8) ADD + %stack (b, i, capacity, code_size, ctx) -> (2, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(16) ADD + %stack (b, i, capacity, code_size, ctx) -> (3, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(24) ADD + %stack (b, i, capacity, code_size, ctx) -> (4, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(32) ADD + %stack (b, i, capacity, code_size, ctx) -> (5, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(40) ADD + %stack (b, i, capacity, code_size, ctx) -> (6, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(48) ADD + + %stack (b, i, capacity, code_size, ctx) -> (7, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(64) ADD + %stack (b, i, capacity, code_size, ctx) -> (8, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(72) ADD + %stack (b, i, capacity, code_size, ctx) -> (9, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(80) ADD + %stack (b, i, capacity, code_size, ctx) -> (10, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(88) ADD + %stack (b, i, capacity, code_size, ctx) -> (11, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(96) ADD + %stack (b, i, capacity, code_size, ctx) -> (12, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(104) ADD + 
%stack (b, i, capacity, code_size, ctx) -> (13, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(112) ADD + + %stack (b, i, capacity, code_size, ctx) -> (14, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(128) ADD + %stack (b, i, capacity, code_size, ctx) -> (15, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(136) ADD + %stack (b, i, capacity, code_size, ctx) -> (16, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(144) ADD + %stack (b, i, capacity, code_size, ctx) -> (17, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(152) ADD + %stack (b, i, capacity, code_size, ctx) -> (18, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(160) ADD + %stack (b, i, capacity, code_size, ctx) -> (19, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(168) ADD + %stack (b, i, capacity, code_size, ctx) -> (20, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(176) ADD + + %stack (b, i, capacity, code_size, ctx) -> (21, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(192) ADD + %stack (b, i, capacity, code_size, ctx) -> (22, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(200) ADD + %stack (b, i, capacity, code_size, ctx) -> (23, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(208) ADD + %stack (b, i, capacity, code_size, ctx) -> (24, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(216) ADD + %stack (b, i, capacity, code_size, ctx) -> (25, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(224) ADD + %stack (b, i, capacity, code_size, ctx) -> (26, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(232) ADD + %stack (b, i, capacity, code_size, ctx) -> (27, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL 
%shl_const(240) ADD + %stack (B0, i, capacity, code_size, ctx) -> (i, capacity, code_size, ctx, B0) + + %stack (i, capacity, code_size, ctx) -> (28, i, ctx, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL + %stack (b, i, capacity, code_size, ctx) -> (29, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(8) ADD + %stack (b, i, capacity, code_size, ctx) -> (30, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(16) ADD + %stack (b, i, capacity, code_size, ctx) -> (31, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(24) ADD + %stack (b, i, capacity, code_size, ctx) -> (32, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(32) ADD + %stack (b, i, capacity, code_size, ctx) -> (33, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(40) ADD + %stack (b, i, capacity, code_size, ctx) -> (34, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(48) ADD + + %stack (b, i, capacity, code_size, ctx) -> (35, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(64) ADD + %stack (b, i, capacity, code_size, ctx) -> (36, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(72) ADD + %stack (b, i, capacity, code_size, ctx) -> (37, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(80) ADD + %stack (b, i, capacity, code_size, ctx) -> (38, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(88) ADD + %stack (b, i, capacity, code_size, ctx) -> (39, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(96) ADD + %stack (b, i, capacity, code_size, ctx) -> (40, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(104) ADD + %stack (b, i, capacity, code_size, ctx) -> (41, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(112) ADD + + %stack (b, i, capacity, code_size, ctx) -> (42, i, 
ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(128) ADD + %stack (b, i, capacity, code_size, ctx) -> (43, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(136) ADD + %stack (b, i, capacity, code_size, ctx) -> (44, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(144) ADD + %stack (b, i, capacity, code_size, ctx) -> (45, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(152) ADD + %stack (b, i, capacity, code_size, ctx) -> (46, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(160) ADD + %stack (b, i, capacity, code_size, ctx) -> (47, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(168) ADD + %stack (b, i, capacity, code_size, ctx) -> (48, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(176) ADD + + %stack (b, i, capacity, code_size, ctx) -> (49, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(192) ADD + %stack (b, i, capacity, code_size, ctx) -> (50, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(200) ADD + %stack (b, i, capacity, code_size, ctx) -> (51, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(208) ADD + %stack (b, i, capacity, code_size, ctx) -> (52, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(216) ADD + %stack (b, i, capacity, code_size, ctx) -> (53, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(224) ADD + %stack (b, i, capacity, code_size, ctx) -> (54, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(232) ADD + %stack (b, i, capacity, code_size, ctx) -> (55, i, ctx, b, i, capacity, code_size, ctx) + ADD ADD MLOAD_GENERAL %shl_const(240) ADD + %stack (B1, i, capacity, code_size, ctx, B0) -> (B0, B1, capacity, i, code_size, ctx) + POSEIDON + %stack (capacity, i, padded_code_size, ctx) -> (i, capacity, 
padded_code_size, ctx) + // stack: i, capacity, padded_code_size, ctx, codehash, retdest + %add_const(56) + %jump(poseidon_hash_code_loop) + +global poseidon_hash_code_after: + // stack: i, capacity, padded_code_size, ctx, codehash, retdest + %stack (i, capacity, padded_code_size, ctx, codehash) -> (capacity, codehash, padded_code_size, ctx) + %assert_eq + // stack: padded_code_size, ctx, retdest + %decrement +remove_padding_loop: + // stack: offset, ctx, retdest + DUP2 DUP2 ADD DUP1 MLOAD_GENERAL + // stack: code[offset], offset+ctx, offset, ctx, retdest + SWAP1 PUSH 0 MSTORE_GENERAL + // stack: code[offset], offset, ctx, retdest + %and_const(1) %jumpi(remove_padding_after) + // stack: offset, ctx, retdest + %decrement %jump(remove_padding_loop) + +remove_padding_after: + %stack (offset, ctx, retdest) -> (retdest, offset) + JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/balance.asm b/evm_arithmetization/src/cpu/kernel/asm/balance.asm index d39f66063..daf2ff855 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/balance.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/balance.asm @@ -27,12 +27,7 @@ global sys_balance: global balance: // stack: address, retdest - %mpt_read_state_trie - // stack: account_ptr, retdest - DUP1 ISZERO %jumpi(retzero) // If the account pointer is null, return 0. 
- %add_const(1) - // stack: balance_ptr, retdest - %mload_trie_data + %key_balance %smt_read_state %mload_trie_data // stack: balance, retdest SWAP1 JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/create.asm b/evm_arithmetization/src/cpu/kernel/asm/core/create.asm index 80f8f4618..07b2af6d9 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/create.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/create.asm @@ -83,13 +83,10 @@ global create_common: DUP1 %nonce %eq_const(@MAX_NONCE) %jumpi(nonce_overflow) // EIP-2681 %increment_nonce // stack: address, value, code_offset, code_len, kexit_info - %checkpoint - // stack: address, value, code_offset, code_len, kexit_info DUP2 DUP2 %address %transfer_eth %jumpi(panic) // We checked the balance above, so this should never happen. DUP2 DUP2 %address %journal_add_balance_transfer // Add journal entry for the balance transfer. - %create_context // stack: new_ctx, address, value, code_offset, code_len, kexit_info GET_CONTEXT @@ -172,7 +169,8 @@ after_constructor: %returndatasize PUSH @SEGMENT_RETURNDATA GET_CONTEXT %build_address_no_offset // stack: addr, len - KECCAK_GENERAL + PROVER_INPUT(poseidon_code) // TODO: FIX THIS! + %stack (codehash, addr, len) -> (codehash) // stack: codehash, leftover_gas, success, address, kexit_info %observe_new_contract DUP4 @@ -251,17 +249,16 @@ create_too_deep: global set_codehash: // stack: addr, codehash, retdest DUP1 %insert_touched_addresses - DUP1 %mpt_read_state_trie - // stack: account_ptr, addr, codehash, retdest - %add_const(3) - // stack: codehash_ptr, addr, codehash, retdest - DUP1 %mload_trie_data - // stack: prev_codehash, codehash_ptr, addr, codehash, retdest - DUP3 %journal_add_code_change // Add the code change to the journal. 
- %stack (codehash_ptr, addr, codehash) -> (codehash_ptr, codehash) - %mstore_trie_data - // stack: retdest - JUMP + DUP1 %key_code %smt_read_state %mload_trie_data + // stack: prev_codehash, addr, codehash, retdest + DUP2 %key_code_length %smt_read_state %mload_trie_data + %stack (prev_code_length, prev_codehash, addr) -> (addr, prev_codehash, prev_code_length, addr) + %journal_add_code_change // Add the code change to the journal. + // stack: addr, codehash, retdest + DUP2 DUP2 %key_code %smt_insert_state + %returndatasize DUP2 %key_code_length %smt_insert_state + // stack: addr, codehash, retdest + %pop2 JUMP // Check and charge gas cost for initcode size. See EIP-3860. // Pre stack: code_size, kexit_info diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm b/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm index b45d45ca5..512dd37a0 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm @@ -4,50 +4,39 @@ %macro create_contract_account // stack: address DUP1 %insert_touched_addresses - DUP1 %mpt_read_state_trie - // stack: existing_account_ptr, address + // stack: address // If the account doesn't exist, there's no need to check its balance or nonce, // so we can skip ahead, setting existing_balance = existing_account_ptr = 0. - DUP1 ISZERO %jumpi(%%add_account) + DUP1 %key_code %smt_read_state ISZERO %jumpi(%%add_account) // Check that the nonce is 0. - // stack: existing_account_ptr, address - DUP1 %mload_trie_data // nonce = account[0] - // stack: nonce, existing_account_ptr, address + // stack: address + DUP1 %nonce + // stack: nonce, address %jumpi(%%error_collision) - // stack: existing_account_ptr, address + // stack: address // Check that the code is empty. 
- %add_const(3) - // stack: existing_codehash_ptr, address - DUP1 %mload_trie_data // codehash = account[3] - %eq_const(@EMPTY_STRING_HASH) ISZERO %jumpi(%%error_collision) - // stack: existing_codehash_ptr, address - %sub_const(2) %mload_trie_data // balance = account[1] + DUP1 %extcodehash + %eq_const(@EMPTY_STRING_POSEIDON_HASH) ISZERO %jumpi(%%error_collision) + DUP1 %balance %jump(%%do_insert) %%add_account: - // stack: existing_balance, address - DUP2 %journal_add_account_created + // stack: address + DUP1 %journal_add_account_created + PUSH 0 %%do_insert: // stack: new_acct_value, address // Write the new account's data to MPT data, and get a pointer to it. - %get_trie_data_size - // stack: account_ptr, new_acct_value, address - PUSH 0 DUP4 %journal_add_nonce_change - PUSH 1 %append_to_trie_data // nonce = 1 - // stack: account_ptr, new_acct_value, address - SWAP1 %append_to_trie_data // balance = new_acct_value - // stack: account_ptr, address - PUSH 0 %append_to_trie_data // storage_root = nil - // stack: account_ptr, address - PUSH @EMPTY_STRING_HASH %append_to_trie_data // code_hash = keccak('') - // stack: account_ptr, address - SWAP1 - // stack: address, account_ptr - %addr_to_state_key - // stack: state_key, account_ptr - %mpt_insert_state_trie - // stack: (empty) + // stack: new_acct_value, address + PUSH 0 DUP3 %journal_add_nonce_change + %stack (new_acct_value, address) -> (address, 1, new_acct_value, address) + %key_nonce %smt_insert_state // nonce = 1 + // stack: new_acct_value, address + DUP2 %key_balance %smt_insert_state // balance = new_acct_value + %stack (address) -> (address, @EMPTY_STRING_POSEIDON_HASH) + %key_code %smt_insert_state + // stack: empty PUSH 0 // success %jump(%%end) @@ -55,7 +44,7 @@ // (This should be impossible with contract creation transactions or CREATE, but possible with CREATE2.) // So we return 1 to indicate an error. 
%%error_collision: - %stack (existing_account_ptr, address) -> (1) + %stack (address) -> (1) %%end: // stack: status diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/nonce.asm b/evm_arithmetization/src/cpu/kernel/asm/core/nonce.asm index 48486be9e..fe955b927 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/nonce.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/nonce.asm @@ -3,12 +3,8 @@ // Post stack: (empty) global nonce: // stack: address, retdest - %mpt_read_state_trie - // stack: account_ptr, retdest - // The nonce is the first account field, so we deref the account pointer itself. - // Note: We don't need to handle account_ptr=0, as trie_data[0] = 0, - // so the deref will give 0 (the default nonce) as desired. - %mload_trie_data + %key_nonce + %smt_read_state %mload_trie_data // stack: nonce, retdest SWAP1 JUMP @@ -23,9 +19,9 @@ global nonce: global increment_nonce: // stack: address, retdest DUP1 - %mpt_read_state_trie - // stack: account_ptr, address, retdest - DUP1 ISZERO %jumpi(increment_nonce_no_such_account) + %key_nonce %smt_read_state + // stack: nonce_ptr, address, retdest + DUP1 ISZERO %jumpi(create_nonce) // stack: nonce_ptr, address, retdest DUP1 %mload_trie_data // stack: nonce, nonce_ptr, address, retdest @@ -38,8 +34,16 @@ global increment_nonce: // stack: address, retdest POP JUMP -global increment_nonce_no_such_account: - PANIC + +create_nonce: + // stack: nonce_ptr, address, retdest + POP + // stack: address, retdest + PUSH 0 DUP2 %journal_add_nonce_change + // stack: address, retdest + %key_nonce + %stack (key_nonce) -> (key_nonce, 1) + %jump(smt_insert_state) // Convenience macro to call increment_nonce and return where we left off. 
%macro increment_nonce diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm b/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm index c70287a6f..1c77b01e0 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm @@ -205,7 +205,8 @@ global process_contract_creation_txn_after_constructor: GET_CONTEXT %build_address_no_offset // stack: addr, len - KECCAK_GENERAL + PROVER_INPUT(poseidon_code) // TODO: FIX THIS! + %stack (codehash, addr, len) -> (codehash) // stack: codehash, leftover_gas, new_ctx, address, retdest, success %observe_new_contract DUP4 diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm b/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm index 8572f34f2..5528eb7e4 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm @@ -91,14 +91,10 @@ global sys_selfdestruct: // Set the balance of the address to 0. 
// stack: balance, address, recipient, kexit_info - PUSH 0 - // stack: 0, balance, address, recipient, kexit_info - DUP3 %mpt_read_state_trie - // stack: account_ptr, 0, balance, address, recipient, kexit_info - %add_const(1) - // stack: balance_ptr, 0, balance, address, recipient, kexit_info - %mstore_trie_data - + DUP1 ISZERO %jumpi(selfdestruct_balance_is_zero) + DUP2 %key_balance %smt_delete_state + // stack: balance, address, recipient, kexit_info +selfdestruct_balance_is_zero: %stack (balance, address, recipient, kexit_info) -> (address, recipient, address, recipient, balance, kexit_info) diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm b/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm index 0517cf3a8..148d37d1a 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm @@ -29,28 +29,35 @@ global transfer_eth_failure: global deduct_eth: // stack: addr, amount, retdest DUP1 %insert_touched_addresses - %mpt_read_state_trie - // stack: account_ptr, amount, retdest - DUP1 ISZERO %jumpi(deduct_eth_no_such_account) // If the account pointer is null, return 1. 
- %add_const(1) - // stack: balance_ptr, amount, retdest + DUP2 ISZERO %jumpi(deduct_eth_noop) + DUP1 %key_balance %smt_read_state + // stack: balance_ptr, addr, amount, retdest DUP1 %mload_trie_data - // stack: balance, balance_ptr, amount, retdest - DUP1 DUP4 GT - // stack: amount > balance, balance, balance_ptr, amount, retdest + // stack: balance, balance_ptr, addr, amount, retdest + DUP1 DUP5 GT + // stack: amount > balance, balance, balance_ptr, addr, amount, retdest %jumpi(deduct_eth_insufficient_balance) - %stack (balance, balance_ptr, amount, retdest) -> (balance, amount, balance_ptr, retdest, 0) + // stack: balance, balance_ptr, addr, amount, retdest + DUP1 DUP5 EQ + // stack: amount == balance, balance, balance_ptr, addr, amount, retdest + %jumpi(deduct_eth_delete_balance) + %stack (balance, balance_ptr, addr, amount, retdest) -> (balance, amount, balance_ptr, retdest, 0) SUB SWAP1 // stack: balance_ptr, balance - amount, retdest, 0 %mstore_trie_data // stack: retdest, 0 JUMP -global deduct_eth_no_such_account: - %stack (account_ptr, amount, retdest) -> (retdest, 1) +deduct_eth_insufficient_balance: + %stack (balance, balance_ptr, addr, amount, retdest) -> (retdest, 1) + JUMP +deduct_eth_delete_balance: + %stack (balance, balance_ptr, addr, amount, retdest) -> (addr, retdest, 0) + %key_balance %smt_delete_state + // stack: retdest, 0 JUMP -global deduct_eth_insufficient_balance: - %stack (balance, balance_ptr, amount, retdest) -> (retdest, 1) +deduct_eth_noop: + %stack (addr, amount, retdest) -> (retdest, 0) JUMP // Convenience macro to call deduct_eth and return where we left off. @@ -65,42 +72,42 @@ global deduct_eth_insufficient_balance: global add_eth: // stack: addr, amount, retdest DUP1 %insert_touched_addresses - DUP1 %mpt_read_state_trie - // stack: account_ptr, addr, amount, retdest - DUP1 ISZERO %jumpi(add_eth_new_account) // If the account pointer is null, we need to create the account. 
- %add_const(1) - // stack: balance_ptr, addr, amount, retdest - DUP1 %mload_trie_data - // stack: balance, balance_ptr, addr, amount, retdest - %stack (balance, balance_ptr, addr, amount) -> (amount, balance, balance_ptr) - ADD - // stack: new_balance, balance_ptr, retdest - SWAP1 - // stack: balance_ptr, new_balance, retdest - %mstore_trie_data + DUP2 ISZERO %jumpi(add_eth_noop) + // stack: addr, amount, retdest + DUP1 %key_code %smt_read_state %mload_trie_data + // stack: codehash, addr, amount, retdest + ISZERO %jumpi(add_eth_new_account) // If the account is empty, we need to create the account. + // stack: addr, amount, retdest + %key_balance DUP1 %smt_read_state + DUP1 ISZERO %jumpi(add_eth_zero_balance) + %stack (balance_ptr, key_balance, amount) -> (balance_ptr, amount, balance_ptr) + // stack: balance_ptr, amount, balance_ptr, retdest + %mload_trie_data ADD + // stack: balance+amount, balance_ptr, retdest + SWAP1 %mstore_trie_data + JUMP +add_eth_zero_balance: + // stack: balance_ptr, key_balance, amount, retdest + POP + // stack: key_balance, amount, retdest + %smt_insert_state // stack: retdest JUMP + global add_eth_new_account: - // stack: null_account_ptr, addr, amount, retdest - POP // stack: addr, amount, retdest - DUP2 ISZERO %jumpi(add_eth_new_account_zero) DUP1 %journal_add_account_created - %get_trie_data_size // pointer to new account we're about to create - // stack: new_account_ptr, addr, amount, retdest - SWAP2 - // stack: amount, addr, new_account_ptr, retdest - PUSH 0 %append_to_trie_data // nonce - %append_to_trie_data // balance - // stack: addr, new_account_ptr, retdest - PUSH 0 %append_to_trie_data // storage root pointer - PUSH @EMPTY_STRING_HASH %append_to_trie_data // code hash - // stack: addr, new_account_ptr, retdest - %addr_to_state_key - // stack: key, new_account_ptr, retdest - %jump(mpt_insert_state_trie) + // stack: addr, amount, retdest + DUP1 %key_code + %stack (key_code) -> (key_code, @EMPTY_STRING_POSEIDON_HASH) + 
%smt_insert_state + // stack: addr, amount, retdest + %key_balance + // stack: key_balance, amount, retdest + %smt_insert_state + JUMP -add_eth_new_account_zero: +add_eth_noop: // stack: addr, amount, retdest %pop2 JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/util.asm b/evm_arithmetization/src/cpu/kernel/asm/core/util.asm index a77329bd8..58c655d58 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/util.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/util.asm @@ -34,29 +34,26 @@ // Returns 1 if the account is non-existent, 0 otherwise. %macro is_non_existent // stack: addr - %mpt_read_state_trie ISZERO + %key_code %smt_read_state ISZERO %endmacro // Returns 1 if the account is empty, 0 otherwise. %macro is_empty // stack: addr - %mpt_read_state_trie - // stack: account_ptr - DUP1 ISZERO %jumpi(%%false) - // stack: account_ptr - DUP1 %mload_trie_data - // stack: nonce, account_ptr + DUP1 %key_nonce %smt_read_state %mload_trie_data + // stack: nonce, addr ISZERO %not_bit %jumpi(%%false) - %increment DUP1 %mload_trie_data - // stack: balance, balance_ptr + // stack: addr + DUP1 %key_balance %smt_read_state %mload_trie_data + // stack: balance, addr ISZERO %not_bit %jumpi(%%false) - %add_const(2) %mload_trie_data - // stack: code_hash - PUSH @EMPTY_STRING_HASH - EQ + // stack: addr + %key_code %smt_read_state %mload_trie_data + // stack: codehash + %eq_const(@EMPTY_STRING_POSEIDON_HASH) %jump(%%after) %%false: - // stack: account_ptr + // stack: addr POP PUSH 0 %%after: diff --git a/evm_arithmetization/src/cpu/kernel/asm/journal/account_destroyed.asm b/evm_arithmetization/src/cpu/kernel/asm/journal/account_destroyed.asm index 3806a891d..5986161a5 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/journal/account_destroyed.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/journal/account_destroyed.asm @@ -16,17 +16,13 @@ revert_account_destroyed_contd: SWAP1 // Remove `prev_balance` from `target`'s balance. 
// stack: target, address, prev_balance, retdest - %mpt_read_state_trie - %add_const(1) - // stack: target_balance_ptr, address, prev_balance, retdest - DUP3 - DUP2 %mload_trie_data - // stack: target_balance, prev_balance, target_balance_ptr, address, prev_balance, retdest - SUB SWAP1 %mstore_trie_data + %key_balance DUP1 %smt_read_state %mload_trie_data + // stack: target_balance, target_balance_key, address, prev_balance, retdest + %stack (target_balance, target_balance_key, address, prev_balance) -> (target_balance, prev_balance, target_balance_key, address, prev_balance) + // stack: target_balance, prev_balance, target_balance_key, address, prev_balance, retdest + SUB SWAP1 %smt_insert_state // Set `address`'s balance to `prev_balance`. // stack: address, prev_balance, retdest - %mpt_read_state_trie - %add_const(1) - %mstore_trie_data + %key_balance %smt_insert_state + // stack: retdest JUMP - diff --git a/evm_arithmetization/src/cpu/kernel/asm/journal/code_change.asm b/evm_arithmetization/src/cpu/kernel/asm/journal/code_change.asm index 5bb637c72..61e5d5718 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/journal/code_change.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/journal/code_change.asm @@ -1,18 +1,15 @@ -// struct CodeChange { address, prev_codehash } +// struct CodeChange { address, prev_codehash, prev_code_length } %macro journal_add_code_change - %journal_add_2(@JOURNAL_ENTRY_CODE_CHANGE) + %journal_add_3(@JOURNAL_ENTRY_CODE_CHANGE) %endmacro global revert_code_change: // stack: entry_ptr, ptr, retdest POP - %journal_load_2 - // stack: address, prev_codehash, retdest - %mpt_read_state_trie - // stack: account_ptr, prev_codehash, retdest - %add_const(3) - // stack: codehash_ptr, prev_codehash, retdest - %mstore_trie_data + %journal_load_3 + %stack (address, prev_codehash, prev_code_length) -> (address, prev_codehash, address, prev_code_length) + %key_code %smt_insert_state + %key_code_length %smt_insert_state // stack: retdest JUMP diff --git 
a/evm_arithmetization/src/cpu/kernel/asm/journal/nonce_change.asm b/evm_arithmetization/src/cpu/kernel/asm/journal/nonce_change.asm index 3ab8f1367..99d6c6554 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/journal/nonce_change.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/journal/nonce_change.asm @@ -9,9 +9,6 @@ global revert_nonce_change: POP %journal_load_2 // stack: address, prev_nonce, retdest - %mpt_read_state_trie - // stack: nonce_ptr, prev_nonce retdest - %mstore_trie_data + %key_nonce %smt_insert_state // stack: retdest JUMP - diff --git a/evm_arithmetization/src/cpu/kernel/asm/journal/storage_change.asm b/evm_arithmetization/src/cpu/kernel/asm/journal/storage_change.asm index 752674d1e..5ff87cd1d 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/journal/storage_change.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/journal/storage_change.asm @@ -11,47 +11,12 @@ global revert_storage_change: // stack: address, slot, prev_value, retdest DUP3 ISZERO %jumpi(delete) // stack: address, slot, prev_value, retdest - SWAP1 %slot_to_storage_key - // stack: storage_key, address, prev_value, retdest - PUSH 64 // storage_key has 64 nibbles - // stack: 64, storage_key, address, prev_value, retdest - DUP3 %mpt_read_state_trie - DUP1 ISZERO %jumpi(panic) - // stack: account_ptr, 64, storage_key, address, prev_value, retdest - %add_const(2) - // stack: storage_root_ptr_ptr, 64, storage_key, address, prev_value, retdest - %mload_trie_data - %get_trie_data_size - DUP6 %append_to_trie_data - %stack (prev_value_ptr, storage_root_ptr, num_nibbles, storage_key, address, prev_value, retdest) -> - (storage_root_ptr, num_nibbles, storage_key, prev_value_ptr, new_storage_root, address, retdest) - %jump(mpt_insert) + %key_storage %smt_insert_state + // stack: retdest + JUMP delete: // stack: address, slot, prev_value, retdest - SWAP2 POP - %stack (slot, address, retdest) -> (slot, new_storage_root, address, retdest) - %slot_to_storage_key - // stack: storage_key, 
new_storage_root, address, retdest - PUSH 64 // storage_key has 64 nibbles - // stack: 64, storage_key, new_storage_root, address, retdest - DUP4 %mpt_read_state_trie - DUP1 ISZERO %jumpi(panic) - // stack: account_ptr, 64, storage_key, new_storage_root, address, retdest - %add_const(2) - // stack: storage_root_ptr_ptr, 64, storage_key, new_storage_root, address, retdest - %mload_trie_data - // stack: storage_root_ptr, 64, storage_key, new_storage_root, address, retdest - %jump(mpt_delete) - -new_storage_root: - // stack: new_storage_root_ptr, address, retdest - DUP2 %mpt_read_state_trie - // stack: account_ptr, new_storage_root_ptr, address, retdest - - // Update account with our new storage root pointer. - %add_const(2) - // stack: account_storage_root_ptr_ptr, new_storage_root_ptr, address, retdest - %mstore_trie_data - // stack: address, retdest + %key_storage %smt_delete_state + // stack: prev_value, retdest POP JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/main.asm b/evm_arithmetization/src/cpu/kernel/asm/main.asm index bcb49ecce..45e573856 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/main.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/main.asm @@ -36,8 +36,8 @@ global hash_initial_tries: // can check the value provided by the prover. // We initialize the segment length with 1 because the segment contains // the null pointer `0` when the tries are empty. 
- PUSH 1 - %mpt_hash_state_trie %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE) %assert_eq + PUSH 2 + %smt_hash_state %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE) %assert_eq // stack: trie_data_len %mpt_hash_txn_trie %mload_global_metadata(@GLOBAL_METADATA_TXN_TRIE_DIGEST_BEFORE) %assert_eq // stack: trie_data_len @@ -93,7 +93,7 @@ global perform_final_checks: %pop3 PUSH 1 // initial trie data length global check_state_trie: - %mpt_hash_state_trie %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER) %assert_eq + %smt_hash_state %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER) %assert_eq global check_txn_trie: %mpt_hash_txn_trie %mload_global_metadata(@GLOBAL_METADATA_TXN_TRIE_DIGEST_AFTER) %assert_eq global check_receipt_trie: diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete.asm index 913ba1fcf..5df1e283b 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete.asm @@ -22,24 +22,3 @@ mpt_delete_leaf: %pop4 PUSH 0 // empty node ptr SWAP1 JUMP - -global delete_account: - %stack (address, retdest) -> (address, delete_account_save, retdest) - %addr_to_state_key - // stack: key, delete_account_save, retdest - PUSH 64 - // stack: 64, key, delete_account_save, retdest - %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) - // stack: state_root_prt, 64, key, delete_account_save, retdest - %jump(mpt_delete) -delete_account_save: - // stack: updated_state_root_ptr, retdest - %mstore_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) - JUMP - -%macro delete_account - %stack (address) -> (address, %%after) - %jump(delete_account) -%%after: - // stack: (empty) -%endmacro \ No newline at end of file diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_read.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_read.asm 
index db9fe4222..9025842e9 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_read.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_read.asm @@ -5,35 +5,18 @@ %endmacro global sload_current: - %stack (slot) -> (slot, after_storage_read) - %slot_to_storage_key - // stack: storage_key, after_storage_read - PUSH 64 // storage_key has 64 nibbles - %current_storage_trie - // stack: storage_root_ptr, 64, storage_key, after_storage_read - %jump(mpt_read) - -global after_storage_read: - // stack: value_ptr, retdest - DUP1 %jumpi(storage_key_exists) - - // Storage key not found. Return default value_ptr = 0, - // which derefs to 0 since @SEGMENT_TRIE_DATA[0] = 0. - %stack (value_ptr, retdest) -> (retdest, 0) - JUMP - -global storage_key_exists: - // stack: value_ptr, retdest + // stack: slot, retdest + %address + // stack: addr, slot, retdest + %key_storage %smt_read_state %mload_trie_data // stack: value, retdest - SWAP1 - JUMP + SWAP1 JUMP // Read a word from the current account's storage trie. // // Pre stack: kexit_info, slot // Post stack: value - global sys_sload: // stack: kexit_info, slot SWAP1 diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm index 22c5d29de..a46375894 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm @@ -111,32 +111,12 @@ sstore_after_refund: // stack: slot, value, kexit_info DUP2 ISZERO %jumpi(sstore_delete) - // First we write the value to MPT data, and get a pointer to it. - %get_trie_data_size - // stack: value_ptr, slot, value, kexit_info - SWAP2 - // stack: value, slot, value_ptr, kexit_info - %append_to_trie_data - // stack: slot, value_ptr, kexit_info - - // Next, call mpt_insert on the current account's storage root. 
- %stack (slot, value_ptr) -> (slot, value_ptr, after_storage_insert) - %slot_to_storage_key - // stack: storage_key, value_ptr, after_storage_insert, kexit_info - PUSH 64 // storage_key has 64 nibbles - %current_storage_trie - // stack: storage_root_ptr, 64, storage_key, value_ptr, after_storage_insert, kexit_info - %jump(mpt_insert) - -after_storage_insert: - // stack: new_storage_root_ptr, kexit_info - %current_account_data - // stack: account_ptr, new_storage_root_ptr, kexit_info - - // Update the copied account with our new storage root pointer. - %add_const(2) - // stack: account_storage_root_ptr_ptr, new_storage_root_ptr, kexit_info - %mstore_trie_data + // stack: slot, value, kexit_info + %address + // stack: addr, slot, value, kexit_info + %key_storage + // stack: storage_key, value, kexit_info + %smt_insert_state // stack: kexit_info EXIT_KERNEL @@ -148,12 +128,10 @@ sstore_noop: // Delete the slot from the storage trie. sstore_delete: // stack: slot, value, kexit_info - SWAP1 POP - PUSH after_storage_insert SWAP1 - // stack: slot, after_storage_insert, kexit_info - %slot_to_storage_key - // stack: storage_key, after_storage_insert, kexit_info - PUSH 64 // storage_key has 64 nibbles - %current_storage_trie - // stack: storage_root_ptr, 64, storage_key, after_storage_insert, kexit_info - %jump(mpt_delete) + %address + // stack: address, slot, value, kexit_info + %key_storage + // stack: key_storage, value, kexit_info + %smt_delete_state + // stack: value, kexit_info + POP EXIT_KERNEL diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm new file mode 100644 index 000000000..3f131704a --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm @@ -0,0 +1,255 @@ +%macro smt_delete_state + %stack (key) -> (key, %%after) + %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) // node_ptr + // stack: node_ptr, key, retdest + %jump(smt_delete) +%%after: + // stack: new_node_ptr 
+ %mstore_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) + // stack: (empty) +%endmacro + +// Return a copy of the given node with the given key deleted. +// Assumes that the key is in the SMT. +// +// Pre stack: node_ptr, key, retdest +// Post stack: updated_node_ptr +global smt_delete: + // stack: node_ptr, key, retdest + SWAP1 %split_key + %stack (k0, k1, k2, k3, node_ptr) -> (node_ptr, 0, k0, k1, k2, k3) +smt_delete_with_keys: + // stack: node_ptr, level, ks, retdest + DUP1 %mload_trie_data + // stack: node_type, node_ptr, level, ks, retdest + // Increment node_ptr, so it points to the node payload instead of its type. + SWAP1 %increment SWAP1 + // stack: node_type, node_payload_ptr, level, ks, retdest + + DUP1 %eq_const(@SMT_NODE_INTERNAL) %jumpi(smt_delete_internal) + %eq_const(@SMT_NODE_LEAF) %jumpi(smt_delete_leaf) + PANIC // Should never happen. + +smt_delete_leaf: + // stack: node_payload_ptr, level, ks, retdest + %pop6 + PUSH 0 // empty node ptr + SWAP1 JUMP + +smt_delete_internal: + // stack: node_type, node_payload_ptr, level, ks, retdest + POP + // stack: node_payload_ptr, level, ks, retdest + DUP2 %and_const(3) // level mod 4 + // stack: level%4, node_payload_ptr, level, ks, retdest + DUP1 %eq_const(0) %jumpi(smt_delete_internal_0) + DUP1 %eq_const(1) %jumpi(smt_delete_internal_1) + DUP1 %eq_const(2) %jumpi(smt_delete_internal_2) + %eq_const(3) %jumpi(smt_delete_internal_3) + PANIC +smt_delete_internal_0: + // stack: level%4, node_payload_ptr, level, ks, retdest + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k0, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk0, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, newk0, k1, k2, k3 ) + %jump(smt_delete_internal_contd) +smt_delete_internal_1: + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k1, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk1, node_payload_ptr, level, k0, k1, k2, k3 )
-> (bit, node_payload_ptr, level, k0, newk1, k2, k3 ) + %jump(smt_delete_internal_contd) +smt_delete_internal_2: + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k2, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk2, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, k1, newk2, k3 ) + %jump(smt_delete_internal_contd) +smt_delete_internal_3: + %stack (node_payload_ptr, level, k0, k1, k2, k3 ) -> (k3, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk3, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, k1, k2, newk3 ) +smt_delete_internal_contd: + //stack: bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + PUSH internal_update + //stack: internal_update, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %rep 7 + DUP8 + %endrep + //stack: bit, node_payload_ptr, level, k0, k1, k2, k3, internal_update, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + ADD + //stack: child_ptr_ptr, level, k0, k1, k2, k3, internal_update, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %mload_trie_data + //stack: child_ptr, level, k0, k1, k2, k3, internal_update, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + SWAP1 %increment SWAP1 + //stack: child_ptr, level+1, k0, k1, k2, k3, internal_update, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %jump(smt_delete_with_keys) + +// Update the internal node, possibly deleting it, or returning a leaf node. +internal_update: + // Update the child first. 
+ //stack: deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP2 PUSH 1 SUB + //stack: 1-bit, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP4 ADD + //stack: sibling_ptr_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %mload_trie_data DUP1 %mload_trie_data + //stack: sibling_node_type, sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP1 %eq_const(@SMT_NODE_HASH) %jumpi(sibling_is_hash) + DUP1 %eq_const(@SMT_NODE_LEAF) %jumpi(sibling_is_leaf) + %eq_const(@SMT_NODE_INTERNAL) %jumpi(sibling_is_internal) + PANIC // Should never happen. +sibling_is_internal: + //stack: sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP +insert_child: + //stack: deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %stack (deleted_child_ptr, bit, node_payload_ptr) -> (node_payload_ptr, bit, deleted_child_ptr, node_payload_ptr) + ADD %mstore_trie_data + // stack: node_payload_ptr, level, ks, retdest + %decrement + %stack (node_ptr, level, k0, k1, k2, k3, retdest) -> (retdest, node_ptr) + JUMP + +sibling_is_hash: + // stack: sibling_node_type, sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP + //stack: sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %increment %mload_trie_data + // stack: hash, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %jumpi(insert_child) // Sibling is non-empty hash node. 
+sibling_is_empty: + // stack: deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP1 %mload_trie_data + // stack: deleted_child_node_type, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP1 %eq_const(@SMT_NODE_HASH) %jumpi(sibling_is_empty_child_is_hash) + DUP1 %eq_const(@SMT_NODE_LEAF) %jumpi(sibling_is_empty_child_is_leaf) +sibling_is_empty_child_is_internal: + // stack: deleted_child_node_type, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP + // stack: deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %jump(insert_child) + +sibling_is_empty_child_is_hash: + // stack: deleted_child_node_type, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP + // stack: deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP1 %increment %mload_trie_data + // stack: hash, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %jumpi(insert_child) +sibling_is_empty_child_is_empty: + // We can just delete this node. 
+ // stack: deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %pop8 + SWAP1 PUSH 0 + // stack: retdest, 0 + JUMP + +sibling_is_empty_child_is_leaf: + // stack: deleted_child_node_type, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP + // stack: deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %increment + // stack: deleted_child_key_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP4 + // stack: level, deleted_child_key_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP3 + // stack: bit, level, deleted_child_key_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP3 %mload_trie_data + // stack: child_key, bit, level, deleted_child_key_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %recombine_key + // stack: new_child_key, deleted_child_key_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP2 %mstore_trie_data + // stack: deleted_child_key_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %decrement + // stack: deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + SWAP7 + // stack: k3, bit, node_payload_ptr, level, k0, k1, k2, deleted_child_ptr, retdest + %pop7 + // stack: deleted_child_ptr, retdest + SWAP1 JUMP + +sibling_is_leaf: + // stack: sibling_node_type, sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP + // stack: sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + DUP2 %is_non_empty_node + // stack: child_is_non_empty, sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %jumpi(sibling_is_leaf_child_is_non_empty) +sibling_is_leaf_child_is_empty: + // stack: sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + %increment + // stack: sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP5 + // stack: level, sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, 
level, k0, k1, k2, k3, retdest + DUP4 + // stack: bit, level, sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + PUSH 1 SUB + // stack: obit, level, sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP3 %mload_trie_data + // stack: sibling_key, obit, level, sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %recombine_key + // stack: new_key, sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + DUP2 %mstore_trie_data + // stack: sibling_key_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + %decrement + // stack: sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + SWAP8 + // stack: k3, deleted_child_ptr, bit, node_payload_ptr, level, k0, k1, k2, sibling_ptr, retdest + %pop8 + // stack: sibling_ptr, retdest + SWAP1 JUMP + +sibling_is_leaf_child_is_non_empty: + // stack: sibling_ptr, deleted_child_ptr, bit, node_payload_ptr, level, ks, retdest + POP + // stack: deleted_child_ptr, node_payload_ptr, bit, retdest + %jump(insert_child) + + +global delete_account: + %stack (address, retdest) -> (address, retdest) + DUP1 %key_nonce + // stack: key_nonce, address, retdest + DUP1 %smt_read_state ISZERO %jumpi(zero_nonce) + // stack: key_nonce, address, retdest + DUP1 %smt_delete_state + // stack: key_nonce, address, retdest +zero_nonce: + // stack: key_nonce, address, retdest + POP + // stack: address, retdest + DUP1 %key_balance + // stack: key_balance, address, retdest + DUP1 %smt_read_state ISZERO %jumpi(zero_balance) + // stack: key_balance, address, retdest + DUP1 %smt_delete_state + // stack: key_balance, address, retdest +zero_balance: + // stack: key_balance, address, retdest + POP + // stack: address, retdest + DUP1 %key_code + // stack: key_code, address, retdest + DUP1 %smt_read_state ISZERO %jumpi(zero_code) + // stack: key_code, address, 
retdest + DUP1 %smt_delete_state + // stack: key_code, address, retdest +zero_code: + // stack: key_code, address, retdest + POP + // stack: address, retdest + DUP1 %key_code_length + // stack: key_code_length, address, retdest + DUP1 %smt_read_state ISZERO %jumpi(zero_code_length) + // stack: key_code_length, address, retdest + DUP1 %smt_delete_state +zero_code_length: + // N.B.: We don't delete the storage, since there's no way of knowing keys used. + // stack: key_code_length, address, retdest + %pop2 JUMP + +%macro delete_account + %stack (address) -> (address, %%after) + %jump(delete_account) +%%after: + // stack: (empty) +%endmacro diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/hash.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/hash.asm new file mode 100644 index 000000000..08dbfd1a1 --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/hash.asm @@ -0,0 +1,85 @@ +%macro smt_hash_state + %stack (cur_len) -> (cur_len, %%after) + %jump(smt_hash_state) +%%after: +%endmacro + +// Root hash of the state SMT. +global smt_hash_state: + // stack: cur_len, retdest + %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) + +// Root hash of SMT stored at `trie_data[ptr]`. 
+// Pseudocode: +// ``` +// hash( HashNode { h } ) = h +// hash( InternalNode { left, right } ) = Poseidon(hash(left) || hash(right) || [0,0,0,0]) +// hash( Leaf { rem_key, val_hash } ) = Poseidon(rem_key || val_hash || [1,0,0,0]) +// ``` +global smt_hash: + // stack: ptr, cur_len, retdest + DUP1 + %mload_trie_data + // stack: node, node_ptr, cur_len, retdest + DUP1 %eq_const(@SMT_NODE_HASH) %jumpi(smt_hash_hash) + DUP1 %eq_const(@SMT_NODE_INTERNAL) %jumpi(smt_hash_internal) + %eq_const(@SMT_NODE_LEAF) %jumpi(smt_hash_leaf) +smt_hash_unknown_node_type: + PANIC + +smt_hash_hash: + // stack: node, node_ptr, cur_len, retdest + POP + // stack: node_ptr, cur_len, retdest + SWAP1 %add_const(2) SWAP1 + // stack: node_ptr, cur_len, retdest + %increment + // stack: node_ptr+1, cur_len, retdest + %mload_trie_data + %stack (hash, cur_len, retdest) -> (retdest, hash, cur_len) + JUMP + +smt_hash_internal: + // stack: node, node_ptr, cur_len, retdest + POP + // stack: node_ptr, cur_len, retdest + SWAP1 %add_const(3) SWAP1 + %increment + // stack: node_ptr+1, cur_len, retdest + DUP1 + %mload_trie_data + %stack (left_child_ptr, node_ptr_plus_1, cur_len, retdest) -> (left_child_ptr, cur_len, smt_hash_internal_after_left, node_ptr_plus_1, retdest) + %jump(smt_hash) +smt_hash_internal_after_left: + %stack (left_hash, cur_len, node_ptr_plus_1, retdest) -> (node_ptr_plus_1, left_hash, cur_len, retdest) + %increment + // stack: node_ptr+2, left_hash, cur_len, retdest + %mload_trie_data + %stack (right_child_ptr, left_hash, cur_len, retdest) -> (right_child_ptr, cur_len, smt_hash_internal_after_right, left_hash, retdest) + %jump(smt_hash) +smt_hash_internal_after_right: + %stack (right_hash, cur_len, left_hash) -> (left_hash, right_hash, 0, cur_len) + POSEIDON + %stack (hash, cur_len, retdest) -> (retdest, hash, cur_len) + JUMP + +smt_hash_leaf: + // stack: node_ptr, cur_len, retdest + SWAP1 %add_const(3) SWAP1 + // stack: node_ptr, cur_len, retdest + %increment + // stack: node_ptr+1, 
cur_len, retdest + DUP1 %increment + // stack: node_ptr+2, node_ptr+1, cur_len, retdest + %mload_trie_data + // stack: value, node_ptr+1, cur_len, retdest + SWAP1 + // stack: node_ptr+1, value, cur_len, retdest + %mload_trie_data + %stack (rem_key, value) -> (value, smt_hash_leaf_contd, rem_key) + %jump(hash_limbs) +smt_hash_leaf_contd: + %stack (value_hash, rem_key) -> (rem_key, value_hash, 1) + POSEIDON + %stack (hash, cur_len, retdest) -> (retdest, hash, cur_len) + JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/insert.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/insert.asm new file mode 100644 index 000000000..20589b083 --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/insert.asm @@ -0,0 +1,175 @@ +// Insert a key-value pair in the state SMT. +global smt_insert_state: + // stack: key, value, retdest + DUP2 ISZERO %jumpi(insert_zero) + // stack: key, value, retdest + %stack (key, value) -> (key, value, smt_insert_state_after) + %split_key + // stack: k0, k1, k2, k3, value, smt_insert_state_after, retdest + PUSH 0 + // stack: level, k0, k1, k2, k3, value, smt_insert_state_after, retdest + %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) // node_ptr + // stack: node_ptr, level, k0, k1, k2, k3, value, smt_insert_state_after, retdest + %jump(smt_insert) + +smt_insert_state_after: + // stack: root_ptr, retdest + %mstore_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) + // stack: retdest + JUMP + +%macro smt_insert_state + %stack (key, value_ptr) -> (key, value_ptr, %%after) + %jump(smt_insert_state) +%%after: +%endmacro + +// Insert a key-value pair in the SMT at `trie_data[node_ptr]`. 
+// Pseudocode: +// ``` +// insert( HashNode { h }, key, value ) = if h == 0 then Leaf { key, value } else PANIC +// insert( InternalNode { left, right }, key, value ) = if key&1 { insert( right, key>>1, value ) } else { insert( left, key>>1, value ) } +// insert( Leaf { key', value' }, key, value ) = { +// let internal = new InternalNode; +// insert(internal, key', value'); +// insert(internal, key, value); +// return internal;} +// ``` +global smt_insert: + // stack: node_ptr, level, ks, value, retdest + DUP1 %mload_trie_data + // stack: node_type, node_ptr, level, ks, value, retdest + // Increment node_ptr, so it points to the node payload instead of its type. + SWAP1 %increment SWAP1 + // stack: node_type, node_payload_ptr, level, ks, value, retdest + + DUP1 %eq_const(@SMT_NODE_HASH) %jumpi(smt_insert_hash) + DUP1 %eq_const(@SMT_NODE_INTERNAL) %jumpi(smt_insert_internal) + DUP1 %eq_const(@SMT_NODE_LEAF) %jumpi(smt_insert_leaf) + PANIC + +smt_insert_hash: + // stack: node_type, node_payload_ptr, level, ks, value, retdest + POP + // stack: node_payload_ptr, level, ks, value, retdest + %mload_trie_data + // stack: hash, level, ks, value, retdest + ISZERO %jumpi(smt_insert_empty) + PANIC // Trying to insert in a non-empty hash node. 
+smt_insert_empty: + // stack: level, ks, value, retdest + POP + // stack: ks, value, retdest + %combine_key + // stack: key, value, retdest + %get_trie_data_size + // stack: index, key, value, retdest + PUSH @SMT_NODE_LEAF %append_to_trie_data + %stack (index, key, value) -> (key, value, index) + %append_to_trie_data // key + %append_to_trie_data // value + // stack: index, retdest + SWAP1 JUMP + +smt_insert_internal: + // stack: node_type, node_payload_ptr, level, ks, value, retdest + POP + // stack: node_payload_ptr, level, ks, value, retdest + DUP2 %and_const(3) // level mod 4 + // stack: level%4, node_payload_ptr, level, ks, value, retdest + DUP1 %eq_const(0) %jumpi(smt_insert_internal_0) + DUP1 %eq_const(1) %jumpi(smt_insert_internal_1) + DUP1 %eq_const(2) %jumpi(smt_insert_internal_2) + %eq_const(3) %jumpi(smt_insert_internal_3) + PANIC +smt_insert_internal_0: + // stack: level%4, node_payload_ptr, level, ks, value, retdest + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k0, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk0, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, newk0, k1, k2, k3 ) + %jump(smt_insert_internal_contd) +smt_insert_internal_1: + // stack: level%4, node_payload_ptr, level, ks, value, retdest + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k1, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk1, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, newk1, k2, k3 ) + %jump(smt_insert_internal_contd) +smt_insert_internal_2: + // stack: level%4, node_payload_ptr, level, ks, value, retdest + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k2, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk2, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, k1, newk2, k3 ) + %jump(smt_insert_internal_contd) +smt_insert_internal_3: + // stack: 
level%4, node_payload_ptr, level, ks, value, retdest + %stack (node_payload_ptr, level, k0, k1, k2, k3 ) -> (k3, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk3, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, k1, k2, newk3 ) + %jump(smt_insert_internal_contd) +smt_insert_internal_contd: + // stack: bit, node_payload_ptr, level, ks, value, retdest + DUP2 ADD + // stack: child_ptr_ptr, node_payload_ptr, level, ks, value, retdest + DUP1 %mload_trie_data + // stack: child_ptr, child_ptr_ptr, node_payload_ptr, level, ks, value, retdest + SWAP3 %increment SWAP3 + %stack (child_ptr, child_ptr_ptr, node_payload_ptr, level_plus_1, k0, k1, k2, k3, value, retdest) -> + (child_ptr, level_plus_1, k0, k1, k2, k3, value, smt_insert_internal_after, child_ptr_ptr, node_payload_ptr, retdest) + %jump(smt_insert) + +smt_insert_internal_after: + // stack: new_node_ptr, child_ptr_ptr, node_payload_ptr, retdest + SWAP1 %mstore_trie_data + // stack: node_payload_ptr, retdest + %decrement + SWAP1 JUMP + +smt_insert_leaf: + // stack: node_type, node_payload_ptr, level, ks, value, retdest + POP + %stack (node_payload_ptr, level, k0, k1, k2, k3, value) -> (k0, k1, k2, k3, node_payload_ptr, level, k0, k1, k2, k3, value) + %combine_key + // stack: rem_key, node_payload_ptr, level, ks, value, retdest + DUP2 %mload_trie_data + // stack: existing_key, rem_key, node_payload_ptr, level, ks, value, retdest + DUP2 DUP2 EQ %jumpi(smt_insert_leaf_same_key) + // stack: existing_key, rem_key, node_payload_ptr, level, ks, value, retdest + // We create an internal node with two empty children, and then we insert the two leaves. 
+ %get_trie_data_size + // stack: index, existing_key, rem_key, node_payload_ptr, level, ks, value, retdest + PUSH @SMT_NODE_INTERNAL %append_to_trie_data + PUSH 0 %append_to_trie_data // Empty hash node + PUSH 0 %append_to_trie_data // Empty hash node + // stack: index, existing_key, rem_key, node_payload_ptr, level, ks, value, retdest + SWAP1 %split_key + // stack: existing_k0, existing_k1, existing_k2, existing_k3, index, rem_key, node_payload_ptr, level, ks, value, retdest + DUP7 %increment %mload_trie_data + // stack: existing_value, existing_k0, existing_k1, existing_k2, existing_k3, index, rem_key, node_payload_ptr, level, ks, value, retdest + DUP9 + %stack (level, existing_value, existing_k0, existing_k1, existing_k2, existing_k3, index) -> (index, level, existing_k0, existing_k1, existing_k2, existing_k3, existing_value, after_first_leaf) + %jump(smt_insert) +after_first_leaf: + // stack: internal_ptr, rem_key, node_payload_ptr, level, ks, value, retdest + %stack (internal_ptr, rem_key, node_payload_ptr, level, k0, k1, k2, k3, value) -> (internal_ptr, level, k0, k1, k2, k3, value) + %jump(smt_insert) + +smt_insert_leaf_same_key: + // stack: existing_key, rem_key, node_payload_ptr, level, ks, value, retdest + %pop2 + %stack (node_payload_ptr, level, k0, k1, k2, k3, value) -> (node_payload_ptr, value, node_payload_ptr) + %increment %mstore_trie_data + // stack: node_payload_ptr, retdest + %decrement + // stack: node_ptr, retdest + SWAP1 JUMP + +insert_zero: + // stack: key, value, retdest + DUP1 %smt_read_state %mload_trie_data %jumpi(delete) + // stack: key, value, retdest + %pop2 JUMP +delete: + // stack: key, value, retdest + %smt_delete_state + // stack: value, retdest + POP JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/keys.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/keys.asm new file mode 100644 index 000000000..0d8b09013 --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/keys.asm @@ -0,0 +1,131 @@ +/// See 
`smt_trie::keys.rs` for documentation. + +// addr = sum_{0<=i<5} a_i << (32i) +%macro key_balance + // stack: addr + PUSH 0x100000000 + // stack: u32max, addr + DUP1 DUP3 MOD + // stack: a_0, u32max, addr + DUP2 DUP4 %shr_const(32) MOD %shl_const(64) ADD + // stack: a_0 + a_1<<64, u32max, addr + DUP2 DUP4 %shr_const(64) MOD %shl_const(128) ADD + // stack: a_0 + a_1<<64 + a_2<<128, u32max, addr + DUP2 DUP4 %shr_const(96) MOD %shl_const(192) ADD + // stack: a_0 + a_1<<64 + a_2<<128 + a_3<<192, u32max, addr + SWAP2 %shr_const(128) + // stack: a_4, u32max, a_0 + a_1<<64 + a_2<<128 + a_3<<192 + %stack (y, u32max, x) -> (x, y, @POSEIDON_HASH_ZEROS) + POSEIDON +%endmacro + +// addr = sum_{0<=i<5} a_i << (32i) +%macro key_nonce + // stack: addr + PUSH 0x100000000 + // stack: u32max, addr + DUP1 DUP3 MOD + // stack: a_0, u32max, addr + DUP2 DUP4 %shr_const(32) MOD %shl_const(64) ADD + // stack: a_0 + a_1<<64, u32max, addr + DUP2 DUP4 %shr_const(64) MOD %shl_const(128) ADD + // stack: a_0 + a_1<<64 + a_2<<128, u32max, addr + DUP2 DUP4 %shr_const(96) MOD %shl_const(192) ADD + // stack: a_0 + a_1<<64 + a_2<<128 + a_3<<192, u32max, addr + SWAP2 %shr_const(128) + // stack: a_4, u32max, a_0 + a_1<<64 + a_2<<128 + a_3<<192 + %add_const(0x100000000000000000000000000000000) // SMT_KEY_NONCE (=1) << 128 + %stack (y, u32max, x) -> (x, y, @POSEIDON_HASH_ZEROS) + POSEIDON +%endmacro + +// addr = sum_{0<=i<5} a_i << (32i) +%macro key_code + // stack: addr + PUSH 0x100000000 + // stack: u32max, addr + DUP1 DUP3 MOD + // stack: a_0, u32max, addr + DUP2 DUP4 %shr_const(32) MOD %shl_const(64) ADD + // stack: a_0 + a_1<<64, u32max, addr + DUP2 DUP4 %shr_const(64) MOD %shl_const(128) ADD + // stack: a_0 + a_1<<64 + a_2<<128, u32max, addr + DUP2 DUP4 %shr_const(96) MOD %shl_const(192) ADD + // stack: a_0 + a_1<<64 + a_2<<128 + a_3<<192, u32max, addr + SWAP2 %shr_const(128) + // stack: a_4, u32max, a_0 + a_1<<64 + a_2<<128 + a_3<<192 + %add_const(0x200000000000000000000000000000000) // 
SMT_KEY_CODE (=2) << 128 + %stack (y, u32max, x) -> (x, y, @POSEIDON_HASH_ZEROS) + POSEIDON +%endmacro + +// addr = sum_{0<=i<5} a_i << (32i) +%macro key_code_length + // stack: addr + PUSH 0x100000000 + // stack: u32max, addr + DUP1 DUP3 MOD + // stack: a_0, u32max, addr + DUP2 DUP4 %shr_const(32) MOD %shl_const(64) ADD + // stack: a_0 + a_1<<64, u32max, addr + DUP2 DUP4 %shr_const(64) MOD %shl_const(128) ADD + // stack: a_0 + a_1<<64 + a_2<<128, u32max, addr + DUP2 DUP4 %shr_const(96) MOD %shl_const(192) ADD + // stack: a_0 + a_1<<64 + a_2<<128 + a_3<<192, u32max, addr + SWAP2 %shr_const(128) + // stack: a_4, u32max, a_0 + a_1<<64 + a_2<<128 + a_3<<192 + %add_const(0x400000000000000000000000000000000) // SMT_KEY_CODE_LENGTH (=4) << 128 + %stack (y, u32max, x) -> (x, y, @POSEIDON_HASH_ZEROS) + POSEIDON +%endmacro + +// addr = sum_{0<=i<5} a_i << (32i) +%macro key_storage + %stack (addr, slot) -> (slot, %%after, addr) + %jump(hash_limbs) +%%after: + // stack: capacity, addr + SWAP1 + // stack: addr, capacity + PUSH 0x100000000 + // stack: u32max, addr, capacity + DUP1 DUP3 MOD + // stack: a_0, u32max, addr + DUP2 DUP4 %shr_const(32) MOD %shl_const(64) ADD + // stack: a_0 + a_1<<64, u32max, addr + DUP2 DUP4 %shr_const(64) MOD %shl_const(128) ADD + // stack: a_0 + a_1<<64 + a_2<<128, u32max, addr + DUP2 DUP4 %shr_const(96) MOD %shl_const(192) ADD + // stack: a_0 + a_1<<64 + a_2<<128 + a_3<<192, u32max, addr + SWAP2 %shr_const(128) + // stack: a_4, u32max, a_0 + a_1<<64 + a_2<<128 + a_3<<192 + %add_const(0x300000000000000000000000000000000) // SMT_KEY_STORAGE (=3) << 128 + %stack (y, u32max, x, capacity) -> (x, y, capacity) + POSEIDON +%endmacro + +// slot = sum_{0<=i<8} s_i << (32i) +global hash_limbs: + // stack: slot, retdest + PUSH 0x100000000 + // stack: u32max, slot, retdest + DUP1 DUP3 MOD + // stack: s_0, u32max, slot + DUP2 DUP4 %shr_const(32) MOD %shl_const(64) ADD + // stack: s_0 + s_1<<64, u32max, slot + DUP2 DUP4 %shr_const(64) MOD %shl_const(128) ADD + 
// stack: s_0 + s_1<<64 + s_2<<128, u32max, slot + DUP2 DUP4 %shr_const(96) MOD %shl_const(192) ADD + // stack: s_0 + s_1<<64 + s_2<<128 + s_3<<192, u32max, slot + DUP2 DUP4 %shr_const(128) MOD + // stack: s_4, s_0 + s_1<<64 + s_2<<128 + s_3<<192, u32max, slot + DUP3 DUP5 %shr_const(160) MOD %shl_const(64) ADD + // stack: s_4 + s_5<<64, s_0 + s_1<<64 + s_2<<128 + s_3<<192, u32max, slot + DUP3 DUP5 %shr_const(192) MOD %shl_const(128) ADD + // stack: s_4 + s_5<<64 + s_6<<128, s_0 + s_1<<64 + s_2<<128 + s_3<<192, u32max, slot + DUP3 DUP5 %shr_const(224) MOD %shl_const(192) ADD + // stack: s_4 + s_5<<64 + s_6<<128 + s_7<<192, s_0 + s_1<<64 + s_2<<128 + s_3<<192, u32max, slot + %stack (b, a, u32max, slot) -> (a, b, 0) + POSEIDON + // stack: hash, retdest + SWAP1 JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/read.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/read.asm new file mode 100644 index 000000000..cd4bce33d --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/read.asm @@ -0,0 +1,110 @@ +// Given a key, return a pointer to the associated SMT entry. +// Returns 0 if the key is not in the SMT. +global smt_read_state: + // stack: key, retdest + %split_key + // stack: k0, k1, k2, k3, retdest + PUSH 0 + // stack: level, k0, k1, k2, k3, retdest + %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) // node_ptr + // stack: node_ptr, level, k0, k1, k2, k3, retdest + %jump(smt_read) + +// Convenience macro to call smt_read_state and return where we left off. +%macro smt_read_state + %stack (key) -> (key, %%after) + %jump(smt_read_state) +%%after: +%endmacro + +// Return a pointer to the data at the given key in the SMT at `trie_data[node_ptr]`. 
+// Pseudocode: +// ``` +// read( HashNode { h }, key ) = if h == 0 then 0 else PANIC +// read( InternalNode { left, right }, key ) = if key&1 { read( right, key>>1 ) } else { read( left, key>>1 ) } +// read( Leaf { rem_key', value }, key ) = if rem_key == rem_key' then &value else 0 +// ``` +global smt_read: + // stack: node_ptr, level, ks, retdest + DUP1 %mload_trie_data + // stack: node_type, node_ptr, level, ks, retdest + // Increment node_ptr, so it points to the node payload instead of its type. + SWAP1 %increment SWAP1 + // stack: node_type, node_payload_ptr, level, ks, retdest + + DUP1 %eq_const(@SMT_NODE_HASH) %jumpi(smt_read_hash) + DUP1 %eq_const(@SMT_NODE_INTERNAL) %jumpi(smt_read_internal) + %eq_const(@SMT_NODE_LEAF) %jumpi(smt_read_leaf) + PANIC + +smt_read_hash: + // stack: node_type, node_payload_ptr, level, ks, retdest + POP + // stack: node_payload_ptr, level, ks, retdest + %mload_trie_data + // stack: hash, level, ks, retdest + ISZERO %jumpi(smt_read_empty) + PANIC // Trying to read a non-empty hash node. Should never happen. 
+ +smt_read_empty: + %stack (level, k0, k1, k2, k3, retdest) -> (retdest, 0) + JUMP + +smt_read_internal: + // stack: node_type, node_payload_ptr, level, ks, retdest + POP + // stack: node_payload_ptr, level, ks, retdest + DUP2 %and_const(3) // level mod 4 + // stack: level%4, node_payload_ptr, level, ks, retdest + DUP1 %eq_const(0) %jumpi(smt_read_internal_0) + DUP1 %eq_const(1) %jumpi(smt_read_internal_1) + DUP1 %eq_const(2) %jumpi(smt_read_internal_2) + DUP1 %eq_const(3) %jumpi(smt_read_internal_3) + PANIC +smt_read_internal_0: + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k0, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk0, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, newk0, k1, k2, k3 ) + %jump(smt_read_internal_contd) +smt_read_internal_1: + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k1, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk1, node_payload_ptr, level , k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, newk1, k2, k3 ) + %jump(smt_read_internal_contd) +smt_read_internal_2: + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k2, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk2, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, k1, newk2, k3 ) + %jump(smt_read_internal_contd) +smt_read_internal_3: + %stack (level_mod_4, node_payload_ptr, level, k0, k1, k2, k3 ) -> (k3, node_payload_ptr, level, k0, k1, k2, k3 ) + %pop_bit + %stack (bit, newk3, node_payload_ptr, level, k0, k1, k2, k3 ) -> (bit, node_payload_ptr, level, k0, k1, k2, newk3 ) +smt_read_internal_contd: + // stack: bit, node_payload_ptr, level, k0, k1, k2, k3, retdest + ADD + // stack: child_ptr_ptr, level, k0, k1, k2, k3, retdest + %mload_trie_data + // stack: child_ptr, level, k0, k1, k2, k3, retdest + SWAP1 %increment SWAP1 + // stack: child_ptr, level+1, k0, k1, k2, k3, retdest + 
%jump(smt_read) + +smt_read_leaf: + // stack: node_payload_ptr, level, ks, retdest + DUP1 %mload_trie_data + // stack: rem_key, node_payload_ptr, level, ks, retdest + SWAP1 + // stack: node_payload_ptr, rem_key, level, ks, retdest + %increment + %stack (value_ptr, rem_key, level, k0, k1, k2, k3) -> (k0, k1, k2, k3, rem_key, value_ptr) + %combine_key + // stack: this_rem_key, rem_key, value_ptr, retdest + EQ %jumpi(smt_read_existing_leaf) +smt_read_non_existing_leaf: + %stack (value_ptr, retdest) -> (retdest, 0) + JUMP + +smt_read_existing_leaf: + // stack: value_ptr, retdest + SWAP1 JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/utils.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/utils.asm new file mode 100644 index 000000000..4f8c2832e --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/utils.asm @@ -0,0 +1,129 @@ +// Input: x +// Output: (x&1, x>>1) +%macro pop_bit + // stack: key + DUP1 %shr_const(1) + // stack: key>>1, key + SWAP1 %mod_const(2) + // stack: key&1, key>>1 +%endmacro + +// Returns a non-zero value if the node is non-empty. +%macro is_non_empty_node + // stack: node_ptr + DUP1 %mload_trie_data %jumpi(%%end) // If the node is not a hash node, node_ptr is non-zero. + // The node is a hash node + // stack: node_ptr + %increment %mload_trie_data + // stack: hash +%%end: +%endmacro + +// Input: key = k0 + k1.2^64 + k2.2^128 + k3.2^192, with 0<=ki<2^64. 
+// Output: (k0, k1, k2, k3) +%macro split_key + // stack: key + DUP1 %shr_const(128) %mod_const(0x10000000000000000) + // stack: k2, key + DUP2 %shr_const(64) %mod_const(0x10000000000000000) + // stack: k1, k2, key + DUP3 %shr_const(192) + // stack: k3, k1, k2, key + SWAP3 %mod_const(0x10000000000000000) + // stack: k0, k1, k2, k3 +%endmacro + +// Input: (k0, k1, k2, k3) +// Output: k0 + k1.2^64 + k2.2^128 + k3.2^192 +%macro combine_key + // stack: k0, k1, k2, k3 + SWAP1 %shl_const(64) ADD + // stack: k0 + k1<<64, k2, k3 + SWAP1 %shl_const(128) ADD + // stack: k0 + k1<<64 + k2<<128, k3 + SWAP1 %shl_const(192) ADD + // stack: k0 + k1<<64 + k2<<128 + k3<<192 +%endmacro + + +// Pseudocode: +// ``` +// def recombine_key(key, bit, level): +// k0, k1, k2, k3 = [(key>>(64*i))&(2**64-1) for i in range(4)] +// match level%4: +// 0 => k0 = 2*k0 + bit +// 1 => k1 = 2*k1 + bit +// 2 => k2 = 2*k2 + bit +// 3 => k3 = 2*k3 + bit +// return k0 + (k1<<64) + (k2<<128) + (k3<<192) +// ``` +%macro recombine_key + // stack: key, bit, level + SWAP1 + // stack: bit, key, level + SWAP2 + // stack: level, key, bit + %mod_const(4) + // stack: level%4, key, bit + DUP1 %eq_const(0) %jumpi(%%recombine_key_0) + DUP1 %eq_const(1) %jumpi(%%recombine_key_1) + DUP1 %eq_const(2) %jumpi(%%recombine_key_2) + DUP1 %eq_const(3) %jumpi(%%recombine_key_3) + PANIC +%%recombine_key_0: + // stack: level%4, key, bit + POP + // stack: key, bit + %split_key + // stack: k0, k1, k2, k3, bit + %shl_const(1) + // stack: k0<<1, k1, k2, k3, bit + DUP5 ADD + // stack: k0<<1 + bit, k1, k2, k3, bit + %combine_key + %stack (newkey, bit) -> (newkey) + %jump(%%after) +%%recombine_key_1: + // stack: level%4, key, bit + POP + // stack: key, bit + %split_key + // stack: k0, k1, k2, k3, bit + DUP2 %shl_const(1) + // stack: k1<<1, k0, k1, k2, k3, bit + DUP6 ADD + // stack: k1<<1 + bit, k0, k1, k2, k3, bit + SWAP2 POP + %combine_key + %stack (newkey, bit) -> (newkey) + %jump(%%after) +%%recombine_key_2: + // stack: key, bit + 
POP + // stack: key, bit + %split_key + // stack: k0, k1, k2, k3, bit + DUP3 %shl_const(1) + // stack: k2<<1, k0, k1, k2, k3, bit + DUP6 ADD + // stack: k2<<1 + bit, k0, k1, k2, k3, bit + SWAP3 POP + %combine_key + %stack (newkey, bit) -> (newkey) + %jump(%%after) +%%recombine_key_3: + // stack: key, bit + POP + // stack: key, bit + %split_key + // stack: k0, k1, k2, k3, bit + DUP4 %shl_const(1) + // stack: k3<<1, k0, k1, k2, k3, bit + DUP6 ADD + // stack: k3<<1 + bit, k0, k1, k2, k3, bit + SWAP4 POP + %combine_key + %stack (newkey, bit) -> (newkey) +%%after: + // stack: newkey +%endmacro diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/common_decoding.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/common_decoding.asm index 223e0a62e..3adf73b1a 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/common_decoding.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/common_decoding.asm @@ -221,9 +221,8 @@ decode_and_store_access_list_finish: %endmacro insert_accessed_storage_keys_with_original_value: - %stack (addr, key, retdest) -> (key, addr, after_read, addr, key, retdest) - %jump(sload_with_addr) -after_read: + %stack (addr, key, retdest) -> (addr, key, addr, key, retdest) + %key_storage %smt_read_state %mload_trie_data %stack (value, addr, key, retdest) -> ( addr, key, value, retdest) %insert_accessed_storage_keys // stack: cold_access, value_ptr, value, retdest @@ -233,25 +232,3 @@ after_read: // stack: cold_access, retdest POP JUMP - - -sload_with_addr: - %stack (slot, addr) -> (slot, addr, after_storage_read) - %slot_to_storage_key - // stack: storage_key, addr, after_storage_read - PUSH 64 // storage_key has 64 nibbles - %stack (n64, storage_key, addr, after_storage_read) -> (addr, n64, storage_key, after_storage_read) - %mpt_read_state_trie - // stack: account_ptr, 64, storage_key, after_storage_read - DUP1 ISZERO %jumpi(ret_zero) // TODO: Fix this. This should never happen. 
- // stack: account_ptr, 64, storage_key, after_storage_read - %add_const(2) - // stack: storage_root_ptr_ptr - %mload_trie_data - // stack: storage_root_ptr, 64, storage_key, after_storage_read - %jump(mpt_read) - -ret_zero: - // stack: account_ptr, 64, storage_key, after_storage_read, retdest - %pop4 - PUSH 0 SWAP1 JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm index 12d105b8a..6eaf019c1 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm @@ -84,6 +84,7 @@ type_0_compute_signed_data: // otherwise, it is // keccak256(rlp([nonce, gas_price, gas_limit, to, value, data])) + %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. %alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_NONCE) diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm index f8a7a556e..ecbe37333 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm @@ -38,6 +38,7 @@ global process_type_1_txn: // The signatureYParity, signatureR, signatureS elements of this transaction represent a secp256k1 signature // over keccak256(0x01 || rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, accessList])). type_1_compute_signed_data: + %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. 
%alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_CHAIN_ID) diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm index 41bdfd4ed..e1b6bfcd5 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm @@ -41,6 +41,7 @@ global process_type_2_txn: // The signature_y_parity, signature_r, signature_s elements of this transaction represent a secp256k1 signature over // keccak256(0x02 || rlp([chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, destination, amount, data, access_list])) type_2_compute_signed_data: + %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. %alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_CHAIN_ID) diff --git a/evm_arithmetization/src/cpu/kernel/constants/mod.rs b/evm_arithmetization/src/cpu/kernel/constants/mod.rs index ffe3f6666..d373bfaf5 100644 --- a/evm_arithmetization/src/cpu/kernel/constants/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/constants/mod.rs @@ -6,6 +6,7 @@ use hex_literal::hex; use crate::cpu::kernel::constants::context_metadata::ContextMetadata; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; use crate::cpu::kernel::constants::journal_entry::JournalEntry; +use crate::cpu::kernel::constants::smt_type::PartialSmtType; use crate::cpu::kernel::constants::trie_type::PartialTrieType; use crate::cpu::kernel::constants::txn_fields::NormalizedTxnField; use crate::memory::segments::Segment; @@ -14,6 +15,7 @@ pub(crate) mod context_metadata; mod exc_bitfields; pub(crate) mod global_metadata; pub(crate) mod journal_entry; +pub(crate) mod smt_type; pub(crate) mod trie_type; pub(crate) mod txn_fields; @@ -56,6 +58,7 @@ pub(crate) fn evm_constants() -> HashMap { c.insert(MAX_NONCE.0.into(), U256::from(MAX_NONCE.1)); c.insert(CALL_STACK_LIMIT.0.into(), 
U256::from(CALL_STACK_LIMIT.1)); + c.insert(POSEIDON_HASH_ZEROS.0.into(), POSEIDON_HASH_ZEROS.1); for segment in Segment::all() { c.insert(segment.var_name().into(), (segment as usize).into()); @@ -75,6 +78,9 @@ pub(crate) fn evm_constants() -> HashMap { for trie_type in PartialTrieType::all() { c.insert(trie_type.var_name().into(), (trie_type as u32).into()); } + for trie_type in PartialSmtType::all() { + c.insert(trie_type.var_name().into(), (trie_type as u32).into()); + } for entry in JournalEntry::all() { c.insert(entry.var_name().into(), (entry as u32).into()); } @@ -116,7 +122,7 @@ const MISC_CONSTANTS: [(&str, [u8; 32]); 4] = [ ), ]; -const HASH_CONSTANTS: [(&str, [u8; 32]); 2] = [ +const HASH_CONSTANTS: [(&str, [u8; 32]); 3] = [ // Hash of an empty string: keccak(b'').hex() ( "EMPTY_STRING_HASH", @@ -127,6 +133,10 @@ const HASH_CONSTANTS: [(&str, [u8; 32]); 2] = [ "EMPTY_NODE_HASH", hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"), ), + ( + "EMPTY_STRING_POSEIDON_HASH", + hex!("3baed9289a384f6c1c05d92b56c801c2d2e2a7050d6c16538b814fa186835c79"), + ), ]; const EC_CONSTANTS: [(&str, [u8; 32]); 20] = [ @@ -293,3 +303,13 @@ const CODE_SIZE_LIMIT: [(&str, u64); 3] = [ const MAX_NONCE: (&str, u64) = ("MAX_NONCE", 0xffffffffffffffff); const CALL_STACK_LIMIT: (&str, u64) = ("CALL_STACK_LIMIT", 1024); + +const POSEIDON_HASH_ZEROS: (&str, U256) = ( + "POSEIDON_HASH_ZEROS", + U256([ + 4330397376401421145, + 14124799381142128323, + 8742572140681234676, + 14345658006221440202, + ]), +); diff --git a/evm_arithmetization/src/cpu/kernel/constants/smt_type.rs b/evm_arithmetization/src/cpu/kernel/constants/smt_type.rs new file mode 100644 index 000000000..b134598bc --- /dev/null +++ b/evm_arithmetization/src/cpu/kernel/constants/smt_type.rs @@ -0,0 +1,23 @@ +#[derive(Copy, Clone, Debug)] +pub(crate) enum PartialSmtType { + Hash = 0, + Internal = 1, + Leaf = 2, +} + +impl PartialSmtType { + pub(crate) const COUNT: usize = 3; + + pub(crate) fn all() -> 
[Self; Self::COUNT] { + [Self::Hash, Self::Internal, Self::Leaf] + } + + /// The variable name that gets passed into kernel assembly code. + pub(crate) fn var_name(&self) -> &'static str { + match self { + Self::Hash => "SMT_NODE_HASH", + Self::Internal => "SMT_NODE_INTERNAL", + Self::Leaf => "SMT_NODE_LEAF", + } + } +} diff --git a/evm_arithmetization/src/cpu/kernel/interpreter.rs b/evm_arithmetization/src/cpu/kernel/interpreter.rs index 593b4a3f8..9c2802b4c 100644 --- a/evm_arithmetization/src/cpu/kernel/interpreter.rs +++ b/evm_arithmetization/src/cpu/kernel/interpreter.rs @@ -7,7 +7,13 @@ use std::collections::{BTreeSet, HashMap}; use anyhow::anyhow; use ethereum_types::{BigEndianHash, U256}; use mpt_trie::partial_trie::PartialTrie; -use plonky2::field::types::Field; +use plonky2::field::goldilocks_field::GoldilocksField; +use plonky2::field::types::{Field, PrimeField64}; +use plonky2::hash::hash_types::RichField; +use plonky2::hash::poseidon::Poseidon; +use serde::Serialize; +use smt_trie::smt::{hash_serialize, hash_serialize_u256}; +use smt_trie::utils::hashout2u; use crate::byte_packing::byte_packing_stark::BytePackingOp; use crate::cpu::columns::CpuColumnsView; @@ -66,7 +72,7 @@ struct InterpreterRegistersState { registers: RegistersState, } -pub(crate) fn run_interpreter( +pub(crate) fn run_interpreter( initial_offset: usize, initial_stack: Vec, ) -> anyhow::Result> { @@ -81,7 +87,7 @@ pub(crate) struct InterpreterMemoryInitialization { pub memory: Vec<(usize, Vec)>, } -pub(crate) fn run_interpreter_with_memory( +pub(crate) fn run_interpreter_with_memory( memory_init: InterpreterMemoryInitialization, ) -> anyhow::Result> { let label = KERNEL.global_labels[&memory_init.label]; @@ -100,7 +106,7 @@ pub(crate) fn run_interpreter_with_memory( Ok(interpreter) } -pub(crate) fn run( +pub(crate) fn run( initial_offset: usize, initial_stack: Vec, ) -> anyhow::Result> { @@ -111,7 +117,7 @@ pub(crate) fn run( /// Simulates the CPU execution from `state` until the 
program counter reaches /// `final_label` in the current context. -pub(crate) fn simulate_cpu_and_get_user_jumps( +pub(crate) fn simulate_cpu_and_get_user_jumps( final_label: &str, state: &GenerationState, ) -> Option>> { @@ -139,7 +145,7 @@ pub(crate) fn simulate_cpu_and_get_user_jumps( } } -impl Interpreter { +impl Interpreter { /// Returns an instance of `Interpreter` given `GenerationInputs`, and /// assuming we are initializing with the `KERNEL` code. pub(crate) fn new_with_generation_inputs( @@ -257,7 +263,7 @@ impl Interpreter { ), ( GlobalMetadata::StateTrieRootDigestBefore, - h2u(tries.state_trie.hash()), + hash_serialize_u256(&tries.state_smt), ), ( GlobalMetadata::TransactionTrieRootDigestBefore, @@ -740,7 +746,7 @@ impl Interpreter { } } -impl State for Interpreter { +impl State for Interpreter { //// Returns a `GenerationStateCheckpoint` to save the current registers and /// reset memory operations to the empty vector. fn checkpoint(&mut self) -> GenerationStateCheckpoint { @@ -881,7 +887,7 @@ impl State for Interpreter { } } -impl Transition for Interpreter { +impl Transition for Interpreter { fn generate_jumpdest_analysis(&mut self, dst: usize) -> bool { if self.is_jumpdest_analysis && !self.generation_state.registers.is_kernel { self.add_jumpdest_offset(dst); @@ -957,6 +963,7 @@ fn get_mnemonic(opcode: u8) -> &'static str { 0x1d => "SAR", 0x20 => "KECCAK256", 0x21 => "KECCAK_GENERAL", + 0x22 => "POSEIDON", 0x30 => "ADDRESS", 0x31 => "BALANCE", 0x32 => "ORIGIN", diff --git a/evm_arithmetization/src/cpu/kernel/opcodes.rs b/evm_arithmetization/src/cpu/kernel/opcodes.rs index 538fe0a10..47cc97a3c 100644 --- a/evm_arithmetization/src/cpu/kernel/opcodes.rs +++ b/evm_arithmetization/src/cpu/kernel/opcodes.rs @@ -39,6 +39,7 @@ pub fn get_opcode(mnemonic: &str) -> u8 { "SAR" => 0x1d, "KECCAK256" => 0x20, "KECCAK_GENERAL" => 0x21, + "POSEIDON" => 0x22, "ADDRESS" => 0x30, "BALANCE" => 0x31, "ORIGIN" => 0x32, diff --git 
a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs index a8102f36c..a06138b94 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs @@ -1,14 +1,20 @@ use std::collections::HashMap; use anyhow::Result; -use ethereum_types::{Address, BigEndianHash, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; use rand::{thread_rng, Rng}; +use smt_trie::code::{hash_bytecode_u256, hash_contract_bytecode}; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::{hashout2u, key2u}; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::context_metadata::ContextMetadata::{self, GasLimit}; @@ -22,7 +28,7 @@ use crate::witness::memory::MemoryAddress; use crate::witness::operation::CONTEXT_SCALING_FACTOR; use crate::Node; -pub(crate) fn initialize_mpts( +pub(crate) fn initialize_mpts( interpreter: &mut Interpreter, trie_inputs: &TrieInputs, ) { @@ -62,8 +68,8 @@ fn test_account(code: &[u8]) -> AccountRlp { AccountRlp { nonce: U256::from(1111), balance: U256::from(2222), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak(code), + code_hash: hashout2u(hash_contract_bytecode(code.to_vec())), + code_length: code.len().into(), } } @@ -75,14 +81,14 @@ fn random_code() -> Vec { // Stolen from `tests/mpt/insert.rs` // Prepare the interpreter by inserting the account in the state trie. 
-fn prepare_interpreter( +fn prepare_interpreter( interpreter: &mut Interpreter, address: Address, account: &AccountRlp, ) -> Result<()> { - let mpt_insert_state_trie = KERNEL.global_labels["mpt_insert_state_trie"]; - let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; - let mut state_trie: HashedPartialTrie = Default::default(); + let smt_insert_state = KERNEL.global_labels["smt_insert_state"]; + let smt_hash_state = KERNEL.global_labels["smt_hash_state"]; + let mut state_smt = Smt::::default(); let trie_inputs = Default::default(); initialize_mpts(interpreter, &trie_inputs); @@ -91,48 +97,52 @@ fn prepare_interpreter( keccak(address.to_fixed_bytes()).as_bytes(), )); // Next, execute mpt_insert_state_trie. - interpreter.generation_state.registers.program_counter = mpt_insert_state_trie; let trie_data = interpreter.get_trie_data_mut(); if trie_data.is_empty() { // In the assembly we skip over 0, knowing trie_data[0] = 0 by default. // Since we don't explicitly set it to 0, we need to do so here. trie_data.push(Some(0.into())); + trie_data.push(Some(0.into())); } - let value_ptr = trie_data.len(); - trie_data.push(Some(account.nonce)); - trie_data.push(Some(account.balance)); - // In memory, storage_root gets interpreted as a pointer to a storage trie, - // so we have to ensure the pointer is valid. It's easiest to set it to 0, - // which works as an empty node, since trie_data[0] = 0 = MPT_TYPE_EMPTY. 
- trie_data.push(Some(H256::zero().into_uint())); - trie_data.push(Some(account.code_hash.into_uint())); let trie_data_len = trie_data.len().into(); interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); - interpreter - .push(0xDEADBEEFu32.into()) - .expect("The stack should not overflow"); - interpreter - .push(value_ptr.into()) - .expect("The stack should not overflow"); // value_ptr - interpreter - .push(k.try_into_u256().unwrap()) - .expect("The stack should not overflow"); // key + for (key, value) in [ + (key_balance(address), account.balance), + (key_nonce(address), account.nonce), + (key_code(address), account.code_hash), + (key_code_length(address), account.code_length), + ] { + if value.is_zero() { + continue; + } + interpreter.generation_state.registers.program_counter = smt_insert_state; + interpreter + .push(0xDEADBEEFu32.into()) + .expect("The stack should not overflow"); + interpreter + .push(value) + .expect("The stack should not overflow"); // value_ptr + let keyu = key2u(key); + interpreter + .push(keyu) + .expect("The stack should not overflow"); // key - interpreter.run()?; - assert_eq!( - interpreter.stack().len(), - 0, - "Expected empty stack after insert, found {:?}", - interpreter.stack() - ); + interpreter.run()?; + assert_eq!( + interpreter.stack().len(), + 0, + "Expected empty stack after insert, found {:?}", + interpreter.stack() + ); + } // Now, execute mpt_hash_state_trie. - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; + interpreter.generation_state.registers.program_counter = smt_hash_state; interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(1.into()) // Initial length of the trie data segment, unused. + .push(2.into()) // Initial length of the trie data segment, unused. 
.expect("The stack should not overflow"); interpreter.run()?; @@ -142,10 +152,10 @@ fn prepare_interpreter( "Expected 2 items on stack after hashing, found {:?}", interpreter.stack() ); - let hash = H256::from_uint(&interpreter.stack()[1]); + let hash = interpreter.stack()[1]; - state_trie.insert(k, rlp::encode(account).to_vec()); - let expected_state_trie_hash = state_trie.hash(); + set_account(&mut state_smt, address, account, &HashMap::new()); + let expected_state_trie_hash = hashout2u(state_smt.root); assert_eq!(hash, expected_state_trie_hash); Ok(()) @@ -174,8 +184,10 @@ fn test_extcodesize() -> Result<()> { interpreter .push(U256::from_big_endian(address.as_bytes())) .expect("The stack should not overflow"); - interpreter.generation_state.inputs.contract_code = - HashMap::from([(keccak(&code), code.clone())]); + interpreter.generation_state.inputs.contract_code = HashMap::from([( + hashout2u(hash_contract_bytecode(code.clone())), + code.clone(), + )]); interpreter.run()?; assert_eq!(interpreter.stack(), vec![code.len().into()]); @@ -243,8 +255,10 @@ fn test_extcodecopy() -> Result<()> { interpreter .push((0xDEADBEEFu64 + (1 << 32)).into()) .expect("The stack should not overflow"); // kexit_info - interpreter.generation_state.inputs.contract_code = - HashMap::from([(keccak(&code), code.clone())]); + interpreter.generation_state.inputs.contract_code = HashMap::from([( + hashout2u(hash_contract_bytecode(code.clone())), + code.clone(), + )]); interpreter.run()?; assert!(interpreter.stack().is_empty()); @@ -265,7 +279,7 @@ fn test_extcodecopy() -> Result<()> { /// Prepare the interpreter for storage tests by inserting all necessary /// accounts in the state trie, adding the code we want to context 1 and /// switching the context. 
-fn prepare_interpreter_all_accounts( +fn prepare_interpreter_all_accounts( interpreter: &mut Interpreter, trie_inputs: TrieInputs, addr: [u8; 20], @@ -306,12 +320,8 @@ fn sstore() -> Result<()> { // We take the same `to` account as in add11_yml. let addr = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - let addr_hashed = keccak(addr); - - let addr_nibbles = Nibbles::from_bytes_be(addr_hashed.as_bytes()).unwrap(); - let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x60, 0x00, 0x55, 0x00]; - let code_hash = keccak(code); + let code_hash = hash_bytecode_u256(code.to_vec()); let account_before = AccountRlp { balance: 0x0de0b6b3a7640000u64.into(), @@ -319,15 +329,18 @@ fn sstore() -> Result<()> { ..AccountRlp::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - - state_trie_before.insert(addr_nibbles, rlp::encode(&account_before).to_vec()); + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(addr), + &account_before, + &HashMap::new(), + ); let trie_inputs = TrieInputs { - state_trie: state_trie_before.clone(), + state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: Node::Empty.into(), - storage_tries: vec![(addr_hashed, Node::Empty.into())], }; let initial_stack = vec![]; @@ -349,21 +362,9 @@ fn sstore() -> Result<()> { interpreter.pop().expect("Stack should not be empty"); interpreter.pop().expect("Stack should not be empty"); - // The code should have added an element to the storage of `to_account`. We run - // `mpt_hash_state_trie` to check that. - let account_after = AccountRlp { - balance: 0x0de0b6b3a7640000u64.into(), - code_hash, - storage_root: HashedPartialTrie::from(Node::Leaf { - nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), - value: vec![2], - }) - .hash(), - ..AccountRlp::default() - }; - // Now, execute mpt_hash_state_trie. 
- let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; + // Now, execute smt_hash_state. + let smt_hash_state = KERNEL.global_labels["smt_hash_state"]; + interpreter.generation_state.registers.program_counter = smt_hash_state; interpreter.set_is_kernel(true); interpreter.set_context(0); interpreter @@ -381,12 +382,17 @@ fn sstore() -> Result<()> { interpreter.stack() ); - let hash = H256::from_uint(&interpreter.stack()[1]); + let hash = interpreter.stack()[1]; - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert(addr_nibbles, rlp::encode(&account_after).to_vec()); + let mut expected_state_smt_after = Smt::::default(); + set_account( + &mut expected_state_smt_after, + H160(addr), + &account_before, + &[(0.into(), 2.into())].into(), + ); - let expected_state_trie_hash = expected_state_trie_after.hash(); + let expected_state_trie_hash = hashout2u(expected_state_smt_after.root); assert_eq!(hash, expected_state_trie_hash); Ok(()) } @@ -397,17 +403,13 @@ fn sload() -> Result<()> { // We take the same `to` account as in add11_yml. let addr = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - let addr_hashed = keccak(addr); - - let addr_nibbles = Nibbles::from_bytes_be(addr_hashed.as_bytes()).unwrap(); - // This code is similar to the one in add11_yml's contract, but we pop the added // value and carry out an SLOAD instead of an SSTORE. We also add a PUSH at // the end. 
let code = [ 0x60, 0x01, 0x60, 0x01, 0x01, 0x50, 0x60, 0x00, 0x54, 0x60, 0x03, 0x00, ]; - let code_hash = keccak(code); + let code_hash = hash_bytecode_u256(code.to_vec()); let account_before = AccountRlp { balance: 0x0de0b6b3a7640000u64.into(), @@ -415,15 +417,18 @@ fn sload() -> Result<()> { ..AccountRlp::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - - state_trie_before.insert(addr_nibbles, rlp::encode(&account_before).to_vec()); + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(addr), + &account_before, + &HashMap::new(), + ); let trie_inputs = TrieInputs { - state_trie: state_trie_before.clone(), + state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: Node::Empty.into(), - storage_tries: vec![(addr_hashed, Node::Empty.into())], }; let initial_stack = vec![]; @@ -458,17 +463,17 @@ fn sload() -> Result<()> { interpreter .pop() .expect("The stack length should not be empty."); - // Now, execute mpt_hash_state_trie. We check that the state trie has not - // changed. - let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; + + // Now, execute smt_hash_state. + let smt_hash_state = KERNEL.global_labels["smt_hash_state"]; + interpreter.generation_state.registers.program_counter = smt_hash_state; interpreter.set_is_kernel(true); interpreter.set_context(0); interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow."); interpreter - .push(1.into()) // Initial length of the trie data segment, unused. + .push(2.into()) // Initial length of the trie data segment, unused. 
.expect("The stack should not overflow."); interpreter.run()?; @@ -480,6 +485,7 @@ fn sload() -> Result<()> { ); let trie_data_segment_len = interpreter.stack()[0]; + dbg!(interpreter.get_memory_segment(Segment::TrieData)); assert_eq!( trie_data_segment_len, interpreter @@ -488,9 +494,24 @@ fn sload() -> Result<()> { .into() ); - let hash = H256::from_uint(&interpreter.stack()[1]); + let hash = interpreter.stack()[1]; - let expected_state_trie_hash = state_trie_before.hash(); + let expected_state_trie_hash = hashout2u(state_smt_before.root); assert_eq!(hash, expected_state_trie_hash); Ok(()) } + +pub(crate) fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/src/cpu/kernel/tests/add11.rs b/evm_arithmetization/src/cpu/kernel/tests/add11.rs index c2be6a0bd..8ae88a645 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/add11.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/add11.rs @@ -1,307 +1,326 @@ -use std::collections::HashMap; -use std::str::FromStr; - -use ethereum_types::{Address, BigEndianHash, H256}; -use hex_literal::hex; -use keccak_hash::keccak; -use mpt_trie::nibbles::Nibbles; -use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie}; -use plonky2::field::goldilocks_field::GoldilocksField as F; - -use crate::cpu::kernel::aggregator::KERNEL; -use crate::cpu::kernel::interpreter::Interpreter; -use crate::generation::mpt::{AccountRlp, LegacyReceiptRlp}; -use crate::generation::TrieInputs; -use crate::proof::{BlockHashes, BlockMetadata, TrieRoots}; -use crate::GenerationInputs; - -#[test] -fn test_add11_yml() { - let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); - let sender = 
hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); - let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender); - let to_hashed = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - - let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x60, 0x00, 0x55, 0x00]; - let code_hash = keccak(code); - - let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); - contract_code.insert(code_hash, code.to_vec()); - - let beneficiary_account_before = AccountRlp { - nonce: 1.into(), - ..AccountRlp::default() - }; - let sender_account_before = AccountRlp { - balance: 0x0de0b6b3a7640000u64.into(), - ..AccountRlp::default() - }; - let to_account_before = AccountRlp { - balance: 0x0de0b6b3a7640000u64.into(), - code_hash, - ..AccountRlp::default() - }; - - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_before).to_vec(), - ); - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec()); - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec()); - - let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries: vec![(to_hashed, Node::Empty.into())], - }; - - let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); - - let gas_used = 0xa868u64.into(); - - let expected_state_trie_after = { - let beneficiary_account_after = AccountRlp { - nonce: 
1.into(), - ..AccountRlp::default() - }; - let sender_account_after = AccountRlp { - balance: 0xde0b6b3a75be550u64.into(), - nonce: 1.into(), - ..AccountRlp::default() - }; - let to_account_after = AccountRlp { - balance: 0xde0b6b3a76586a0u64.into(), - code_hash, - // Storage map: { 0 => 2 } - storage_root: HashedPartialTrie::from(Node::Leaf { - nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), - value: vec![2], - }) - .hash(), - ..AccountRlp::default() - }; - - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - ); - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec()); - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec()); - expected_state_trie_after - }; - let receipt_0 = LegacyReceiptRlp { - status: true, - cum_gas_used: gas_used, - bloom: vec![0; 256].into(), - logs: vec![], - }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); - receipts_trie.insert( - Nibbles::from_str("0x80").unwrap(), - rlp::encode(&receipt_0).to_vec(), - ); - let transactions_trie: HashedPartialTrie = Node::Leaf { - nibbles: Nibbles::from_str("0x80").unwrap(), - value: txn.to_vec(), - } - .into(); - - let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), - transactions_root: transactions_trie.hash(), - receipts_root: receipts_trie.hash(), - }; - - let block_metadata = BlockMetadata { - block_beneficiary: Address::from(beneficiary), - block_timestamp: 0x03e8.into(), - block_number: 1.into(), - block_difficulty: 0x020000.into(), - block_random: H256::from_uint(&0x020000.into()), - block_gaslimit: 0xff112233u32.into(), - block_chain_id: 1.into(), - block_base_fee: 0xa.into(), - block_gas_used: gas_used, - block_bloom: [0.into(); 8], - }; - - let inputs = GenerationInputs { - signed_txn: Some(txn.to_vec()), - withdrawals: vec![], - tries: 
tries_before, - trie_roots_after, - contract_code: contract_code.clone(), - block_metadata, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), - txn_number_before: 0.into(), - gas_used_before: 0.into(), - gas_used_after: gas_used, - block_hashes: BlockHashes { - prev_hashes: vec![H256::default(); 256], - cur_hash: H256::default(), - }, - }; - - let initial_stack = vec![]; - let initial_offset = KERNEL.global_labels["main"]; - let mut interpreter: Interpreter = - Interpreter::new_with_generation_inputs(initial_offset, initial_stack, inputs); - - interpreter.set_is_kernel(true); - interpreter.run().expect("Proving add11 failed."); -} - -#[test] -fn test_add11_yml_with_exception() { - // In this test, we make sure that the user code throws a stack underflow - // exception. - let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); - let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); - let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender); - let to_hashed = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - - let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x8e, 0x00]; - let code_hash = keccak(code); - - let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); - contract_code.insert(code_hash, code.to_vec()); - - let beneficiary_account_before = AccountRlp { - nonce: 1.into(), - ..AccountRlp::default() - }; - let sender_account_before = AccountRlp { - balance: 0x0de0b6b3a7640000u64.into(), - ..AccountRlp::default() - }; - let to_account_before = AccountRlp { - balance: 0x0de0b6b3a7640000u64.into(), - code_hash, - ..AccountRlp::default() - }; - - let mut state_trie_before = 
HashedPartialTrie::from(Node::Empty); - state_trie_before.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_before).to_vec(), - ); - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec()); - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec()); - - let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries: vec![(to_hashed, Node::Empty.into())], - }; - - let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); - let txn_gas_limit = 400_000; - let gas_price = 10; - - // Here, since the transaction fails, it consumes its gas limit, and does - // nothing else. - let expected_state_trie_after = { - let beneficiary_account_after = beneficiary_account_before; - // This is the only account that changes: the nonce and the balance are updated. 
- let sender_account_after = AccountRlp { - balance: sender_account_before.balance - txn_gas_limit * gas_price, - nonce: 1.into(), - ..AccountRlp::default() - }; - let to_account_after = to_account_before; - - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - ); - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec()); - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec()); - expected_state_trie_after - }; - - let receipt_0 = LegacyReceiptRlp { - status: false, - cum_gas_used: txn_gas_limit.into(), - bloom: vec![0; 256].into(), - logs: vec![], - }; - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); - receipts_trie.insert( - Nibbles::from_str("0x80").unwrap(), - rlp::encode(&receipt_0).to_vec(), - ); - let transactions_trie: HashedPartialTrie = Node::Leaf { - nibbles: Nibbles::from_str("0x80").unwrap(), - value: txn.to_vec(), - } - .into(); - - let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), - transactions_root: transactions_trie.hash(), - receipts_root: receipts_trie.hash(), - }; - - let block_metadata = BlockMetadata { - block_beneficiary: Address::from(beneficiary), - block_timestamp: 0x03e8.into(), - block_number: 1.into(), - block_difficulty: 0x020000.into(), - block_random: H256::from_uint(&0x020000.into()), - block_gaslimit: 0xff112233u32.into(), - block_chain_id: 1.into(), - block_base_fee: 0xa.into(), - block_gas_used: txn_gas_limit.into(), - block_bloom: [0.into(); 8], - }; - - let inputs = GenerationInputs { - signed_txn: Some(txn.to_vec()), - withdrawals: vec![], - tries: tries_before, - trie_roots_after, - contract_code: contract_code.clone(), - block_metadata, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), - txn_number_before: 0.into(), - gas_used_before: 0.into(), - 
gas_used_after: txn_gas_limit.into(), - block_hashes: BlockHashes { - prev_hashes: vec![H256::default(); 256], - cur_hash: H256::default(), - }, - }; - - let initial_stack = vec![]; - let initial_offset = KERNEL.global_labels["main"]; - let mut interpreter: Interpreter = - Interpreter::new_with_generation_inputs(initial_offset, initial_stack, inputs); - - interpreter.set_is_kernel(true); - interpreter - .run() - .expect("Proving add11 with exception failed."); -} +// use std::collections::HashMap; +// use std::str::FromStr; + +// use mpt_trie::nibbles::Nibbles; +// use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie}; +// use ethereum_types::{Address, BigEndianHash, H256}; +// use hex_literal::hex; +// use keccak_hash::keccak; +// use plonky2::field::goldilocks_field::GoldilocksField as F; + +// use crate::cpu::kernel::aggregator::KERNEL; +// use crate::cpu::kernel::constants::context_metadata::ContextMetadata; +// use crate::cpu::kernel::interpreter::Interpreter; +// use crate::generation::mpt::{AccountRlp, LegacyReceiptRlp}; +// use crate::generation::TrieInputs; +// use crate::proof::{BlockHashes, BlockMetadata, TrieRoots}; +// use crate::GenerationInputs; + +// #[test] +// fn test_add11_yml() { +// let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); +// let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); +// let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); + +// let beneficiary_state_key = keccak(beneficiary); +// let sender_state_key = keccak(sender); +// let to_hashed = keccak(to); + +// let beneficiary_nibbles = +// Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); +// let sender_nibbles = +// Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); +// let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); + +// let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x60, 0x00, 0x55, 0x00]; +// let code_hash = keccak(code); + +// let mut contract_code = HashMap::new(); +// 
contract_code.insert(keccak(vec![]), vec![]); +// contract_code.insert(code_hash, code.to_vec()); + +// let beneficiary_account_before = AccountRlp { +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let sender_account_before = AccountRlp { +// balance: 0x0de0b6b3a7640000u64.into(), +// ..AccountRlp::default() +// }; +// let to_account_before = AccountRlp { +// balance: 0x0de0b6b3a7640000u64.into(), +// code_hash, +// ..AccountRlp::default() +// }; + +// let mut state_trie_before = HashedPartialTrie::from(Node::Empty); +// state_trie_before.insert( +// beneficiary_nibbles, +// rlp::encode(&beneficiary_account_before).to_vec(), +// ); +// state_trie_before.insert(sender_nibbles, +// rlp::encode(&sender_account_before).to_vec()); state_trie_before. +// insert(to_nibbles, rlp::encode(&to_account_before).to_vec()); + +// let tries_before = TrieInputs { +// state_trie: state_trie_before, +// transactions_trie: Node::Empty.into(), +// receipts_trie: Node::Empty.into(), +// storage_tries: vec![(to_hashed, Node::Empty.into())], +// }; + +// let txn = +// hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16" +// ); + +// let gas_used = 0xa868u64.into(); + +// let expected_state_trie_after = { +// let beneficiary_account_after = AccountRlp { +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let sender_account_after = AccountRlp { +// balance: 0xde0b6b3a75be550u64.into(), +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let to_account_after = AccountRlp { +// balance: 0xde0b6b3a76586a0u64.into(), +// code_hash, +// // Storage map: { 0 => 2 } +// storage_root: HashedPartialTrie::from(Node::Leaf { +// nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), +// value: vec![2], +// }) +// .hash(), +// ..AccountRlp::default() +// }; + +// let mut expected_state_trie_after = +// 
HashedPartialTrie::from(Node::Empty); expected_state_trie_after. +// insert( beneficiary_nibbles, +// rlp::encode(&beneficiary_account_after).to_vec(), +// ); +// expected_state_trie_after +// .insert(sender_nibbles, +// rlp::encode(&sender_account_after).to_vec()); +// expected_state_trie_after.insert(to_nibbles, +// rlp::encode(&to_account_after).to_vec()); expected_state_trie_after +// }; +// let receipt_0 = LegacyReceiptRlp { +// status: true, +// cum_gas_used: gas_used, +// bloom: vec![0; 256].into(), +// logs: vec![], +// }; +// let mut receipts_trie = HashedPartialTrie::from(Node::Empty); +// receipts_trie.insert( +// Nibbles::from_str("0x80").unwrap(), +// rlp::encode(&receipt_0).to_vec(), +// ); +// let transactions_trie: HashedPartialTrie = Node::Leaf { +// nibbles: Nibbles::from_str("0x80").unwrap(), +// value: txn.to_vec(), +// } +// .into(); + +// let trie_roots_after = TrieRoots { +// state_root: expected_state_trie_after.hash(), +// transactions_root: transactions_trie.hash(), +// receipts_root: receipts_trie.hash(), +// }; + +// let block_metadata = BlockMetadata { +// block_beneficiary: Address::from(beneficiary), +// block_timestamp: 0x03e8.into(), +// block_number: 1.into(), +// block_difficulty: 0x020000.into(), +// block_random: H256::from_uint(&0x020000.into()), +// block_gaslimit: 0xff112233u32.into(), +// block_chain_id: 1.into(), +// block_base_fee: 0xa.into(), +// block_gas_used: gas_used, +// block_bloom: [0.into(); 8], +// }; + +// let tries_inputs = GenerationInputs { +// signed_txn: Some(txn.to_vec()), +// withdrawals: vec![], +// tries: tries_before, +// trie_roots_after, +// contract_code: contract_code.clone(), +// block_metadata, +// checkpoint_state_trie_root: +// HashedPartialTrie::from(Node::Empty).hash(), txn_number_before: +// 0.into(), gas_used_before: 0.into(), +// gas_used_after: gas_used, +// block_hashes: BlockHashes { +// prev_hashes: vec![H256::default(); 256], +// cur_hash: H256::default(), +// }, +// }; + +// let 
initial_stack = vec![]; +// let mut interpreter: Interpreter = +// Interpreter::new_with_generation_inputs_and_kernel(0, initial_stack, +// tries_inputs); + +// let route_txn_label = KERNEL.global_labels["main"]; +// // Switch context and initialize memory with the data we need for the +// tests. interpreter.generation_state.registers.program_counter = +// route_txn_label; interpreter.set_context_metadata_field(0, +// ContextMetadata::GasLimit, 1_000_000.into()); interpreter. +// set_is_kernel(true); interpreter.run().expect("Proving add11 failed."); +// } + +// #[test] +// fn test_add11_yml_with_exception() { +// // In this test, we make sure that the user code throws a stack underflow +// exception. let beneficiary = +// hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); let sender = +// hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let to = +// hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); + +// let beneficiary_state_key = keccak(beneficiary); +// let sender_state_key = keccak(sender); +// let to_hashed = keccak(to); + +// let beneficiary_nibbles = +// Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); +// let sender_nibbles = +// Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); +// let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); + +// let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x8e, 0x00]; +// let code_hash = keccak(code); + +// let mut contract_code = HashMap::new(); +// contract_code.insert(keccak(vec![]), vec![]); +// contract_code.insert(code_hash, code.to_vec()); + +// let beneficiary_account_before = AccountRlp { +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let sender_account_before = AccountRlp { +// balance: 0x0de0b6b3a7640000u64.into(), +// ..AccountRlp::default() +// }; +// let to_account_before = AccountRlp { +// balance: 0x0de0b6b3a7640000u64.into(), +// code_hash, +// ..AccountRlp::default() +// }; + +// let mut state_trie_before = HashedPartialTrie::from(Node::Empty); +// 
state_trie_before.insert( +// beneficiary_nibbles, +// rlp::encode(&beneficiary_account_before).to_vec(), +// ); +// state_trie_before.insert(sender_nibbles, +// rlp::encode(&sender_account_before).to_vec()); state_trie_before. +// insert(to_nibbles, rlp::encode(&to_account_before).to_vec()); + +// let tries_before = TrieInputs { +// state_trie: state_trie_before, +// transactions_trie: Node::Empty.into(), +// receipts_trie: Node::Empty.into(), +// storage_tries: vec![(to_hashed, Node::Empty.into())], +// }; + +// let txn = +// hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16" +// ); let txn_gas_limit = 400_000; +// let gas_price = 10; + +// // Here, since the transaction fails, it consumes its gas limit, and does +// nothing else. let expected_state_trie_after = { +// let beneficiary_account_after = beneficiary_account_before; +// // This is the only account that changes: the nonce and the balance +// are updated. let sender_account_after = AccountRlp { +// balance: sender_account_before.balance - txn_gas_limit * +// gas_price, nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let to_account_after = to_account_before; + +// let mut expected_state_trie_after = +// HashedPartialTrie::from(Node::Empty); expected_state_trie_after. 
+// insert( beneficiary_nibbles, +// rlp::encode(&beneficiary_account_after).to_vec(), +// ); +// expected_state_trie_after +// .insert(sender_nibbles, +// rlp::encode(&sender_account_after).to_vec()); +// expected_state_trie_after.insert(to_nibbles, +// rlp::encode(&to_account_after).to_vec()); expected_state_trie_after +// }; + +// let receipt_0 = LegacyReceiptRlp { +// status: false, +// cum_gas_used: txn_gas_limit.into(), +// bloom: vec![0; 256].into(), +// logs: vec![], +// }; +// let mut receipts_trie = HashedPartialTrie::from(Node::Empty); +// receipts_trie.insert( +// Nibbles::from_str("0x80").unwrap(), +// rlp::encode(&receipt_0).to_vec(), +// ); +// let transactions_trie: HashedPartialTrie = Node::Leaf { +// nibbles: Nibbles::from_str("0x80").unwrap(), +// value: txn.to_vec(), +// } +// .into(); + +// let trie_roots_after = TrieRoots { +// state_root: expected_state_trie_after.hash(), +// transactions_root: transactions_trie.hash(), +// receipts_root: receipts_trie.hash(), +// }; + +// let block_metadata = BlockMetadata { +// block_beneficiary: Address::from(beneficiary), +// block_timestamp: 0x03e8.into(), +// block_number: 1.into(), +// block_difficulty: 0x020000.into(), +// block_random: H256::from_uint(&0x020000.into()), +// block_gaslimit: 0xff112233u32.into(), +// block_chain_id: 1.into(), +// block_base_fee: 0xa.into(), +// block_gas_used: txn_gas_limit.into(), +// block_bloom: [0.into(); 8], +// }; + +// let tries_inputs = GenerationInputs { +// signed_txn: Some(txn.to_vec()), +// withdrawals: vec![], +// tries: tries_before, +// trie_roots_after, +// contract_code: contract_code.clone(), +// block_metadata, +// checkpoint_state_trie_root: +// HashedPartialTrie::from(Node::Empty).hash(), txn_number_before: +// 0.into(), gas_used_before: 0.into(), +// gas_used_after: txn_gas_limit.into(), +// block_hashes: BlockHashes { +// prev_hashes: vec![H256::default(); 256], +// cur_hash: H256::default(), +// }, +// }; + +// let initial_stack = vec![]; +// 
let mut interpreter: Interpreter = +// Interpreter::new_with_generation_inputs_and_kernel(0, initial_stack, +// tries_inputs); + +// let route_txn_label = KERNEL.global_labels["main"]; +// // Switch context and initialize memory with the data we need for the +// tests. interpreter.generation_state.registers.program_counter = +// route_txn_label; interpreter.set_context_metadata_field(0, +// ContextMetadata::GasLimit, 1_000_000.into()); interpreter. +// set_is_kernel(true); interpreter +// .run() +// .expect("Proving add11 with exception failed."); +// } diff --git a/evm_arithmetization/src/cpu/kernel/tests/balance.rs b/evm_arithmetization/src/cpu/kernel/tests/balance.rs index 984772027..6fffabe10 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/balance.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/balance.rs @@ -1,133 +1,134 @@ -use anyhow::Result; -use ethereum_types::{Address, BigEndianHash, H256, U256}; -use keccak_hash::keccak; -use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; -use plonky2::field::goldilocks_field::GoldilocksField as F; -use plonky2::field::types::Field; -use rand::{thread_rng, Rng}; +// use anyhow::Result; +// use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; +// use ethereum_types::{Address, BigEndianHash, H256, U256}; +// use keccak_hash::keccak; +// use plonky2::field::goldilocks_field::GoldilocksField as F; +// use plonky2::field::types::Field; +// use rand::{thread_rng, Rng}; -use crate::cpu::kernel::aggregator::KERNEL; -use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; -use crate::cpu::kernel::interpreter::Interpreter; -use crate::cpu::kernel::tests::account_code::initialize_mpts; -use crate::cpu::kernel::tests::mpt::nibbles_64; -use crate::generation::mpt::AccountRlp; -use crate::Node; +// use crate::cpu::kernel::aggregator::KERNEL; +// use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; +// use crate::cpu::kernel::interpreter::Interpreter; +// use 
crate::cpu::kernel::tests::account_code::initialize_mpts; +// use crate::cpu::kernel::tests::mpt::nibbles_64; +// use crate::generation::mpt::AccountRlp; +// use crate::Node; -// Test account with a given code hash. -fn test_account(balance: U256) -> AccountRlp { - AccountRlp { - nonce: U256::from(1111), - balance, - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: H256::from_uint(&U256::from(8888)), - } -} +// // Test account with a given code hash. +// fn test_account(balance: U256) -> AccountRlp { +// AccountRlp { +// nonce: U256::from(1111), +// balance, +// storage_root: HashedPartialTrie::from(Node::Empty).hash(), +// code_hash: H256::from_uint(&U256::from(8888)), +// } +// } -// Stolen from `tests/mpt/insert.rs` -// Prepare the interpreter by inserting the account in the state trie. -fn prepare_interpreter( - interpreter: &mut Interpreter, - address: Address, - account: &AccountRlp, -) -> Result<()> { - let mpt_insert_state_trie = KERNEL.global_labels["mpt_insert_state_trie"]; - let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; - let mut state_trie: HashedPartialTrie = Default::default(); - let trie_inputs = Default::default(); +// // Stolen from `tests/mpt/insert.rs` +// // Prepare the interpreter by inserting the account in the state trie. 
+// fn prepare_interpreter( +// interpreter: &mut Interpreter, +// address: Address, +// account: &AccountRlp, +// ) -> Result<()> { +// let mpt_insert_state_trie = +// KERNEL.global_labels["mpt_insert_state_trie"]; let mpt_hash_state_trie = +// KERNEL.global_labels["mpt_hash_state_trie"]; let mut state_trie: +// HashedPartialTrie = Default::default(); let trie_inputs = +// Default::default(); - initialize_mpts(interpreter, &trie_inputs); - assert_eq!(interpreter.stack(), vec![]); +// initialize_mpts(interpreter, &trie_inputs); +// assert_eq!(interpreter.stack(), vec![]); - let k = nibbles_64(U256::from_big_endian( - keccak(address.to_fixed_bytes()).as_bytes(), - )); - // Next, execute mpt_insert_state_trie. - interpreter.generation_state.registers.program_counter = mpt_insert_state_trie; - let trie_data = interpreter.get_trie_data_mut(); - if trie_data.is_empty() { - // In the assembly we skip over 0, knowing trie_data[0] = 0 by default. - // Since we don't explicitly set it to 0, we need to do so here. - trie_data.push(Some(0.into())); - } - let value_ptr = trie_data.len(); - trie_data.push(Some(account.nonce)); - trie_data.push(Some(account.balance)); - // In memory, storage_root gets interpreted as a pointer to a storage trie, - // so we have to ensure the pointer is valid. It's easiest to set it to 0, - // which works as an empty node, since trie_data[0] = 0 = MPT_TYPE_EMPTY. 
- trie_data.push(Some(H256::zero().into_uint())); - trie_data.push(Some(account.code_hash.into_uint())); - let trie_data_len = trie_data.len().into(); - interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); - interpreter - .push(0xDEADBEEFu32.into()) - .expect("The stack should not overflow"); - interpreter - .push(value_ptr.into()) - .expect("The stack should not overflow"); // value_ptr - interpreter - .push(k.try_into_u256().unwrap()) - .expect("The stack should not overflow"); // key +// let k = nibbles_64(U256::from_big_endian( +// keccak(address.to_fixed_bytes()).as_bytes(), +// )); +// // Next, execute mpt_insert_state_trie. +// interpreter.generation_state.registers.program_counter = +// mpt_insert_state_trie; let trie_data = interpreter.get_trie_data_mut(); +// if trie_data.is_empty() { +// // In the assembly we skip over 0, knowing trie_data[0] = 0 by +// default. // Since we don't explicitly set it to 0, we need to do so +// here. trie_data.push(0.into()); +// } +// let value_ptr = trie_data.len(); +// trie_data.push(account.nonce); +// trie_data.push(account.balance); +// // In memory, storage_root gets interpreted as a pointer to a storage +// trie, // so we have to ensure the pointer is valid. It's easiest to set +// it to 0, // which works as an empty node, since trie_data[0] = 0 = +// MPT_TYPE_EMPTY. 
trie_data.push(H256::zero().into_uint()); +// trie_data.push(account.code_hash.into_uint()); +// let trie_data_len = trie_data.len().into(); +// interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, +// trie_data_len); interpreter +// .push(0xDEADBEEFu32.into()) +// .expect("The stack should not overflow"); +// interpreter +// .push(value_ptr.into()) +// .expect("The stack should not overflow"); // value_ptr +// interpreter +// .push(k.try_into_u256().unwrap()) +// .expect("The stack should not overflow"); // key - interpreter.run()?; - assert_eq!( - interpreter.stack().len(), - 0, - "Expected empty stack after insert, found {:?}", - interpreter.stack() - ); +// interpreter.run()?; +// assert_eq!( +// interpreter.stack().len(), +// 0, +// "Expected empty stack after insert, found {:?}", +// interpreter.stack() +// ); - // Now, execute mpt_hash_state_trie. - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; - interpreter - .push(0xDEADBEEFu32.into()) - .expect("The stack should not overflow"); - interpreter - .push(1.into()) // Initial trie data segment size, unused. - .expect("The stack should not overflow"); - interpreter.run()?; +// // Now, execute mpt_hash_state_trie. +// interpreter.generation_state.registers.program_counter = +// mpt_hash_state_trie; interpreter +// .push(0xDEADBEEFu32.into()) +// .expect("The stack should not overflow"); +// interpreter +// .push(1.into()) // Initial trie data segment size, unused. 
+// .expect("The stack should not overflow"); +// interpreter.run()?; - assert_eq!( - interpreter.stack().len(), - 2, - "Expected 2 items on stack after hashing, found {:?}", - interpreter.stack() - ); - let hash = H256::from_uint(&interpreter.stack()[1]); +// assert_eq!( +// interpreter.stack().len(), +// 2, +// "Expected 2 items on stack after hashing, found {:?}", +// interpreter.stack() +// ); +// let hash = H256::from_uint(&interpreter.stack()[1]); - state_trie.insert(k, rlp::encode(account).to_vec()); - let expected_state_trie_hash = state_trie.hash(); - assert_eq!(hash, expected_state_trie_hash); +// state_trie.insert(k, rlp::encode(account).to_vec()); +// let expected_state_trie_hash = state_trie.hash(); +// assert_eq!(hash, expected_state_trie_hash); - Ok(()) -} +// Ok(()) +// } -#[test] -fn test_balance() -> Result<()> { - let mut rng = thread_rng(); - let balance = U256(rng.gen()); - let account = test_account(balance); +// #[test] +// fn test_balance() -> Result<()> { +// let mut rng = thread_rng(); +// let balance = U256(rng.gen()); +// let account = test_account(balance); - let mut interpreter: Interpreter = Interpreter::new(0, vec![]); - let address: Address = rng.gen(); - // Prepare the interpreter by inserting the account in the state trie. - prepare_interpreter(&mut interpreter, address, &account)?; +// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, +// vec![]); let address: Address = rng.gen(); +// // Prepare the interpreter by inserting the account in the state trie. 
+// prepare_interpreter(&mut interpreter, address, &account)?; - // Test `balance` - interpreter.generation_state.registers.program_counter = KERNEL.global_labels["balance"]; - interpreter.pop().expect("The stack should not be empty"); - interpreter.pop().expect("The stack should not be empty"); - assert!(interpreter.stack().is_empty()); - interpreter - .push(0xDEADBEEFu32.into()) - .expect("The stack should not overflow"); - interpreter - .push(U256::from_big_endian(address.as_bytes())) - .expect("The stack should not overflow"); - interpreter.run()?; +// // Test `balance` +// interpreter.generation_state.registers.program_counter = +// KERNEL.global_labels["balance"]; interpreter.pop().expect("The stack +// should not be empty"); interpreter.pop().expect("The stack should not be +// empty"); assert!(interpreter.stack().is_empty()); +// interpreter +// .push(0xDEADBEEFu32.into()) +// .expect("The stack should not overflow"); +// interpreter +// .push(U256::from_big_endian(address.as_bytes())) +// .expect("The stack should not overflow"); +// interpreter.run()?; - assert_eq!(interpreter.stack(), vec![balance]); +// assert_eq!(interpreter.stack(), vec![balance]); - Ok(()) -} +// Ok(()) +// } diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs index 3f9153cda..a9e78d8b8 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs @@ -1,9 +1,13 @@ use anyhow::Result; -use ethereum_types::{BigEndianHash, H256, U512}; +use ethereum_types::{BigEndianHash, H160, H256, U256, U512}; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; -use rand::random; +use rand::{random, thread_rng, Rng}; +use smt_trie::db::MemoryDb; +use smt_trie::keys::key_balance; +use smt_trie::smt::{Key, Smt}; +use smt_trie::utils::{hashout2u, key2u}; use 
crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; @@ -15,89 +19,51 @@ use crate::generation::TrieInputs; use crate::Node; #[test] -fn mpt_delete_empty() -> Result<()> { - test_state_trie(Default::default(), nibbles_64(0xABC), test_account_2()) +fn smt_delete_empty() -> Result<()> { + test_state_trie( + Smt::::default(), + key_balance(H160(random())), + U256(random()), + ) } #[test] -fn mpt_delete_leaf_nonoverlapping_keys() -> Result<()> { - let state_trie = Node::Leaf { - nibbles: nibbles_64(0xABC), - value: test_account_1_rlp(), +fn smt_delete_random() -> Result<()> { + const N: usize = 100; + let mut rng = thread_rng(); + for _iter in 0..N { + let mut state_smt = Smt::::default(); + let num_keys: usize = rng.gen_range(0..100); + for _ in 0..num_keys { + let key = key_balance(H160(rng.gen())); + let value = U256(rng.gen()); + state_smt.set(key, value); + } + let trie_inputs = TrieInputs { + state_smt: state_smt.serialize(), + transactions_trie: Default::default(), + receipts_trie: Default::default(), + }; + + let key = key_balance(H160(rng.gen())); + let value = U256(rng.gen()); + test_state_trie(state_smt, key, value)?; } - .into(); - test_state_trie(state_trie, nibbles_64(0x123), test_account_2()) -} - -#[test] -fn mpt_delete_leaf_overlapping_keys() -> Result<()> { - let state_trie = Node::Leaf { - nibbles: nibbles_64(0xABC), - value: test_account_1_rlp(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0xADE), test_account_2()) -} - -#[test] -fn mpt_delete_branch_into_hash() -> Result<()> { - let hash = Node::Hash(H256::random()); - let state_trie = Node::Extension { - nibbles: nibbles_64(0xADF), - child: hash.into(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0xADE), test_account_2()) -} - -#[test] -fn test_after_mpt_delete_extension_branch() -> Result<()> { - let hash = Node::Hash(H256::random()); - let branch = Node::Branch { - children: std::array::from_fn(|i| { - if i == 0 { - 
Node::Empty.into() - } else { - hash.clone().into() - } - }), - value: vec![], - }; - let nibbles = Nibbles::from_bytes_be(&random::<[u8; 5]>()).unwrap(); - let state_trie = Node::Extension { - nibbles, - child: branch.into(), - } - .into(); - let key = nibbles.merge_nibbles(&Nibbles { - packed: U512::zero(), - count: 64 - nibbles.count, - }); - test_state_trie(state_trie, key, test_account_2()) + Ok(()) } /// Note: The account's storage_root is ignored, as we can't insert a new /// storage_root without the accompanying trie data. An empty trie's /// storage_root is used instead. -fn test_state_trie( - state_trie: HashedPartialTrie, - k: Nibbles, - mut account: AccountRlp, -) -> Result<()> { - assert_eq!(k.count, 64); - - // Ignore any storage_root; see documentation note. - account.storage_root = HashedPartialTrie::from(Node::Empty).hash(); - +fn test_state_trie(state_smt: Smt, k: Key, value: U256) -> Result<()> { let trie_inputs = TrieInputs { - state_trie: state_trie.clone(), + state_smt: state_smt.serialize(), transactions_trie: Default::default(), receipts_trie: Default::default(), - storage_tries: vec![], }; - let mpt_insert_state_trie = KERNEL.global_labels["mpt_insert_state_trie"]; - let mpt_delete = KERNEL.global_labels["mpt_delete"]; - let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; + let smt_insert_state = KERNEL.global_labels["smt_insert_state"]; + let smt_delete = KERNEL.global_labels["smt_delete"]; + let smt_hash = KERNEL.global_labels["smt_hash"]; let initial_stack = vec![]; let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); @@ -105,32 +71,25 @@ fn test_state_trie( initialize_mpts(&mut interpreter, &trie_inputs); assert_eq!(interpreter.stack(), vec![]); - // Next, execute mpt_insert_state_trie. - interpreter.generation_state.registers.program_counter = mpt_insert_state_trie; + // Next, execute smt_insert_state. 
+ interpreter.generation_state.registers.program_counter = smt_insert_state; let trie_data = interpreter.get_trie_data_mut(); if trie_data.is_empty() { // In the assembly we skip over 0, knowing trie_data[0] = 0 by default. // Since we don't explicitly set it to 0, we need to do so here. trie_data.push(Some(0.into())); + trie_data.push(Some(0.into())); } - let value_ptr = trie_data.len(); - trie_data.push(Some(account.nonce)); - trie_data.push(Some(account.balance)); - // In memory, storage_root gets interpreted as a pointer to a storage trie, - // so we have to ensure the pointer is valid. It's easiest to set it to 0, - // which works as an empty node, since trie_data[0] = 0 = MPT_TYPE_EMPTY. - trie_data.push(Some(H256::zero().into_uint())); - trie_data.push(Some(account.code_hash.into_uint())); - let trie_data_len = trie_data.len().into(); - interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); + let len = trie_data.len(); + interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, len.into()); interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(value_ptr.into()) - .expect("The stack should not overflow"); // value_ptr + .push(value) + .expect("The stack should not overflow"); interpreter - .push(k.try_into_u256().unwrap()) + .push(key2u(k)) .expect("The stack should not overflow"); // key interpreter.run()?; assert_eq!( @@ -140,39 +99,37 @@ fn test_state_trie( interpreter.stack() ); - // Next, execute mpt_delete, deleting the account we just inserted. + // Next, execute smt_delete, deleting the account we just inserted. 
let state_trie_ptr = interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot); - interpreter.generation_state.registers.program_counter = mpt_delete; + interpreter.generation_state.registers.program_counter = smt_delete; interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(k.try_into_u256().unwrap()) - .expect("The stack should not overflow"); - interpreter - .push(64.into()) + .push(key2u(k)) .expect("The stack should not overflow"); interpreter .push(state_trie_ptr) .expect("The stack should not overflow"); interpreter.run()?; let state_trie_ptr = interpreter.pop().expect("The stack should not be empty"); - interpreter.set_global_metadata_field(GlobalMetadata::StateTrieRoot, state_trie_ptr); - // Now, execute mpt_hash_state_trie. - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; + // Now, execute smt_hash_state. + interpreter.generation_state.registers.program_counter = smt_hash; interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(1.into()) // Initial length of the trie data segment, unused. + .push(2.into()) // Initial length of the trie data segment, unused. 
+ .expect("The stack should not overflow"); + interpreter + .push(state_trie_ptr) .expect("The stack should not overflow"); interpreter.run()?; - let state_trie_hash = - H256::from_uint(&interpreter.pop().expect("The stack should not be empty")); - let expected_state_trie_hash = state_trie.hash(); - assert_eq!(state_trie_hash, expected_state_trie_hash); + let state_smt_hash = interpreter.pop().expect("The stack should not be empty"); + let expected_state_smt_hash = hashout2u(state_smt.root); + assert_eq!(state_smt_hash, expected_state_smt_hash); Ok(()) } diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs index 18e3ae1fe..c92995999 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/hash.rs @@ -1,7 +1,12 @@ use anyhow::Result; -use ethereum_types::{BigEndianHash, H256}; +use ethereum_types::{BigEndianHash, H160, H256, U256}; use mpt_trie::partial_trie::PartialTrie; use plonky2::field::goldilocks_field::GoldilocksField as F; +use rand::{thread_rng, Rng}; +use smt_trie::db::MemoryDb; +use smt_trie::keys::key_balance; +use smt_trie::smt::{hash_serialize_u256, Smt}; +use smt_trie::utils::hashout2u; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::interpreter::Interpreter; @@ -13,103 +18,55 @@ use crate::Node; // TODO: Test with short leaf. Might need to be a storage trie. 
#[test] -fn mpt_hash_empty() -> Result<()> { +fn smt_hash_empty() -> Result<()> { + let mut state_smt = Smt::::default(); let trie_inputs = TrieInputs { - state_trie: Default::default(), + state_smt: state_smt.serialize(), transactions_trie: Default::default(), receipts_trie: Default::default(), - storage_tries: vec![], }; test_state_trie(trie_inputs) } #[test] -fn mpt_hash_empty_branch() -> Result<()> { - let children = core::array::from_fn(|_| Node::Empty.into()); - let state_trie = Node::Branch { - children, - value: vec![], +fn smt_hash_random() -> Result<()> { + const N: usize = 100; + let mut rng = thread_rng(); + for _iter in 0..N { + let mut state_smt = Smt::::default(); + let num_keys: usize = rng.gen_range(0..100); + for _ in 0..num_keys { + let key = key_balance(H160(rng.gen())); + let value = U256(rng.gen()); + state_smt.set(key, value); + } + let trie_inputs = TrieInputs { + state_smt: state_smt.serialize(), + transactions_trie: Default::default(), + receipts_trie: Default::default(), + }; + + test_state_trie(trie_inputs)?; } - .into(); - let trie_inputs = TrieInputs { - state_trie, - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - test_state_trie(trie_inputs) -} - -#[test] -fn mpt_hash_hash() -> Result<()> { - let hash = H256::random(); - let trie_inputs = TrieInputs { - state_trie: Node::Hash(hash).into(), - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - - test_state_trie(trie_inputs) -} - -#[test] -fn mpt_hash_leaf() -> Result<()> { - let state_trie = Node::Leaf { - nibbles: 0xABC_u64.into(), - value: test_account_1_rlp(), - } - .into(); - let trie_inputs = TrieInputs { - state_trie, - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - test_state_trie(trie_inputs) -} - -#[test] -fn mpt_hash_extension_to_leaf() -> Result<()> { - let state_trie = 
extension_to_leaf(test_account_1_rlp()); - let trie_inputs = TrieInputs { - state_trie, - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - test_state_trie(trie_inputs) + Ok(()) } -#[test] -fn mpt_hash_branch_to_leaf() -> Result<()> { - let leaf = Node::Leaf { - nibbles: 0xABC_u64.into(), - value: test_account_2_rlp(), - } - .into(); - - let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[3] = leaf; - let state_trie = Node::Branch { - children, - value: vec![], - } - .into(); - - let trie_inputs = TrieInputs { - state_trie, - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - - test_state_trie(trie_inputs) -} +// #[test] +// fn mpt_hash_hash() -> Result<()> { +// let hash = H256::random(); +// let trie_inputs = TrieInputs { +// state_trie: Node::Hash(hash).into(), +// transactions_trie: Default::default(), +// receipts_trie: Default::default(), +// storage_tries: vec![], +// }; +// +// test_state_trie(trie_inputs) +// } fn test_state_trie(trie_inputs: TrieInputs) -> Result<()> { - let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; + let smt_hash_state = KERNEL.global_labels["smt_hash_state"]; let initial_stack = vec![]; let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); @@ -118,12 +75,12 @@ fn test_state_trie(trie_inputs: TrieInputs) -> Result<()> { assert_eq!(interpreter.stack(), vec![]); // Now, execute mpt_hash_state_trie. - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; + interpreter.generation_state.registers.program_counter = smt_hash_state; interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(1.into()) // Initial length of the trie data segment, unused. + .push(2.into()) // Initial length of the trie data segment, unused. 
.expect("The stack should not overflow"); interpreter.run()?; @@ -133,8 +90,8 @@ fn test_state_trie(trie_inputs: TrieInputs) -> Result<()> { "Expected 2 items on stack, found {:?}", interpreter.stack() ); - let hash = H256::from_uint(&interpreter.stack()[1]); - let expected_state_trie_hash = trie_inputs.state_trie.hash(); + let hash = interpreter.stack()[1]; + let expected_state_trie_hash = hash_serialize_u256(&trie_inputs.state_smt); assert_eq!(hash, expected_state_trie_hash); Ok(()) diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs index fcb2b5323..276201b96 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs @@ -1,8 +1,13 @@ use anyhow::Result; -use ethereum_types::{BigEndianHash, H256}; +use ethereum_types::{BigEndianHash, H160, H256, U256}; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField as F; +use rand::{random, thread_rng, Rng}; +use smt_trie::db::MemoryDb; +use smt_trie::keys::key_balance; +use smt_trie::smt::{Key, Smt}; +use smt_trie::utils::{hashout2u, key2u}; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; @@ -16,163 +21,51 @@ use crate::generation::TrieInputs; use crate::Node; #[test] -fn mpt_insert_empty() -> Result<()> { - test_state_trie(Default::default(), nibbles_64(0xABC), test_account_2()) +fn smt_insert_empty() -> Result<()> { + test_state_trie( + Smt::::default(), + key_balance(H160(random())), + U256(random()), + ) } #[test] -fn mpt_insert_leaf_identical_keys() -> Result<()> { - let key = nibbles_64(0xABC); - let state_trie = Node::Leaf { - nibbles: key, - value: test_account_1_rlp(), - } - .into(); - test_state_trie(state_trie, key, test_account_2()) -} - -#[test] -fn mpt_insert_leaf_nonoverlapping_keys() -> Result<()> { - let 
state_trie = Node::Leaf { - nibbles: nibbles_64(0xABC), - value: test_account_1_rlp(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0x123), test_account_2()) -} - -#[test] -fn mpt_insert_leaf_overlapping_keys() -> Result<()> { - let state_trie = Node::Leaf { - nibbles: nibbles_64(0xABC), - value: test_account_1_rlp(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0xADE), test_account_2()) -} - -#[test] -#[ignore] // TODO: Not valid for state trie, all keys have same len. -fn mpt_insert_leaf_insert_key_extends_leaf_key() -> Result<()> { - let state_trie = Node::Leaf { - nibbles: 0xABC_u64.into(), - value: test_account_1_rlp(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0xABCDE), test_account_2()) -} - -#[test] -#[ignore] // TODO: Not valid for state trie, all keys have same len. -fn mpt_insert_leaf_leaf_key_extends_insert_key() -> Result<()> { - let state_trie = Node::Leaf { - nibbles: 0xABCDE_u64.into(), - value: test_account_1_rlp(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0xABC), test_account_2()) -} - -#[test] -fn mpt_insert_branch_replacing_empty_child() -> Result<()> { - let children = core::array::from_fn(|_| Node::Empty.into()); - let state_trie = Node::Branch { - children, - value: vec![], - } - .into(); - - test_state_trie(state_trie, nibbles_64(0xABC), test_account_2()) -} - -#[test] -// TODO: Not a valid test because branches state trie cannot have branch values. -// We should change it to use a different trie. -#[ignore] -fn mpt_insert_extension_nonoverlapping_keys() -> Result<()> { - // Existing keys are 0xABC, 0xABCDEF; inserted key is 0x12345. 
- let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[0xD] = Node::Leaf { - nibbles: 0xEF_u64.into(), - value: test_account_1_rlp(), - } - .into(); - let state_trie = Node::Extension { - nibbles: 0xABC_u64.into(), - child: Node::Branch { - children, - value: test_account_1_rlp(), +fn smt_insert_random() -> Result<()> { + const N: usize = 100; + let mut rng = thread_rng(); + for _iter in 0..N { + let mut state_smt = Smt::::default(); + let num_keys: usize = rng.gen_range(0..100); + for _ in 0..num_keys { + let key = key_balance(H160(rng.gen())); + let value = U256(rng.gen()); + state_smt.set(key, value); } - .into(), + let trie_inputs = TrieInputs { + state_smt: state_smt.serialize(), + transactions_trie: Default::default(), + receipts_trie: Default::default(), + }; + + let key = key_balance(H160(rng.gen())); + let value = U256(rng.gen()); + test_state_trie(state_smt, key, value)?; } - .into(); - test_state_trie(state_trie, nibbles_64(0x12345), test_account_2()) -} - -#[test] -// TODO: Not a valid test because branches state trie cannot have branch values. -// We should change it to use a different trie. -#[ignore] -fn mpt_insert_extension_insert_key_extends_node_key() -> Result<()> { - // Existing keys are 0xA, 0xABCD; inserted key is 0xABCDEF. 
- let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[0xB] = Node::Leaf { - nibbles: 0xCD_u64.into(), - value: test_account_1_rlp(), - } - .into(); - let state_trie = Node::Extension { - nibbles: 0xA_u64.into(), - child: Node::Branch { - children, - value: test_account_1_rlp(), - } - .into(), - } - .into(); - test_state_trie(state_trie, nibbles_64(0xABCDEF), test_account_2()) -} - -#[test] -fn mpt_insert_branch_to_leaf_same_key() -> Result<()> { - let leaf = Node::Leaf { - nibbles: nibbles_count(0xBCD, 63), - value: test_account_1_rlp(), - } - .into(); - - let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[0] = leaf; - let state_trie = Node::Branch { - children, - value: vec![], - } - .into(); - - test_state_trie(state_trie, nibbles_64(0xABCD), test_account_2()) + Ok(()) } /// Note: The account's storage_root is ignored, as we can't insert a new /// storage_root without the accompanying trie data. An empty trie's /// storage_root is used instead. -fn test_state_trie( - mut state_trie: HashedPartialTrie, - k: Nibbles, - mut account: AccountRlp, -) -> Result<()> { - assert_eq!(k.count, 64); - - // Ignore any storage_root; see documentation note. 
- account.storage_root = HashedPartialTrie::from(Node::Empty).hash(); - +fn test_state_trie(mut state_smt: Smt, k: Key, value: U256) -> Result<()> { let trie_inputs = TrieInputs { - state_trie: state_trie.clone(), + state_smt: state_smt.serialize(), transactions_trie: Default::default(), receipts_trie: Default::default(), - storage_tries: vec![], }; - let mpt_insert_state_trie = KERNEL.global_labels["mpt_insert_state_trie"]; - let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"]; + let smt_insert_state = KERNEL.global_labels["smt_insert_state"]; + let smt_delete = KERNEL.global_labels["smt_delete"]; + let smt_hash = KERNEL.global_labels["smt_hash"]; let initial_stack = vec![]; let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); @@ -180,34 +73,26 @@ fn test_state_trie( initialize_mpts(&mut interpreter, &trie_inputs); assert_eq!(interpreter.stack(), vec![]); - // Next, execute mpt_insert_state_trie. - interpreter.generation_state.registers.program_counter = mpt_insert_state_trie; + // Next, execute smt_insert_state. + interpreter.generation_state.registers.program_counter = smt_insert_state; let trie_data = interpreter.get_trie_data_mut(); if trie_data.is_empty() { // In the assembly we skip over 0, knowing trie_data[0] = 0 by default. // Since we don't explicitly set it to 0, we need to do so here. trie_data.push(Some(0.into())); + trie_data.push(Some(0.into())); } - let value_ptr = trie_data.len(); - trie_data.push(Some(account.nonce)); - trie_data.push(Some(account.balance)); - // In memory, storage_root gets interpreted as a pointer to a storage trie, - // so we have to ensure the pointer is valid. It's easiest to set it to 0, - // which works as an empty node, since trie_data[0] = 0 = MPT_TYPE_EMPTY. 
- trie_data.push(Some(H256::zero().into_uint())); - trie_data.push(Some(account.code_hash.into_uint())); - let trie_data_len = trie_data.len().into(); - interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); + let len = trie_data.len(); + interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, len.into()); interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(value_ptr.into()) + .push(value) .expect("The stack should not overflow"); // value_ptr interpreter - .push(k.try_into_u256().unwrap()) + .push(key2u(k)) .expect("The stack should not overflow"); // key - interpreter.run()?; assert_eq!( interpreter.stack().len(), @@ -216,27 +101,25 @@ fn test_state_trie( interpreter.stack() ); - // Now, execute mpt_hash_state_trie. - interpreter.generation_state.registers.program_counter = mpt_hash_state_trie; + let state_trie_ptr = interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot); + + // Now, execute smt_hash_state. + interpreter.generation_state.registers.program_counter = smt_hash; interpreter .push(0xDEADBEEFu32.into()) .expect("The stack should not overflow"); interpreter - .push(1.into()) // Initial length of the trie data segment, unused. + .push(2.into()) // Initial length of the trie data segment, unused. + .expect("The stack should not overflow"); + interpreter + .push(state_trie_ptr) // Initial length of the trie data segment, unused. 
.expect("The stack should not overflow"); interpreter.run()?; - assert_eq!( - interpreter.stack().len(), - 2, - "Expected 2 items on stack after hashing, found {:?}", - interpreter.stack() - ); - let hash = H256::from_uint(&interpreter.stack()[1]); - - state_trie.insert(k, rlp::encode(&account).to_vec()); - let expected_state_trie_hash = state_trie.hash(); - assert_eq!(hash, expected_state_trie_hash); + let state_smt_hash = interpreter.pop().expect("The stack should not be empty"); + state_smt.set(k, value); + let expected_state_smt_hash = hashout2u(state_smt.root); + assert_eq!(state_smt_hash, expected_state_smt_hash); Ok(()) } diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs index 9aa8a1f0b..5884eaf89 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/load.rs @@ -1,13 +1,19 @@ use std::str::FromStr; use anyhow::Result; -use ethereum_types::{BigEndianHash, H256, U256}; +use ethereum_types::{BigEndianHash, H160, H256, U256}; use hex_literal::hex; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::HashedPartialTrie; use plonky2::field::goldilocks_field::GoldilocksField as F; +use rand::{thread_rng, Rng}; +use smt_trie::db::MemoryDb; +use smt_trie::keys::key_balance; +use smt_trie::smt::Smt; +use smt_trie::utils::key2u; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; +use crate::cpu::kernel::constants::smt_type::PartialSmtType; use crate::cpu::kernel::constants::trie_type::PartialTrieType; use crate::cpu::kernel::interpreter::Interpreter; use crate::cpu::kernel::tests::account_code::initialize_mpts; @@ -17,11 +23,11 @@ use crate::Node; #[test] fn load_all_mpts_empty() -> Result<()> { + let smt = Smt::::default(); let trie_inputs = TrieInputs { - state_trie: Default::default(), + state_smt: smt.serialize(), transactions_trie: Default::default(), receipts_trie: Default::default(), - storage_tries: vec![], 
}; let initial_stack = vec![]; @@ -30,11 +36,11 @@ fn load_all_mpts_empty() -> Result<()> { assert_eq!(interpreter.stack(), vec![]); // We need to have the first element in `TrieData` be 0. - assert_eq!(interpreter.get_trie_data(), vec![0.into()]); + assert_eq!(interpreter.get_trie_data(), vec![0.into(); 4]); assert_eq!( interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot), - 0.into() + 2.into() ); assert_eq!( interpreter.get_global_metadata_field(GlobalMetadata::TransactionTrieRoot), @@ -50,99 +56,14 @@ fn load_all_mpts_empty() -> Result<()> { #[test] fn load_all_mpts_leaf() -> Result<()> { + let mut state_smt = Smt::::default(); + let key = key_balance(H160(thread_rng().gen())); + let value = U256(thread_rng().gen()); + state_smt.set(key, value); let trie_inputs = TrieInputs { - state_trie: Node::Leaf { - nibbles: 0xABC_u64.into(), - value: test_account_1_rlp(), - } - .into(), - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - - let initial_stack = vec![]; - let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); - initialize_mpts(&mut interpreter, &trie_inputs); - assert_eq!(interpreter.stack(), vec![]); - - let type_leaf = U256::from(PartialTrieType::Leaf as u32); - assert_eq!( - interpreter.get_trie_data(), - vec![ - 0.into(), - type_leaf, - 3.into(), - 0xABC.into(), - 5.into(), // value ptr - test_account_1().nonce, - test_account_1().balance, - 9.into(), // pointer to storage trie root - test_account_1().code_hash.into_uint(), - // These last two elements encode the storage trie, which is a hash node. 
- (PartialTrieType::Hash as u32).into(), - test_account_1().storage_root.into_uint(), - ] - ); - - assert_eq!( - interpreter.get_global_metadata_field(GlobalMetadata::TransactionTrieRoot), - 0.into() - ); - assert_eq!( - interpreter.get_global_metadata_field(GlobalMetadata::ReceiptTrieRoot), - 0.into() - ); - - Ok(()) -} - -#[test] -fn load_all_mpts_hash() -> Result<()> { - let hash = H256::random(); - let trie_inputs = TrieInputs { - state_trie: Node::Hash(hash).into(), - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - - let initial_stack = vec![]; - let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); - initialize_mpts(&mut interpreter, &trie_inputs); - assert_eq!(interpreter.stack(), vec![]); - - let type_hash = U256::from(PartialTrieType::Hash as u32); - assert_eq!( - interpreter.get_trie_data(), - vec![0.into(), type_hash, hash.into_uint(),] - ); - - assert_eq!( - interpreter.get_global_metadata_field(GlobalMetadata::TransactionTrieRoot), - 0.into() - ); - assert_eq!( - interpreter.get_global_metadata_field(GlobalMetadata::ReceiptTrieRoot), - 0.into() - ); - - Ok(()) -} - -#[test] -fn load_all_mpts_empty_branch() -> Result<()> { - let children = core::array::from_fn(|_| Node::Empty.into()); - let state_trie = Node::Branch { - children, - value: vec![], - } - .into(); - let trie_inputs = TrieInputs { - state_trie, + state_smt: state_smt.serialize(), transactions_trie: Default::default(), receipts_trie: Default::default(), - storage_tries: vec![], }; let initial_stack = vec![]; @@ -150,30 +71,10 @@ fn load_all_mpts_empty_branch() -> Result<()> { initialize_mpts(&mut interpreter, &trie_inputs); assert_eq!(interpreter.stack(), vec![]); - let type_branch = U256::from(PartialTrieType::Branch as u32); + let type_leaf = U256::from(PartialSmtType::Leaf as u32); assert_eq!( interpreter.get_trie_data(), - vec![ - 0.into(), // First address is unused, so that 0 can be treated as a null pointer. 
- type_branch, - 0.into(), // child 0 - 0.into(), // ... - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), - 0.into(), // child 16 - 0.into(), // value_ptr - ] + vec![0.into(), 0.into(), type_leaf, key2u(key), value,] ); assert_eq!( @@ -188,78 +89,173 @@ fn load_all_mpts_empty_branch() -> Result<()> { Ok(()) } -#[test] -fn load_all_mpts_ext_to_leaf() -> Result<()> { - let trie_inputs = TrieInputs { - state_trie: extension_to_leaf(test_account_1_rlp()), - transactions_trie: Default::default(), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - - let initial_stack = vec![]; - let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); - initialize_mpts(&mut interpreter, &trie_inputs); - assert_eq!(interpreter.stack(), vec![]); - - let type_extension = U256::from(PartialTrieType::Extension as u32); - let type_leaf = U256::from(PartialTrieType::Leaf as u32); - assert_eq!( - interpreter.get_trie_data(), - vec![ - 0.into(), // First address is unused, so that 0 can be treated as a null pointer. - type_extension, - 3.into(), // 3 nibbles - 0xABC.into(), // key part - 5.into(), // Pointer to the leaf node immediately below. - type_leaf, - 3.into(), // 3 nibbles - 0xDEF.into(), // key part - 9.into(), // value pointer - test_account_1().nonce, - test_account_1().balance, - 13.into(), // pointer to storage trie root - test_account_1().code_hash.into_uint(), - // These last two elements encode the storage trie, which is a hash node. 
- (PartialTrieType::Hash as u32).into(), - test_account_1().storage_root.into_uint(), - ] - ); - - Ok(()) -} - -#[test] -fn load_mpt_txn_trie() -> Result<()> { - let txn = hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b").to_vec(); - - let trie_inputs = TrieInputs { - state_trie: Default::default(), - transactions_trie: HashedPartialTrie::from(Node::Leaf { - nibbles: Nibbles::from_str("0x80").unwrap(), - value: txn.clone(), - }), - receipts_trie: Default::default(), - storage_tries: vec![], - }; - - let initial_stack = vec![]; - let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); - initialize_mpts(&mut interpreter, &trie_inputs); - assert_eq!(interpreter.stack(), vec![]); - - let mut expected_trie_data = vec![ - 0.into(), - U256::from(PartialTrieType::Leaf as u32), - 2.into(), - 128.into(), // Nibble - 5.into(), // value_ptr - txn.len().into(), - ]; - expected_trie_data.extend(txn.into_iter().map(U256::from)); - let trie_data = interpreter.get_trie_data(); - - assert_eq!(trie_data, expected_trie_data); - - Ok(()) -} +// #[test] +// fn load_all_mpts_hash() -> Result<()> { +// let hash = H256::random(); +// let trie_inputs = TrieInputs { +// state_trie: Node::Hash(hash).into(), +// transactions_trie: Default::default(), +// receipts_trie: Default::default(), +// storage_tries: vec![], +// }; + +// let initial_stack = vec![]; +// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, +// initial_stack); initialize_mpts(&mut interpreter, &trie_inputs); +// assert_eq!(interpreter.stack(), vec![]); + +// let type_hash = U256::from(PartialTrieType::Hash as u32); +// assert_eq!( +// interpreter.get_trie_data(), +// vec![0.into(), type_hash, hash.into_uint(),] +// ); + +// assert_eq!( +// interpreter. 
+// get_global_metadata_field(GlobalMetadata::TransactionTrieRoot), +// 0.into() +// ); +// assert_eq!( +// interpreter. +// get_global_metadata_field(GlobalMetadata::ReceiptTrieRoot), 0.into() +// ); + +// Ok(()) +// } + +// #[test] +// fn load_all_mpts_empty_branch() -> Result<()> { +// let children = core::array::from_fn(|_| Node::Empty.into()); +// let state_trie = Node::Branch { +// children, +// value: vec![], +// } +// .into(); +// let trie_inputs = TrieInputs { +// state_trie, +// transactions_trie: Default::default(), +// receipts_trie: Default::default(), +// storage_tries: vec![], +// }; + +// let initial_stack = vec![]; +// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, +// initial_stack); initialize_mpts(&mut interpreter, &trie_inputs); +// assert_eq!(interpreter.stack(), vec![]); + +// let type_branch = U256::from(PartialTrieType::Branch as u32); +// assert_eq!( +// interpreter.get_trie_data(), +// vec![ +// 0.into(), // First address is unused, so that 0 can be treated as +// a null pointer. type_branch, +// 0.into(), // child 0 +// 0.into(), // ... +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), +// 0.into(), // child 16 +// 0.into(), // value_ptr +// ] +// ); + +// assert_eq!( +// interpreter. +// get_global_metadata_field(GlobalMetadata::TransactionTrieRoot), +// 0.into() +// ); +// assert_eq!( +// interpreter. 
+// get_global_metadata_field(GlobalMetadata::ReceiptTrieRoot), 0.into() +// ); + +// Ok(()) +// } + +// #[test] +// fn load_all_mpts_ext_to_leaf() -> Result<()> { +// let trie_inputs = TrieInputs { +// state_trie: extension_to_leaf(test_account_1_rlp()), +// transactions_trie: Default::default(), +// receipts_trie: Default::default(), +// storage_tries: vec![], +// }; + +// let initial_stack = vec![]; +// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, +// initial_stack); initialize_mpts(&mut interpreter, &trie_inputs); +// assert_eq!(interpreter.stack(), vec![]); + +// let type_extension = U256::from(PartialTrieType::Extension as u32); +// let type_leaf = U256::from(PartialTrieType::Leaf as u32); +// assert_eq!( +// interpreter.get_trie_data(), +// vec![ +// 0.into(), // First address is unused, so that 0 can be treated as +// a null pointer. type_extension, +// 3.into(), // 3 nibbles +// 0xABC.into(), // key part +// 5.into(), // Pointer to the leaf node immediately below. +// type_leaf, +// 3.into(), // 3 nibbles +// 0xDEF.into(), // key part +// 9.into(), // value pointer +// test_account_1().nonce, +// test_account_1().balance, +// 13.into(), // pointer to storage trie root +// test_account_1().code_hash.into_uint(), +// // These last two elements encode the storage trie, which is a +// hash node. 
(PartialTrieType::Hash as u32).into(), +// test_account_1().storage_root.into_uint(), +// ] +// ); + +// Ok(()) +// } + +// #[test] +// fn load_mpt_txn_trie() -> Result<()> { +// let txn = +// hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b" +// ).to_vec(); + +// let trie_inputs = TrieInputs { +// state_trie: Default::default(), +// transactions_trie: HashedPartialTrie::from(Node::Leaf { +// nibbles: Nibbles::from_str("0x80").unwrap(), +// value: txn.clone(), +// }), +// receipts_trie: Default::default(), +// storage_tries: vec![], +// }; + +// let initial_stack = vec![]; +// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, +// initial_stack); initialize_mpts(&mut interpreter, &trie_inputs); +// assert_eq!(interpreter.stack(), vec![]); + +// let mut expected_trie_data = vec![ +// 0.into(), +// U256::from(PartialTrieType::Leaf as u32), +// 2.into(), +// 128.into(), // Nibble +// 5.into(), // value_ptr +// txn.len().into(), +// ]; +// expected_trie_data.extend(txn.into_iter().map(U256::from)); +// let trie_data = interpreter.get_trie_data(); + +// assert_eq!(trie_data, expected_trie_data); + +// Ok(()) +// } diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs index 84f64bb7b..47645bc99 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/mod.rs @@ -32,8 +32,8 @@ pub(crate) fn test_account_1() -> AccountRlp { AccountRlp { nonce: U256::from(1111), balance: U256::from(2222), - storage_root: H256::from_uint(&U256::from(3333)), - code_hash: H256::from_uint(&U256::from(4444)), + code_length: U256::from(3333), + code_hash: U256::from(4444), } } @@ -45,8 +45,8 @@ pub(crate) fn test_account_2() -> AccountRlp { AccountRlp { nonce: U256::from(5555), balance: U256::from(6666), - 
storage_root: H256::from_uint(&U256::from(7777)), - code_hash: H256::from_uint(&U256::from(8888)), + code_length: U256::from(7777), + code_hash: U256::from(8888), } } diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs index 9b669a21c..7220c344f 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs @@ -1,6 +1,13 @@ -use anyhow::Result; -use ethereum_types::BigEndianHash; +use anyhow::{anyhow, Result}; +use ethereum_types::{BigEndianHash, H160, U256}; use plonky2::field::goldilocks_field::GoldilocksField as F; +use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use rand::{thread_rng, Rng}; +use smt_trie::db::MemoryDb; +use smt_trie::keys::key_balance; +use smt_trie::smt::{Key, Smt}; +use smt_trie::utils::key2u; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; @@ -10,15 +17,18 @@ use crate::cpu::kernel::tests::mpt::{extension_to_leaf, test_account_1, test_acc use crate::generation::TrieInputs; #[test] -fn mpt_read() -> Result<()> { +fn smt_read() -> Result<()> { + let mut state_smt = Smt::::default(); + let key = key_balance(H160(thread_rng().gen())); + let value = U256(thread_rng().gen()); + state_smt.set(key, value); let trie_inputs = TrieInputs { - state_trie: extension_to_leaf(test_account_1_rlp()), + state_smt: state_smt.serialize(), transactions_trie: Default::default(), receipts_trie: Default::default(), - storage_tries: vec![], }; - let mpt_read = KERNEL.global_labels["mpt_read"]; + let smt_read_state = KERNEL.global_labels["smt_read_state"]; let initial_stack = vec![]; let mut interpreter: Interpreter = Interpreter::new(0, initial_stack); @@ -26,29 +36,19 @@ fn mpt_read() -> Result<()> { assert_eq!(interpreter.stack(), vec![]); // Now, execute mpt_read on the state trie. 
- interpreter.generation_state.registers.program_counter = mpt_read; + interpreter.generation_state.registers.program_counter = smt_read_state; interpreter .push(0xdeadbeefu32.into()) .expect("The stack should not overflow"); interpreter - .push(0xABCDEFu64.into()) - .expect("The stack should not overflow"); - interpreter - .push(6.into()) - .expect("The stack should not overflow"); - interpreter - .push(interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot)) + .push(key2u(key)) .expect("The stack should not overflow"); interpreter.run()?; assert_eq!(interpreter.stack().len(), 1); let result_ptr = interpreter.stack()[0].as_usize(); - let result = &interpreter.get_trie_data()[result_ptr..][..4]; - assert_eq!(result[0], test_account_1().nonce); - assert_eq!(result[1], test_account_1().balance); - // result[2] is the storage root pointer. We won't check that it matches a - // particular address, since that seems like over-specifying. - assert_eq!(result[3], test_account_1().code_hash.into_uint()); + let result = interpreter.get_trie_data()[result_ptr]; + assert_eq!(result, value); Ok(()) } diff --git a/evm_arithmetization/src/cpu/kernel/tests/receipt.rs b/evm_arithmetization/src/cpu/kernel/tests/receipt.rs index de6e7f1d4..fa7471fa4 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/receipt.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/receipt.rs @@ -1,618 +1,650 @@ -use anyhow::Result; -use ethereum_types::{Address, U256}; -use hex_literal::hex; -use keccak_hash::keccak; -use plonky2::field::goldilocks_field::GoldilocksField as F; -use rand::{thread_rng, Rng}; - -use crate::cpu::kernel::aggregator::KERNEL; -use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; -use crate::cpu::kernel::constants::txn_fields::NormalizedTxnField; -use crate::cpu::kernel::interpreter::Interpreter; -use crate::cpu::kernel::tests::account_code::initialize_mpts; -use crate::generation::mpt::{LegacyReceiptRlp, LogRlp}; -use 
crate::memory::segments::{Segment, SEGMENT_SCALING_FACTOR}; - -#[test] -fn test_process_receipt() -> Result<()> { - /* Tests process_receipt, which: - - computes the cumulative gas - - computes the bloom filter - - inserts the receipt data in MPT_TRIE_DATA - - inserts a node in receipt_trie - - resets the bloom filter to 0 for the next transaction. */ - let process_receipt = KERNEL.global_labels["process_receipt"]; - let success = U256::from(1); - let leftover_gas = U256::from(4000); - let prev_cum_gas = U256::from(1000); - let retdest = 0xDEADBEEFu32.into(); - - // Log. - let address: Address = thread_rng().gen(); - let num_topics = 1; - - let mut topic = vec![0_u8; 32]; - topic[31] = 4; - - // Compute the expected Bloom filter. - let test_logs_list = vec![(address.to_fixed_bytes().to_vec(), vec![topic])]; - let expected_bloom = logs_bloom_bytes_fn(test_logs_list).to_vec(); - - // Set memory. - let num_nibbles = 2.into(); - let initial_stack: Vec = vec![ - retdest, - num_nibbles, - 0.into(), - prev_cum_gas, - leftover_gas, - success, - ]; - let mut interpreter: Interpreter = Interpreter::new(process_receipt, initial_stack); - interpreter.set_memory_segment( - Segment::LogsData, - vec![ - 56.into(), // payload len - U256::from_big_endian(&address.to_fixed_bytes()), // address - num_topics.into(), // num_topics - 4.into(), // topic - 0.into(), // data_len - ], - ); - interpreter.set_txn_field(NormalizedTxnField::GasLimit, U256::from(5000)); - interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); - interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); - interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 58.into()); - interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); - interpreter.set_global_metadata_field(GlobalMetadata::ReceiptTrieRoot, 500.into()); - interpreter.run()?; - - let segment_read = interpreter.get_memory_segment(Segment::TrieData); - - // The expected TrieData has the form 
[payload_len, status, cum_gas_used, - // bloom_filter, logs_payload_len, num_logs, [logs]] - let mut expected_trie_data: Vec = vec![323.into(), success, 2000.into()]; - expected_trie_data.extend( - expected_bloom - .into_iter() - .map(|elt| elt.into()) - .collect::>(), - ); - expected_trie_data.push(58.into()); // logs_payload_len - expected_trie_data.push(1.into()); // num_logs - expected_trie_data.extend(vec![ - 56.into(), // payload len - U256::from_big_endian(&address.to_fixed_bytes()), // address - num_topics.into(), // num_topics - 4.into(), // topic - 0.into(), // data_len - ]); - - assert_eq!( - expected_trie_data, - segment_read[0..expected_trie_data.len()] - ); - - Ok(()) -} - -/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 -#[test] -fn test_receipt_encoding() -> Result<()> { - // Initialize interpreter. - let success = U256::from(1); - - let retdest = 0xDEADBEEFu32.into(); - let num_topics = 3; - - let encode_receipt = KERNEL.global_labels["encode_receipt"]; - - // Logs and receipt in encodable form. 
- let log_1 = LogRlp { - address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), - topics: vec![ - hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(), - hex!("0000000000000000000000000000000000000000000000000000000000000004").into(), - hex!("00000000000000000000000000000000000000000000000000000000004920ea").into(), - ], - data: hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .to_vec() - .into(), - }; - - let receipt_1 = LegacyReceiptRlp { - status: true, - cum_gas_used: 0x02dcb6u64.into(), - bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000").to_vec().into(), - logs: vec![log_1], - }; - // Get the expected RLP encoding. - let expected_rlp = rlp::encode(&rlp::encode(&receipt_1)); - - // Address at which the encoding is written. - let rlp_addr = U256::from(Segment::RlpRaw as usize); - let initial_stack: Vec = vec![retdest, 0.into(), 0.into(), rlp_addr]; - let mut interpreter: Interpreter = Interpreter::new(encode_receipt, initial_stack); - - // Write data to memory. 
- let expected_bloom_bytes = vec![ - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - ]; - let expected_bloom: Vec = expected_bloom_bytes - .into_iter() - .map(|elt| elt.into()) - .collect(); - - let addr = U256::from([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3, - 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d, - ]); - - let topic1 = U256::from([ - 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31, - 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, - 0x67, 0x4d, - ]); - - let topic2 = 4.into(); - let topic3 = 0x4920ea.into(); - - let mut logs = vec![ - 155.into(), // unused - addr, - num_topics.into(), // num_topics - topic1, // topic1 - topic2, // topic2 - topic3, // topic3 - 32.into(), // data length - ]; - let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .iter() - .copied() 
- .map(U256::from); - logs.extend(cur_data); - - let mut receipt = vec![423.into(), success, receipt_1.cum_gas_used]; - receipt.extend(expected_bloom.clone()); - receipt.push(157.into()); // logs_payload_len - receipt.push(1.into()); // num_logs - receipt.extend(logs.clone()); - interpreter.set_memory_segment(Segment::LogsData, logs); - - interpreter.set_memory_segment(Segment::TxnBloom, expected_bloom); - - interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); - interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 1.into()); - interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 157.into()); - interpreter.set_memory_segment(Segment::TrieData, receipt); - - interpreter.run()?; - let rlp_pos = interpreter.pop().expect("The stack should not be empty"); - - let rlp_read: &[u8] = &interpreter.get_rlp_memory(); - - assert_eq!((rlp_pos - rlp_addr).as_usize(), expected_rlp.len()); - for i in 0..rlp_read.len() { - assert_eq!(rlp_read[i], expected_rlp[i]); - } - - Ok(()) -} - -/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 -#[test] -fn test_receipt_bloom_filter() -> Result<()> { - let logs_bloom = KERNEL.global_labels["logs_bloom"]; - - let num_topics = 3; - - // Expected bloom - let first_bloom_bytes = vec![ - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 
00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x10, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - ]; - - let retdest = 0xDEADBEEFu32.into(); - - let addr = U256::from([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3, - 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d, - ]); - - let topic1 = U256::from([ - 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31, - 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, - 0x67, 0x4d, - ]); - - let topic02 = 0x2a.into(); - let topic03 = 0xbd9fe6.into(); - - // Set logs memory and initialize TxnBloom and BlockBloom segments. - let initial_stack: Vec = vec![retdest]; - - let mut interpreter: Interpreter = Interpreter::new(logs_bloom, initial_stack); - let mut logs = vec![ - 0.into(), // unused - addr, - num_topics.into(), // num_topics - topic1, // topic1 - topic02, // topic2 - topic03, // topic3 - 32.into(), // data_len - ]; - let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .iter() - .copied() - .map(U256::from); - logs.extend(cur_data); - // The Bloom filter initialization is required for this test to ensure we have - // the correct length for the filters. Otherwise, some trailing zeroes could be - // missing. - interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter. - interpreter.set_memory_segment(Segment::LogsData, logs); - interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); - interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); - interpreter.run()?; - - // Second transaction. 
- let loaded_bloom_u256 = interpreter.get_memory_segment(Segment::TxnBloom); - let loaded_bloom: Vec = loaded_bloom_u256 - .into_iter() - .map(|elt| elt.0[0] as u8) - .collect(); - - assert_eq!(first_bloom_bytes, loaded_bloom); - let topic12 = 0x4.into(); - let topic13 = 0x4920ea.into(); - let mut logs2 = vec![ - 0.into(), // unused - addr, - num_topics.into(), // num_topics - topic1, // topic1 - topic12, // topic2 - topic13, // topic3 - 32.into(), // data_len - ]; - let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .iter() - .copied() - .map(U256::from); - logs2.extend(cur_data); - - interpreter - .push(retdest) - .expect("The stack should not overflow"); - interpreter.generation_state.registers.program_counter = logs_bloom; - interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter. - interpreter.set_memory_segment(Segment::LogsData, logs2); - interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); - interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); - interpreter.run()?; - - let second_bloom_bytes = vec![ - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - ]; - - let second_loaded_bloom_u256 = interpreter.get_memory_segment(Segment::TxnBloom); - let second_loaded_bloom: Vec = second_loaded_bloom_u256 - .into_iter() - .map(|elt| elt.0[0] as u8) - .collect(); - - assert_eq!(second_bloom_bytes, second_loaded_bloom); - - Ok(()) -} - -#[test] -fn test_mpt_insert_receipt() -> Result<()> { - // This test simulates a receipt processing to test `mpt_insert_receipt_trie`. - // For this, we need to set the data correctly in memory. - // In TrieData, we need to insert a receipt of the form: - // `[payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, - // [logs]]`. We also need to set TrieDataSize correctly. - - let retdest = 0xDEADBEEFu32.into(); - let trie_inputs = Default::default(); - let mpt_insert = KERNEL.global_labels["mpt_insert_receipt_trie"]; - let num_topics = 3; // Both transactions have the same number of topics. - let payload_len = 423; // Total payload length for each receipt. - let logs_payload_len = 157; // Payload length for all logs. - let log_payload_len = 155; // Payload length for one log. 
- let num_logs = 1; - - // Receipt_0: - let status_0 = 1; - let cum_gas_used_0 = 0x016e5b; - let logs_bloom_0_bytes = vec![ - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x10, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - ]; - - // Logs_0: - let logs_bloom_0: Vec = logs_bloom_0_bytes - .into_iter() - .map(|elt| elt.into()) - .collect(); - - let addr = U256::from([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3, - 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d, - ]); - - // The first topic is shared by the two transactions. 
- let topic1 = U256::from([ - 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31, - 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, - 0x67, 0x4d, - ]); - - let topic02 = 0x2a.into(); - let topic03 = 0xbd9fe6.into(); - - let mut logs_0 = vec![ - log_payload_len.into(), // payload_len - addr, - num_topics.into(), // num_topics - topic1, // topic1 - topic02, // topic2 - topic03, // topic3 - 32.into(), // data_len - ]; - let cur_data = hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") - .iter() - .copied() - .map(U256::from); - logs_0.extend(cur_data); - - let mut receipt: Vec = vec![423.into(), status_0.into(), cum_gas_used_0.into()]; - receipt.extend(logs_bloom_0); - receipt.push(logs_payload_len.into()); // logs_payload_len - receipt.push(num_logs.into()); // num_logs - receipt.extend(logs_0.clone()); - - let mut interpreter: Interpreter = Interpreter::new(0, vec![]); - initialize_mpts(&mut interpreter, &trie_inputs); - - // If TrieData is empty, we need to push 0 because the first value is always 0. - let mut cur_trie_data = interpreter.get_memory_segment(Segment::TrieData); - if cur_trie_data.is_empty() { - cur_trie_data.push(0.into()); - } - - // stack: transaction_nb, value_ptr, retdest - let num_nibbles = 2; - let initial_stack: Vec = vec![ - retdest, - cur_trie_data.len().into(), - 0x80.into(), - num_nibbles.into(), - ]; - for i in 0..initial_stack.len() { - interpreter - .push(initial_stack[i]) - .expect("The stack should not overflow"); - } - - interpreter.generation_state.registers.program_counter = mpt_insert; - - // Set memory. - cur_trie_data.extend(receipt); - interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); - interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, cur_trie_data.len().into()); - // First insertion. 
- interpreter.run()?; - - // receipt_1: - let status_1 = 1; - let cum_gas_used_1 = 0x02dcb6; - let logs_bloom_1_bytes = vec![ - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, - 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, - ]; - - // Logs_1: - let logs_bloom_1: Vec = logs_bloom_1_bytes - .into_iter() - .map(|elt| elt.into()) - .collect(); - - let topic12 = 4.into(); - let topic13 = 0x4920ea.into(); - - let mut logs_1 = vec![ - log_payload_len.into(), // payload length - addr, - num_topics.into(), // nb topics - topic1, // topic1 - topic12, // topic2 - topic13, // topic3 - 32.into(), // data length - ]; - let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .iter() - .copied() - .map(U256::from); - logs_1.extend(cur_data); - - let mut receipt_1: Vec = vec![payload_len.into(), status_1.into(), cum_gas_used_1.into()]; - receipt_1.extend(logs_bloom_1); - receipt_1.push(logs_payload_len.into()); // logs payload len - receipt_1.push(num_logs.into()); // nb logs - 
receipt_1.extend(logs_1.clone()); - - // Get updated TrieData segment. - cur_trie_data = interpreter.get_memory_segment(Segment::TrieData); - let num_nibbles = 2; - let initial_stack2: Vec = vec![ - retdest, - cur_trie_data.len().into(), - 0x01.into(), - num_nibbles.into(), - ]; - for i in 0..initial_stack2.len() { - interpreter - .push(initial_stack2[i]) - .expect("The stack should not overflow"); - } - cur_trie_data.extend(receipt_1); - - // Set memory. - interpreter.generation_state.registers.program_counter = mpt_insert; - interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); - let trie_data_len = cur_trie_data.len().into(); - interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); - interpreter.run()?; - - // Finally, check that the hashes correspond. - let mpt_hash_receipt = KERNEL.global_labels["mpt_hash_receipt_trie"]; - interpreter.generation_state.registers.program_counter = mpt_hash_receipt; - interpreter - .push(retdest) - .expect("The stack should not overflow"); - interpreter - .push(1.into()) // Initial length of the trie data segment, unused.; // Initial length of the trie data - // segment, unused. - .expect("The stack should not overflow"); - interpreter.run()?; - assert_eq!( - interpreter.stack()[1], - U256::from(hex!( - "da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98" - )) - ); - Ok(()) -} - -#[test] -fn test_bloom_two_logs() -> Result<()> { - // Tests the Bloom filter computation with two logs in one transaction. - - // address - let to = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x09, 0x5e, 0x7b, 0xae, 0xa6, 0xa6, 0xc7, 0xc4, 0xc2, - 0xdf, 0xeb, 0x97, 0x7e, 0xfa, 0xc3, 0x26, 0xaf, 0x55, 0x2d, 0x87, - ]; - - let retdest = 0xDEADBEEFu32.into(); - let logs_bloom = KERNEL.global_labels["logs_bloom"]; - - let initial_stack: Vec = vec![retdest]; - - // Set memory. 
- let logs = vec![ - 0.into(), // unused - to.into(), // address - 0.into(), // num_topics - 0.into(), // data_len, - 0.into(), // unused: rlp - to.into(), - 2.into(), // num_topics - 0x62.into(), - 0x63.into(), - 5.into(), - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xa1, - 0xb2, 0xc3, 0xd4, 0xe5, - ] - .into(), - ]; - let mut interpreter: Interpreter = Interpreter::new(logs_bloom, initial_stack); - interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter. - interpreter.set_memory_segment(Segment::LogsData, logs); - interpreter.set_memory_segment(Segment::Logs, vec![0.into(), 4.into()]); - interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(2)); - interpreter.run()?; - - let loaded_bloom_bytes: Vec = interpreter - .get_memory_segment(Segment::TxnBloom) - .into_iter() - .map(|elt| elt.0[0] as u8) - .collect(); - - let expected = hex!("00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000004000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000400000000000040000000000000000000000000002000000000000000000000000000").to_vec(); - - assert_eq!(expected, loaded_bloom_bytes); - Ok(()) -} - -fn logs_bloom_bytes_fn(logs_list: Vec<(Vec, Vec>)>) -> [u8; 256] { - // The first element of logs_list. 
- let mut bloom = [0_u8; 256]; - - for log in logs_list { - let cur_addr = log.0; - let topics = log.1; - - add_to_bloom(&mut bloom, &cur_addr); - for topic in topics { - add_to_bloom(&mut bloom, &topic); - } - } - bloom -} - -fn add_to_bloom(bloom: &mut [u8; 256], bloom_entry: &[u8]) { - let bloom_hash = keccak(bloom_entry).to_fixed_bytes(); - - for idx in 0..3 { - let bit_pair = u16::from_be_bytes(bloom_hash[2 * idx..2 * (idx + 1)].try_into().unwrap()); - let bit_to_set = 0x07FF - (bit_pair & 0x07FF); - let byte_index = bit_to_set / 8; - let bit_value = 1 << (7 - bit_to_set % 8); - bloom[byte_index as usize] |= bit_value; - } -} +// use anyhow::Result; +// use ethereum_types::{Address, U256}; +// use hex_literal::hex; +// use keccak_hash::keccak; +// use plonky2::field::goldilocks_field::GoldilocksField as F; +// use rand::{thread_rng, Rng}; + +// use crate::cpu::kernel::aggregator::KERNEL; +// use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; +// use crate::cpu::kernel::constants::txn_fields::NormalizedTxnField; +// use crate::cpu::kernel::interpreter::Interpreter; +// use crate::cpu::kernel::tests::account_code::initialize_mpts; +// use crate::generation::mpt::{LegacyReceiptRlp, LogRlp}; +// use crate::memory::segments::Segment; + +// #[test] +// fn test_process_receipt() -> Result<()> { +// /* Tests process_receipt, which: +// - computes the cumulative gas +// - computes the bloom filter +// - inserts the receipt data in MPT_TRIE_DATA +// - inserts a node in receipt_trie +// - resets the bloom filter to 0 for the next transaction. */ +// let process_receipt = KERNEL.global_labels["process_receipt"]; +// let success = U256::from(1); +// let leftover_gas = U256::from(4000); +// let prev_cum_gas = U256::from(1000); +// let retdest = 0xDEADBEEFu32.into(); + +// // Log. +// let address: Address = thread_rng().gen(); +// let num_topics = 1; + +// let mut topic = vec![0_u8; 32]; +// topic[31] = 4; + +// // Compute the expected Bloom filter. 
+// let test_logs_list = vec![(address.to_fixed_bytes().to_vec(), +// vec![topic])]; let expected_bloom = +// logs_bloom_bytes_fn(test_logs_list).to_vec(); + +// // Set memory. +// let num_nibbles = 2.into(); +// let initial_stack: Vec = vec![ +// retdest, +// num_nibbles, +// 0.into(), +// prev_cum_gas, +// leftover_gas, +// success, +// ]; +// let mut interpreter: Interpreter = +// Interpreter::new_with_kernel(process_receipt, initial_stack); +// interpreter.set_memory_segment( +// Segment::LogsData, +// vec![ +// 56.into(), // payload len +// U256::from_big_endian(&address.to_fixed_bytes()), // address +// num_topics.into(), // num_topics +// 4.into(), // topic +// 0.into(), // data_len +// ], +// ); +// interpreter.set_txn_field(NormalizedTxnField::GasLimit, +// U256::from(5000)); interpreter.set_memory_segment(Segment::TxnBloom, +// vec![0.into(); 256]); interpreter.set_memory_segment(Segment::Logs, +// vec![0.into()]); interpreter. +// set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 58.into()); +// interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, +// U256::from(1)); interpreter. 
+// set_global_metadata_field(GlobalMetadata::ReceiptTrieRoot, 500.into()); +// interpreter.run()?; + +// let segment_read = interpreter.get_memory_segment(Segment::TrieData); + +// // The expected TrieData has the form [payload_len, status, cum_gas_used, +// bloom_filter, logs_payload_len, num_logs, [logs]] let mut +// expected_trie_data: Vec = vec![323.into(), success, 2000.into()]; +// expected_trie_data.extend( +// expected_bloom +// .into_iter() +// .map(|elt| elt.into()) +// .collect::>(), +// ); +// expected_trie_data.push(58.into()); // logs_payload_len +// expected_trie_data.push(1.into()); // num_logs +// expected_trie_data.extend(vec![ +// 56.into(), // payload len +// U256::from_big_endian(&address.to_fixed_bytes()), // address +// num_topics.into(), // num_topics +// 4.into(), // topic +// 0.into(), // data_len +// ]); + +// assert_eq!( +// expected_trie_data, +// segment_read[0..expected_trie_data.len()] +// ); + +// Ok(()) +// } + +// /// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 +// #[test] +// fn test_receipt_encoding() -> Result<()> { +// // Initialize interpreter. +// let success = U256::from(1); + +// let retdest = 0xDEADBEEFu32.into(); +// let num_topics = 3; + +// let encode_receipt = KERNEL.global_labels["encode_receipt"]; + +// // Logs and receipt in encodable form. +// let log_1 = LogRlp { +// address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), +// topics: vec![ +// +// hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d"). +// into(), +// hex!("0000000000000000000000000000000000000000000000000000000000000004"). +// into(), +// hex!("00000000000000000000000000000000000000000000000000000000004920ea"). 
+// into(), ], +// data: +// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") +// .to_vec() +// .into(), +// }; + +// let receipt_1 = LegacyReceiptRlp { +// status: true, +// cum_gas_used: 0x02dcb6u64.into(), +// bloom: +// hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000" +// ).to_vec().into(), logs: vec![log_1], +// }; +// // Get the expected RLP encoding. +// let expected_rlp = rlp::encode(&rlp::encode(&receipt_1)); + +// let initial_stack: Vec = vec![retdest, 0.into(), 0.into(), +// 0.into()]; let mut interpreter: Interpreter = +// Interpreter::new_with_kernel(encode_receipt, initial_stack); + +// // Write data to memory. 
+// let expected_bloom_bytes = vec![ +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, +// 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, +// 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// ]; +// let expected_bloom: Vec = expected_bloom_bytes +// .into_iter() +// .map(|elt| elt.into()) +// .collect(); + +// let addr = U256::from([ +// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, +// 0x9e, 0x12, 0xca, 0xf3, 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, +// 0x77, 0xc5, 0x9d, 0x8d, ]); + +// let topic1 = U256::from([ +// 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, +// 0x49, 0x51, 0x27, 0x31, 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, +// 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, 0x67, 0x4d, +// ]); + +// let topic2 = 4.into(); +// let topic3 = 0x4920ea.into(); + +// let mut logs = vec![ +// 155.into(), // unused +// addr, +// num_topics.into(), // num_topics +// topic1, // topic1 +// topic2, // topic2 +// topic3, // topic3 +// 32.into(), // data length +// ]; +// let cur_data = +// 
hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") +// .iter() +// .copied() +// .map(U256::from); +// logs.extend(cur_data); + +// let mut receipt = vec![423.into(), success, receipt_1.cum_gas_used]; +// receipt.extend(expected_bloom.clone()); +// receipt.push(157.into()); // logs_payload_len +// receipt.push(1.into()); // num_logs +// receipt.extend(logs.clone()); +// interpreter.set_memory_segment(Segment::LogsData, logs); + +// interpreter.set_memory_segment(Segment::TxnBloom, expected_bloom); + +// interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); +// interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 1.into()); +// interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, +// 157.into()); interpreter.set_memory_segment(Segment::TrieData, receipt); + +// interpreter.run()?; +// let rlp_pos = interpreter.pop().expect("The stack should not be empty"); + +// let rlp_read: Vec = interpreter.get_rlp_memory(); + +// assert_eq!(rlp_pos.as_usize(), expected_rlp.len()); +// for i in 0..rlp_read.len() { +// assert_eq!(rlp_read[i], expected_rlp[i]); +// } + +// Ok(()) +// } + +// /// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 +// #[test] +// fn test_receipt_bloom_filter() -> Result<()> { +// let logs_bloom = KERNEL.global_labels["logs_bloom"]; + +// let num_topics = 3; + +// // Expected bloom +// let first_bloom_bytes = vec![ +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, +// 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 0x08, 00, 0x08, 00, 00, 
00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, ]; + +// let retdest = 0xDEADBEEFu32.into(); + +// let addr = U256::from([ +// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, +// 0x9e, 0x12, 0xca, 0xf3, 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, +// 0x77, 0xc5, 0x9d, 0x8d, ]); + +// let topic1 = U256::from([ +// 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, +// 0x49, 0x51, 0x27, 0x31, 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, +// 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, 0x67, 0x4d, +// ]); + +// let topic02 = 0x2a.into(); +// let topic03 = 0xbd9fe6.into(); + +// // Set logs memory and initialize TxnBloom and BlockBloom segments. +// let initial_stack: Vec = vec![retdest]; + +// let mut interpreter: Interpreter = +// Interpreter::new_with_kernel(logs_bloom, initial_stack); let mut logs = +// vec![ 0.into(), // unused +// addr, +// num_topics.into(), // num_topics +// topic1, // topic1 +// topic02, // topic2 +// topic03, // topic3 +// 32.into(), // data_len +// ]; +// let cur_data = +// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") +// .iter() +// .copied() +// .map(U256::from); +// logs.extend(cur_data); +// // The Bloom filter initialization is required for this test to ensure we +// have the correct length for the filters. Otherwise, some trailing zeroes +// could be missing. interpreter.set_memory_segment(Segment::TxnBloom, +// vec![0.into(); 256]); // Initialize transaction Bloom filter. 
+// interpreter.set_memory_segment(Segment::LogsData, logs); +// interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); +// interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, +// U256::from(1)); interpreter.run()?; + +// // Second transaction. +// let loaded_bloom_u256 = +// interpreter.get_memory_segment(Segment::TxnBloom); let loaded_bloom: +// Vec = loaded_bloom_u256 .into_iter() +// .map(|elt| elt.0[0] as u8) +// .collect(); + +// assert_eq!(first_bloom_bytes, loaded_bloom); +// let topic12 = 0x4.into(); +// let topic13 = 0x4920ea.into(); +// let mut logs2 = vec![ +// 0.into(), // unused +// addr, +// num_topics.into(), // num_topics +// topic1, // topic1 +// topic12, // topic2 +// topic13, // topic3 +// 32.into(), // data_len +// ]; +// let cur_data = +// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") +// .iter() +// .copied() +// .map(U256::from); +// logs2.extend(cur_data); + +// interpreter +// .push(retdest) +// .expect("The stack should not overflow"); +// interpreter.generation_state.registers.program_counter = logs_bloom; +// interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); +// // Initialize transaction Bloom filter. interpreter. +// set_memory_segment(Segment::LogsData, logs2); interpreter. +// set_memory_segment(Segment::Logs, vec![0.into()]); interpreter. 
+// set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); +// interpreter.run()?; + +// let second_bloom_bytes = vec![ +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, +// 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, +// 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// ]; + +// let second_loaded_bloom_u256 = +// interpreter.get_memory_segment(Segment::TxnBloom); +// let second_loaded_bloom: Vec = second_loaded_bloom_u256 +// .into_iter() +// .map(|elt| elt.0[0] as u8) +// .collect(); + +// assert_eq!(second_bloom_bytes, second_loaded_bloom); + +// Ok(()) +// } + +// #[test] +// fn test_mpt_insert_receipt() -> Result<()> { +// // This test simulates a receipt processing to test +// `mpt_insert_receipt_trie`. // For this, we need to set the data correctly +// in memory. // In TrieData, we need to insert a receipt of the form: +// // `[payload_len, status, cum_gas_used, bloom, logs_payload_len, +// num_logs, [logs]]`. // We also need to set TrieDataSize correctly. 
+ +// let retdest = 0xDEADBEEFu32.into(); +// let trie_inputs = Default::default(); +// let mpt_insert = KERNEL.global_labels["mpt_insert_receipt_trie"]; +// let num_topics = 3; // Both transactions have the same number of topics. +// let payload_len = 423; // Total payload length for each receipt. +// let logs_payload_len = 157; // Payload length for all logs. +// let log_payload_len = 155; // Payload length for one log. +// let num_logs = 1; + +// // Receipt_0: +// let status_0 = 1; +// let cum_gas_used_0 = 0x016e5b; +// let logs_bloom_0_bytes = vec![ +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, +// 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, ]; + +// // Logs_0: +// let logs_bloom_0: Vec = logs_bloom_0_bytes +// .into_iter() +// .map(|elt| elt.into()) +// .collect(); + +// let addr = U256::from([ +// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, +// 0x9e, 0x12, 0xca, 0xf3, 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, +// 0x77, 0xc5, 0x9d, 0x8d, ]); + 
+// // The first topic is shared by the two transactions. +// let topic1 = U256::from([ +// 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, +// 0x49, 0x51, 0x27, 0x31, 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, +// 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, 0x67, 0x4d, +// ]); + +// let topic02 = 0x2a.into(); +// let topic03 = 0xbd9fe6.into(); + +// let mut logs_0 = vec![ +// log_payload_len.into(), // payload_len +// addr, +// num_topics.into(), // num_topics +// topic1, // topic1 +// topic02, // topic2 +// topic03, // topic3 +// 32.into(), // data_len +// ]; +// let cur_data = +// hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") +// .iter() +// .copied() +// .map(U256::from); +// logs_0.extend(cur_data); + +// let mut receipt: Vec = vec![423.into(), status_0.into(), +// cum_gas_used_0.into()]; receipt.extend(logs_bloom_0); +// receipt.push(logs_payload_len.into()); // logs_payload_len +// receipt.push(num_logs.into()); // num_logs +// receipt.extend(logs_0.clone()); + +// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, +// vec![]); initialize_mpts(&mut interpreter, &trie_inputs); + +// // If TrieData is empty, we need to push 0 because the first value is +// always 0. let mut cur_trie_data = +// interpreter.get_memory_segment(Segment::TrieData); if cur_trie_data. +// is_empty() { cur_trie_data.push(0.into()); +// } + +// // stack: transaction_nb, value_ptr, retdest +// let num_nibbles = 2; +// let initial_stack: Vec = vec![ +// retdest, +// cur_trie_data.len().into(), +// 0x80.into(), +// num_nibbles.into(), +// ]; +// for i in 0..initial_stack.len() { +// interpreter +// .push(initial_stack[i]) +// .expect("The stack should not overflow"); +// } + +// interpreter.generation_state.registers.program_counter = mpt_insert; + +// // Set memory. 
+// cur_trie_data.extend(receipt); +// interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); +// interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, +// cur_trie_data.len().into()); // First insertion. +// interpreter.run()?; + +// // receipt_1: +// let status_1 = 1; +// let cum_gas_used_1 = 0x02dcb6; +// let logs_bloom_1_bytes = vec![ +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, +// 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, +// 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, +// ]; + +// // Logs_1: +// let logs_bloom_1: Vec = logs_bloom_1_bytes +// .into_iter() +// .map(|elt| elt.into()) +// .collect(); + +// let topic12 = 4.into(); +// let topic13 = 0x4920ea.into(); + +// let mut logs_1 = vec![ +// log_payload_len.into(), // payload length +// addr, +// num_topics.into(), // nb topics +// topic1, // topic1 +// topic12, // topic2 +// topic13, // topic3 +// 32.into(), // data length +// ]; +// let cur_data = +// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") 
+// .iter() +// .copied() +// .map(U256::from); +// logs_1.extend(cur_data); + +// let mut receipt_1: Vec = vec![payload_len.into(), status_1.into(), +// cum_gas_used_1.into()]; receipt_1.extend(logs_bloom_1); +// receipt_1.push(logs_payload_len.into()); // logs payload len +// receipt_1.push(num_logs.into()); // nb logs +// receipt_1.extend(logs_1.clone()); + +// // Get updated TrieData segment. +// cur_trie_data = interpreter.get_memory_segment(Segment::TrieData); +// let num_nibbles = 2; +// let initial_stack2: Vec = vec![ +// retdest, +// cur_trie_data.len().into(), +// 0x01.into(), +// num_nibbles.into(), +// ]; +// for i in 0..initial_stack2.len() { +// interpreter +// .push(initial_stack2[i]) +// .expect("The stack should not overflow"); +// } +// cur_trie_data.extend(receipt_1); + +// // Set memory. +// interpreter.generation_state.registers.program_counter = mpt_insert; +// interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); +// interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, +// cur_trie_data.len().into()); interpreter.run()?; + +// // Finally, check that the hashes correspond. +// let mpt_hash_receipt = KERNEL.global_labels["mpt_hash_receipt_trie"]; +// interpreter.generation_state.registers.program_counter = +// mpt_hash_receipt; interpreter +// .push(retdest) +// .expect("The stack should not overflow"); +// interpreter +// .push(1.into()) // Initial length of the trie data segment, unused.; +// // Initial length of the trie data segment, unused. .expect("The +// stack should not overflow"); interpreter.run()?; +// assert_eq!( +// interpreter.stack()[1], +// U256::from(hex!( +// +// "da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98" )) +// ); +// Ok(()) +// } + +// #[test] +// fn test_bloom_two_logs() -> Result<()> { +// // Tests the Bloom filter computation with two logs in one transaction. 
+ +// // address +// let to = [ +// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x09, 0x5e, 0x7b, 0xae, 0xa6, +// 0xa6, 0xc7, 0xc4, 0xc2, 0xdf, 0xeb, 0x97, 0x7e, 0xfa, 0xc3, 0x26, +// 0xaf, 0x55, 0x2d, 0x87, ]; + +// let retdest = 0xDEADBEEFu32.into(); +// let logs_bloom = KERNEL.global_labels["logs_bloom"]; + +// let initial_stack: Vec = vec![retdest]; + +// // Set memory. +// let logs = vec![ +// 0.into(), // unused +// to.into(), // address +// 0.into(), // num_topics +// 0.into(), // data_len, +// 0.into(), // unused: rlp +// to.into(), +// 2.into(), // num_topics +// 0x62.into(), +// 0x63.into(), +// 5.into(), +// [ +// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +// 0, 0, 0, 0, 0, 0xa1, 0xb2, 0xc3, 0xd4, 0xe5, +// ] +// .into(), +// ]; +// let mut interpreter: Interpreter = +// Interpreter::new_with_kernel(logs_bloom, initial_stack); interpreter. +// set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize +// transaction Bloom filter. interpreter. +// set_memory_segment(Segment::LogsData, logs); interpreter. +// set_memory_segment(Segment::Logs, vec![0.into(), 4.into()]); interpreter. 
+// set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(2)); +// interpreter.run()?; + +// let loaded_bloom_bytes: Vec = interpreter +// .get_memory_segment(Segment::TxnBloom) +// .into_iter() +// .map(|elt| elt.0[0] as u8) +// .collect(); + +// let expected = +// hex!("00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000004000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000400000000000040000000000000000000000000002000000000000000000000000000" +// ).to_vec(); + +// assert_eq!(expected, loaded_bloom_bytes); +// Ok(()) +// } + +// fn logs_bloom_bytes_fn(logs_list: Vec<(Vec, Vec>)>) -> [u8; 256] +// { // The first element of logs_list. +// let mut bloom = [0_u8; 256]; + +// for log in logs_list { +// let cur_addr = log.0; +// let topics = log.1; + +// add_to_bloom(&mut bloom, &cur_addr); +// for topic in topics { +// add_to_bloom(&mut bloom, &topic); +// } +// } +// bloom +// } + +// fn add_to_bloom(bloom: &mut [u8; 256], bloom_entry: &[u8]) { +// let bloom_hash = keccak(bloom_entry).to_fixed_bytes(); + +// for idx in 0..3 { +// let bit_pair = u16::from_be_bytes(bloom_hash[2 * idx..2 * (idx + +// 1)].try_into().unwrap()); let bit_to_set = 0x07FF - (bit_pair & +// 0x07FF); let byte_index = bit_to_set / 8; +// let bit_value = 1 << (7 - bit_to_set % 8); +// bloom[byte_index as usize] |= bit_value; +// } +// } diff --git a/evm_arithmetization/src/cpu/stack.rs b/evm_arithmetization/src/cpu/stack.rs index cd7ca703d..0c460ca11 100644 --- a/evm_arithmetization/src/cpu/stack.rs +++ b/evm_arithmetization/src/cpu/stack.rs @@ -29,6 +29,7 @@ pub(crate) const MIGHT_OVERFLOW: OpsColumnsView = OpsColumnsView { not_pop: false, shift: false, 
jumpdest_keccak_general: false, + poseidon: false, push_prover_input: true, // PROVER_INPUT doesn't require the check, but PUSH does. jumps: false, pc_push0: true, @@ -120,6 +121,11 @@ pub(crate) const STACK_BEHAVIORS: OpsColumnsView> = OpsCol disable_other_channels: false, }), jumpdest_keccak_general: None, + poseidon: Some(StackBehavior { + num_pops: 3, + pushes: true, + disable_other_channels: true, + }), push_prover_input: Some(StackBehavior { num_pops: 0, pushes: true, diff --git a/evm_arithmetization/src/fixed_recursive_verifier.rs b/evm_arithmetization/src/fixed_recursive_verifier.rs index a60b9e7e3..b2e32909d 100644 --- a/evm_arithmetization/src/fixed_recursive_verifier.rs +++ b/evm_arithmetization/src/fixed_recursive_verifier.rs @@ -485,6 +485,13 @@ where &all_stark.cross_table_lookups, stark_config, ); + let poseidon = RecursiveCircuitsForTable::new( + Table::Poseidon, + &all_stark.poseidon_stark, + degree_bits_ranges[*Table::Poseidon].clone(), + &all_stark.cross_table_lookups, + stark_config, + ); let by_table = [ arithmetic, @@ -494,6 +501,7 @@ where keccak_sponge, logic, memory, + poseidon, ]; let root = Self::create_root_circuit(&by_table, stark_config); let aggregation = Self::create_aggregation_circuit(&root); diff --git a/evm_arithmetization/src/generation/mod.rs b/evm_arithmetization/src/generation/mod.rs index e3536a120..d7907827a 100644 --- a/evm_arithmetization/src/generation/mod.rs +++ b/evm_arithmetization/src/generation/mod.rs @@ -11,6 +11,7 @@ use plonky2::hash::hash_types::RichField; use plonky2::timed; use plonky2::util::timing::TimingTree; use serde::{Deserialize, Serialize}; +use smt_trie::smt::hash_serialize_u256; use starky::config::StarkConfig; use GlobalMetadata::{ ReceiptTrieRootDigestAfter, ReceiptTrieRootDigestBefore, StateTrieRootDigestAfter, @@ -68,7 +69,7 @@ pub struct GenerationInputs { /// Mapping between smart contract code hashes and the contract byte code. 
/// All account smart contracts that are invoked will have an entry present. - pub contract_code: HashMap>, + pub contract_code: HashMap>, /// Information contained in the block header. pub block_metadata: BlockMetadata, @@ -78,12 +79,12 @@ pub struct GenerationInputs { pub block_hashes: BlockHashes, } -#[derive(Clone, Debug, Deserialize, Serialize, Default)] +#[derive(Clone, Debug, Deserialize, Serialize)] pub struct TrieInputs { - /// A partial version of the state trie prior to these transactions. It - /// should include all nodes that will be accessed by these - /// transactions. - pub state_trie: HashedPartialTrie, + /// A serialized partial version of the state SMT prior to these + /// transactions. It should include all nodes that will be accessed by + /// these transactions. + pub state_smt: Vec, /// A partial version of the transaction trie prior to these transactions. /// It should include all nodes that will be accessed by these @@ -94,11 +95,18 @@ pub struct TrieInputs { /// should include all nodes that will be accessed by these /// transactions. pub receipts_trie: HashedPartialTrie, +} - /// A partial version of each storage trie prior to these transactions. It - /// should include all storage tries, and nodes therein, that will be - /// accessed by these transactions. - pub storage_tries: Vec<(H256, HashedPartialTrie)>, +impl Default for TrieInputs { + fn default() -> Self { + Self { + // First 2 zeros are for the default empty node. + // The next 2 are for the current empty state trie root. 
+ state_smt: vec![U256::zero(); 4], + transactions_trie: Default::default(), + receipts_trie: Default::default(), + } + } } fn apply_metadata_and_tries_memops, const D: usize>( @@ -137,7 +145,7 @@ fn apply_metadata_and_tries_memops, const D: usize> ), ( GlobalMetadata::StateTrieRootDigestBefore, - h2u(tries.state_trie.hash()), + hash_serialize_u256(&tries.state_smt), ), ( GlobalMetadata::TransactionTrieRootDigestBefore, @@ -206,13 +214,12 @@ fn apply_metadata_and_tries_memops, const D: usize> pub(crate) fn debug_inputs(inputs: &GenerationInputs) { log::debug!("Input signed_txn: {:?}", &inputs.signed_txn); - log::debug!("Input state_trie: {:?}", &inputs.tries.state_trie); + log::debug!("Input state_trie: {:?}", &inputs.tries.state_smt); log::debug!( "Input transactions_trie: {:?}", &inputs.tries.transactions_trie ); log::debug!("Input receipts_trie: {:?}", &inputs.tries.receipts_trie); - log::debug!("Input storage_tries: {:?}", &inputs.tries.storage_tries); log::debug!("Input contract_code: {:?}", &inputs.contract_code); } @@ -279,7 +286,7 @@ pub fn generate_traces, const D: usize>( Ok((tables, public_values)) } -fn simulate_cpu(state: &mut GenerationState) -> anyhow::Result<()> { +fn simulate_cpu(state: &mut GenerationState) -> anyhow::Result<()> { state.run_cpu()?; let pc = state.registers.program_counter; diff --git a/evm_arithmetization/src/generation/mpt.rs b/evm_arithmetization/src/generation/mpt.rs index 79e923068..8e1a775e1 100644 --- a/evm_arithmetization/src/generation/mpt.rs +++ b/evm_arithmetization/src/generation/mpt.rs @@ -8,6 +8,8 @@ use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use rlp::{Decodable, DecoderError, Encodable, PayloadInfo, Rlp, RlpStream}; use rlp_derive::{RlpDecodable, RlpEncodable}; +use smt_trie::code::{hash_bytecode_u256, hash_contract_bytecode}; +use smt_trie::utils::hashout2u; use crate::cpu::kernel::constants::trie_type::PartialTrieType; use crate::generation::TrieInputs; @@ -19,8 
+21,8 @@ use crate::Node; pub struct AccountRlp { pub nonce: U256, pub balance: U256, - pub storage_root: H256, - pub code_hash: H256, + pub code_hash: U256, + pub code_length: U256, } #[derive(Clone, Debug)] @@ -35,8 +37,8 @@ impl Default for AccountRlp { Self { nonce: U256::zero(), balance: U256::zero(), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak([]), + code_hash: hash_bytecode_u256(vec![]), + code_length: U256::zero(), } } } @@ -67,6 +69,12 @@ impl LegacyReceiptRlp { } } +pub(crate) fn state_smt_prover_inputs_reversed(trie_inputs: &TrieInputs) -> Vec { + let mut inputs = state_smt_prover_inputs(trie_inputs); + inputs.reverse(); + inputs +} + pub(crate) fn parse_receipts(rlp: &[u8]) -> Result, ProgramError> { let txn_type = match rlp.first().ok_or(ProgramError::InvalidRlp)? { 1 => 1, @@ -112,6 +120,13 @@ pub(crate) fn parse_receipts(rlp: &[u8]) -> Result, ProgramError> { Ok(parsed_receipt) } +pub(crate) fn state_smt_prover_inputs(trie_inputs: &TrieInputs) -> Vec { + let len = trie_inputs.state_smt.len(); + let mut v = vec![len.into()]; + v.extend(trie_inputs.state_smt.iter()); + v +} + fn parse_storage_value(value_rlp: &[u8]) -> Result, ProgramError> { let value: U256 = rlp::decode(value_rlp).map_err(|_| ProgramError::InvalidRlp)?; Ok(vec![value]) @@ -202,132 +217,12 @@ where } } -fn load_state_trie( - trie: &HashedPartialTrie, - key: Nibbles, - trie_data: &mut Vec, - storage_tries_by_state_key: &HashMap, -) -> Result { - let node_ptr = trie_data.len(); - let type_of_trie = PartialTrieType::of(trie) as u32; - if type_of_trie > 0 { - trie_data.push(type_of_trie.into()); - } - match trie.deref() { - Node::Empty => Ok(0), - Node::Hash(h) => { - trie_data.push(h2u(*h)); - - Ok(node_ptr) - } - Node::Branch { children, value } => { - if !value.is_empty() { - return Err(ProgramError::ProverInputError( - ProverInputError::InvalidMptInput, - )); - } - // First, set children pointers to 0. 
- let first_child_ptr = trie_data.len(); - trie_data.extend(vec![U256::zero(); 16]); - // Then, set value pointer to 0. - trie_data.push(U256::zero()); - - // Now, load all children and update their pointers. - for (i, child) in children.iter().enumerate() { - let extended_key = key.merge_nibbles(&Nibbles { - count: 1, - packed: i.into(), - }); - let child_ptr = - load_state_trie(child, extended_key, trie_data, storage_tries_by_state_key)?; - - trie_data[first_child_ptr + i] = child_ptr.into(); - } - - Ok(node_ptr) - } - Node::Extension { nibbles, child } => { - trie_data.push(nibbles.count.into()); - trie_data.push( - nibbles - .try_into_u256() - .map_err(|_| ProgramError::IntegerTooLarge)?, - ); - // Set `value_ptr_ptr`. - trie_data.push((trie_data.len() + 1).into()); - let extended_key = key.merge_nibbles(nibbles); - let child_ptr = - load_state_trie(child, extended_key, trie_data, storage_tries_by_state_key)?; - if child_ptr == 0 { - trie_data.push(0.into()); - } - - Ok(node_ptr) - } - Node::Leaf { nibbles, value } => { - let account: AccountRlp = rlp::decode(value).map_err(|_| ProgramError::InvalidRlp)?; - let AccountRlp { - nonce, - balance, - storage_root, - code_hash, - } = account; - - let storage_hash_only = HashedPartialTrie::new(Node::Hash(storage_root)); - let merged_key = key.merge_nibbles(nibbles); - let storage_trie: &HashedPartialTrie = storage_tries_by_state_key - .get(&merged_key) - .copied() - .unwrap_or(&storage_hash_only); - - assert_eq!(storage_trie.hash(), storage_root, - "In TrieInputs, an account's storage_root didn't match the associated storage trie hash"); - - trie_data.push(nibbles.count.into()); - trie_data.push( - nibbles - .try_into_u256() - .map_err(|_| ProgramError::IntegerTooLarge)?, - ); - // Set `value_ptr_ptr`. - trie_data.push((trie_data.len() + 1).into()); - - trie_data.push(nonce); - trie_data.push(balance); - // Storage trie ptr. 
- let storage_ptr_ptr = trie_data.len(); - trie_data.push((trie_data.len() + 2).into()); - trie_data.push(code_hash.into_uint()); - let storage_ptr = load_mpt(storage_trie, trie_data, &parse_storage_value)?; - if storage_ptr == 0 { - trie_data[storage_ptr_ptr] = 0.into(); - } - - Ok(node_ptr) - } - } -} - pub(crate) fn load_all_mpts( trie_inputs: &TrieInputs, ) -> Result<(TrieRootPtrs, Vec), ProgramError> { - let mut trie_data = vec![U256::zero()]; - let storage_tries_by_state_key = trie_inputs - .storage_tries - .iter() - .map(|(hashed_address, storage_trie)| { - let key = Nibbles::from_bytes_be(hashed_address.as_bytes()) - .expect("An H256 is 32 bytes long"); - (key, storage_trie) - }) - .collect(); - - let state_root_ptr = load_state_trie( - &trie_inputs.state_trie, - empty_nibbles(), - &mut trie_data, - &storage_tries_by_state_key, - )?; + let mut trie_data = trie_inputs.state_smt.clone(); + + let state_root_ptr = 2; let txn_root_ptr = load_mpt(&trie_inputs.transactions_trie, &mut trie_data, &|rlp| { let mut parsed_txn = vec![U256::from(rlp.len())]; diff --git a/evm_arithmetization/src/generation/prover_input.rs b/evm_arithmetization/src/generation/prover_input.rs index ebffadc8a..79a02d19f 100644 --- a/evm_arithmetization/src/generation/prover_input.rs +++ b/evm_arithmetization/src/generation/prover_input.rs @@ -7,7 +7,9 @@ use ethereum_types::{BigEndianHash, H256, U256, U512}; use itertools::Itertools; use num_bigint::BigUint; use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; use serde::{Deserialize, Serialize}; +use smt_trie::code::hash_bytecode_u256; use crate::cpu::kernel::constants::context_metadata::ContextMetadata; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; @@ -39,7 +41,7 @@ impl From> for ProverInputFn { } } -impl GenerationState { +impl GenerationState { pub(crate) fn prover_input(&mut self, input_fn: &ProverInputFn) -> Result { match input_fn.0[0].as_str() { "no_txn" => self.no_txn(), @@ -55,6 +57,7 
@@ impl GenerationState { "num_bits" => self.run_num_bits(), "jumpdest_table" => self.run_jumpdest_table(input_fn), "access_lists" => self.run_access_lists(input_fn), + "poseidon_code" => self.run_poseidon_code(), _ => Err(ProgramError::ProverInputError(InvalidFunction)), } } @@ -152,13 +155,25 @@ impl GenerationState { let code = self .inputs .contract_code - .get(&H256::from_uint(&codehash)) + .get(&codehash) .ok_or(ProgramError::ProverInputError(CodeHashNotFound))?; + let code_len = code.len(); + for &byte in code { self.memory.set(address, byte.into()); address.increment(); } - Ok(code.len().into()) + + // Padding + self.memory.set(address, 1.into()); + let mut len = code_len + 1; + len = 56 * ((len + 55) / 56); + let last_byte_addr = MemoryAddress::new(context, Segment::Code, len - 1); + let mut last_byte = u256_to_usize(self.memory.get_with_init(last_byte_addr))?; + last_byte |= 0x80; + self.memory.set(last_byte_addr, last_byte.into()); + + Ok(len.into()) } // Bignum modular multiplication. @@ -389,9 +404,24 @@ impl GenerationState { } Ok((Segment::AccessedStorageKeys as usize).into()) } + + fn run_poseidon_code(&mut self) -> Result { + let addr = stack_peek(self, 0)?; + let len = stack_peek(self, 1)?.as_usize(); + let addr = MemoryAddress::new_bundle(addr)?; + let code = (0..len) + .map(|i| { + let mut a = addr; + a.virt += i; + self.memory.get_with_init(a).as_usize() as u8 + }) + .collect_vec(); + + Ok(hash_bytecode_u256(code)) + } } -impl GenerationState { +impl GenerationState { /// Simulate the user's code and store all the jump addresses with their /// respective contexts. 
fn generate_jumpdest_table(&mut self) -> Result<(), ProgramError> { diff --git a/evm_arithmetization/src/generation/state.rs b/evm_arithmetization/src/generation/state.rs index e919eba80..82ab141a6 100644 --- a/evm_arithmetization/src/generation/state.rs +++ b/evm_arithmetization/src/generation/state.rs @@ -6,6 +6,9 @@ use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use itertools::Itertools; use keccak_hash::keccak; use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use smt_trie::code::{hash_bytecode_u256, hash_contract_bytecode}; +use smt_trie::utils::hashout2u; use super::mpt::{load_all_mpts, TrieRootPtrs}; use super::TrieInputs; @@ -19,6 +22,7 @@ use crate::generation::GenerationInputs; use crate::keccak_sponge::columns::KECCAK_WIDTH_BYTES; use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeOp; use crate::memory::segments::Segment; +use crate::poseidon::poseidon_stark::PoseidonOp; use crate::util::u256_to_usize; use crate::witness::errors::ProgramError; use crate::witness::memory::MemoryChannel::GeneralPurpose; @@ -36,7 +40,7 @@ use crate::{arithmetic, keccak, logic}; /// A State is either an `Interpreter` (used for tests and jumpdest analysis) or /// a `GenerationState`. -pub(crate) trait State { +pub(crate) trait State { /// Returns a `State`'s latest `Checkpoint`. 
fn checkpoint(&mut self) -> GenerationStateCheckpoint; @@ -98,6 +102,10 @@ pub(crate) trait State { self.get_mut_generation_state().traces.memory_ops.push(op); } + fn push_poseidon(&mut self, op: PoseidonOp) { + self.get_mut_generation_state().traces.poseidon_ops.push(op); + } + fn push_byte_packing(&mut self, op: BytePackingOp) { self.get_mut_generation_state() .traces @@ -297,7 +305,7 @@ pub(crate) struct GenerationState { pub(crate) jumpdest_table: Option>>, } -impl GenerationState { +impl GenerationState { fn preinitialize_mpts(&mut self, trie_inputs: &TrieInputs) -> TrieRootPtrs { let (trie_roots_ptrs, trie_data) = load_all_mpts(trie_inputs).expect("Invalid MPT data for preinitialization"); @@ -346,8 +354,7 @@ impl GenerationState { self.observe_address(tip_h160); } else if dst == KERNEL.global_labels["observe_new_contract"] { let tip_u256 = stack_peek(self, 0)?; - let tip_h256 = H256::from_uint(&tip_u256); - self.observe_contract(tip_h256)?; + self.observe_contract(tip_u256)?; } Ok(()) @@ -363,7 +370,7 @@ impl GenerationState { /// Observe the given code hash and store the associated code. /// When called, the code corresponding to `codehash` should be stored in /// the return data. - pub(crate) fn observe_contract(&mut self, codehash: H256) -> Result<(), ProgramError> { + pub(crate) fn observe_contract(&mut self, codehash: U256) -> Result<(), ProgramError> { if self.inputs.contract_code.contains_key(&codehash) { return Ok(()); // Return early if the code hash has already been // observed. 
@@ -379,7 +386,7 @@ impl GenerationState { .iter() .map(|x| x.unwrap_or_default().low_u32() as u8) .collect::>(); - debug_assert_eq!(keccak(&code), codehash); + debug_assert_eq!(hash_bytecode_u256(code.clone()), codehash); self.inputs.contract_code.insert(codehash, code); @@ -419,7 +426,7 @@ impl GenerationState { } } -impl State for GenerationState { +impl State for GenerationState { fn checkpoint(&mut self) -> GenerationStateCheckpoint { GenerationStateCheckpoint { registers: self.registers, @@ -535,7 +542,7 @@ impl State for GenerationState { } } -impl Transition for GenerationState { +impl Transition for GenerationState { fn skip_if_necessary(&mut self, op: Operation) -> Result { Ok(op) } diff --git a/evm_arithmetization/src/generation/trie_extractor.rs b/evm_arithmetization/src/generation/trie_extractor.rs index dfea16234..fd5f40766 100644 --- a/evm_arithmetization/src/generation/trie_extractor.rs +++ b/evm_arithmetization/src/generation/trie_extractor.rs @@ -197,12 +197,13 @@ pub(crate) fn read_state_rlp_value( get_trie(memory, slice[2].unwrap_or_default().as_usize(), |_, x| { Ok(rlp::encode(&read_storage_trie_value(x)).to_vec()) })?; - let account = AccountRlp { - nonce: slice[0].unwrap_or_default(), - balance: slice[1].unwrap_or_default(), - storage_root: storage_trie.hash(), - code_hash: H256::from_uint(&slice[3].unwrap_or_default()), - }; + // let account = AccountRlp { + // nonce: slice[0], + // balance: slice[1], + // storage_root: storage_trie.hash(), + // code_hash: H256::from_uint(&slice[3]), + // }; + let account = AccountRlp::default(); // TODO: fix Ok(rlp::encode(&account).to_vec()) } diff --git a/evm_arithmetization/src/lib.rs b/evm_arithmetization/src/lib.rs index b3cdc0e37..5118c4a61 100644 --- a/evm_arithmetization/src/lib.rs +++ b/evm_arithmetization/src/lib.rs @@ -192,6 +192,7 @@ pub mod keccak; pub mod keccak_sponge; pub mod logic; pub mod memory; +pub mod poseidon; // Proving system components pub mod all_stark; diff --git 
a/evm_arithmetization/src/poseidon/columns.rs b/evm_arithmetization/src/poseidon/columns.rs new file mode 100644 index 000000000..fcd0621b4 --- /dev/null +++ b/evm_arithmetization/src/poseidon/columns.rs @@ -0,0 +1,155 @@ +use std::borrow::{Borrow, BorrowMut}; +use std::mem::{size_of, transmute}; + +use plonky2::hash::poseidon; + +use crate::util::{indices_arr, transmute_no_compile_time_size_checks}; + +pub(crate) const POSEIDON_SPONGE_WIDTH: usize = poseidon::SPONGE_WIDTH; +pub(crate) const POSEIDON_SPONGE_RATE: usize = poseidon::SPONGE_RATE; +pub(crate) const HALF_N_FULL_ROUNDS: usize = poseidon::HALF_N_FULL_ROUNDS; +pub(crate) const N_PARTIAL_ROUNDS: usize = poseidon::N_PARTIAL_ROUNDS; +pub(crate) const POSEIDON_DIGEST: usize = 4; + +#[repr(C)] +#[derive(Eq, PartialEq, Debug)] +pub(crate) struct PoseidonColumnsView { + /// Registers to hold permutation inputs. + pub input: [T; POSEIDON_SPONGE_WIDTH], + + /// Holds x^3 for all elements in full rounds. + pub cubed_full: [T; 2 * HALF_N_FULL_ROUNDS * POSEIDON_SPONGE_WIDTH], + + /// Holds x^3 for the first element in partial rounds. + pub cubed_partial: [T; N_PARTIAL_ROUNDS], + + /// Holds the input of the `i`-th S-box of the `round`-th round of the first + /// set of full rounds. + pub full_sbox_0: [T; POSEIDON_SPONGE_WIDTH * (HALF_N_FULL_ROUNDS - 1)], + + /// Holds the input of the S-box of the `round`-th round of the partial + /// rounds. + pub partial_sbox: [T; N_PARTIAL_ROUNDS], + + /// Holds the input of the `i`-th S-box of the `round`-th round of the + /// second set of full rounds. + pub full_sbox_1: [T; POSEIDON_SPONGE_WIDTH * HALF_N_FULL_ROUNDS], + + /// The digest, with each element divided into two 32-bit limbs. + pub digest: [T; 2 * POSEIDON_DIGEST], + + /// The output of the hash function with the digest removed. + pub output_partial: [T; POSEIDON_SPONGE_WIDTH - POSEIDON_DIGEST], + + /// Holds the pseudo-inverse of (digest_high_limb_i - 2^32 + 1). 
+ pub pinv: [T; POSEIDON_DIGEST], + + pub not_padding: T, +} + +/// Returns the index of `i`-th input capacity element within the input. +pub(crate) fn reg_input_capacity(i: usize) -> usize { + debug_assert!(i < POSEIDON_SPONGE_WIDTH - POSEIDON_SPONGE_RATE); + POSEIDON_SPONGE_RATE + i +} + +/// Returns the index the `i`-th x^3 in the `round`-th round for full rounds. +/// Note: the cubes of the two sets of full rounds are stored one after the +/// other. +pub(crate) fn reg_cubed_full(round: usize, i: usize) -> usize { + debug_assert!(i < POSEIDON_SPONGE_WIDTH); + debug_assert!(round < 2 * HALF_N_FULL_ROUNDS); + POSEIDON_SPONGE_WIDTH * round + i +} + +/// Returns the index of the `i`-th output capacity element within +/// `output_partial`. +pub(crate) fn reg_output_capacity(i: usize) -> usize { + debug_assert!(i < POSEIDON_SPONGE_WIDTH - POSEIDON_SPONGE_RATE); + POSEIDON_SPONGE_RATE - POSEIDON_DIGEST + i +} + +/// Returns the index of x^3 within for the `round`-th partial round. +pub(crate) fn reg_cubed_partial(round: usize) -> usize { + debug_assert!(round < N_PARTIAL_ROUNDS); + round +} + +/// Returns the index of the `i`-th input in the `round`-th round within +/// `full_sbox_0`. +pub(crate) fn reg_full_sbox_0(round: usize, i: usize) -> usize { + debug_assert!( + round != 0, + "First round S-box inputs are not stored as wires" + ); + debug_assert!(round < HALF_N_FULL_ROUNDS); + debug_assert!(i < POSEIDON_SPONGE_WIDTH); + POSEIDON_SPONGE_WIDTH * (round - 1) + i +} + +/// Returns the index of the input of the S-box of the `round`-th round of the +/// partial rounds. +pub(crate) fn reg_partial_sbox(round: usize) -> usize { + debug_assert!(round < N_PARTIAL_ROUNDS); + round +} + +/// Returns the index of the `i`-th input in the `round`-th round within +/// `full_sbox_1`. 
+pub(crate) fn reg_full_sbox_1(round: usize, i: usize) -> usize { + debug_assert!(round < HALF_N_FULL_ROUNDS); + debug_assert!(i < POSEIDON_SPONGE_WIDTH); + POSEIDON_SPONGE_WIDTH * round + i +} + +// `u8` is guaranteed to have a `size_of` of 1. +pub(crate) const NUM_COLUMNS: usize = size_of::>(); + +impl From<[T; NUM_COLUMNS]> for PoseidonColumnsView { + fn from(value: [T; NUM_COLUMNS]) -> Self { + unsafe { transmute_no_compile_time_size_checks(value) } + } +} + +impl From> for [T; NUM_COLUMNS] { + fn from(value: PoseidonColumnsView) -> Self { + unsafe { transmute_no_compile_time_size_checks(value) } + } +} + +impl Borrow> for [T; NUM_COLUMNS] { + fn borrow(&self) -> &PoseidonColumnsView { + unsafe { transmute(self) } + } +} + +impl BorrowMut> for [T; NUM_COLUMNS] { + fn borrow_mut(&mut self) -> &mut PoseidonColumnsView { + unsafe { transmute(self) } + } +} + +impl Borrow<[T; NUM_COLUMNS]> for PoseidonColumnsView { + fn borrow(&self) -> &[T; NUM_COLUMNS] { + unsafe { transmute(self) } + } +} + +impl BorrowMut<[T; NUM_COLUMNS]> for PoseidonColumnsView { + fn borrow_mut(&mut self) -> &mut [T; NUM_COLUMNS] { + unsafe { transmute(self) } + } +} + +impl Default for PoseidonColumnsView { + fn default() -> Self { + [T::default(); NUM_COLUMNS].into() + } +} + +const fn make_col_map() -> PoseidonColumnsView { + let indices_arr = indices_arr::(); + unsafe { transmute::<[usize; NUM_COLUMNS], PoseidonColumnsView>(indices_arr) } +} + +pub(crate) const POSEIDON_COL_MAP: PoseidonColumnsView = make_col_map(); diff --git a/evm_arithmetization/src/poseidon/mod.rs b/evm_arithmetization/src/poseidon/mod.rs new file mode 100644 index 000000000..5ee77f125 --- /dev/null +++ b/evm_arithmetization/src/poseidon/mod.rs @@ -0,0 +1,2 @@ +pub mod columns; +pub mod poseidon_stark; diff --git a/evm_arithmetization/src/poseidon/poseidon_stark.rs b/evm_arithmetization/src/poseidon/poseidon_stark.rs new file mode 100644 index 000000000..bfa695cfa --- /dev/null +++ 
b/evm_arithmetization/src/poseidon/poseidon_stark.rs @@ -0,0 +1,562 @@ +use std::borrow::Borrow; +use std::iter::once; +use std::marker::PhantomData; + +use itertools::Itertools; +use plonky2::field::extension::{Extendable, FieldExtension}; +use plonky2::field::packed::PackedField; +use plonky2::field::polynomial::PolynomialValues; +use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use plonky2::hash::poseidon::Poseidon; +use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::timed; +use plonky2::util::timing::TimingTree; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::cross_table_lookup::TableWithColumns; +use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; +use starky::lookup::{Column, Filter}; +use starky::stark::Stark; +use starky::util::trace_rows_to_poly_values; + +use super::columns::{ + reg_cubed_full, reg_cubed_partial, reg_full_sbox_0, reg_full_sbox_1, reg_input_capacity, + reg_output_capacity, reg_partial_sbox, PoseidonColumnsView, HALF_N_FULL_ROUNDS, NUM_COLUMNS, + N_PARTIAL_ROUNDS, POSEIDON_COL_MAP, POSEIDON_DIGEST, POSEIDON_SPONGE_RATE, + POSEIDON_SPONGE_WIDTH, +}; +use crate::all_stark::{EvmStarkFrame, Table}; +use crate::witness::memory::MemoryAddress; + +pub(crate) fn ctl_looked() -> TableWithColumns { + let mut columns = Column::singles(POSEIDON_COL_MAP.input).collect_vec(); + columns.extend(Column::singles(POSEIDON_COL_MAP.digest)); + TableWithColumns::new( + *Table::Poseidon, + columns, + Some(Filter::new_simple(Column::single( + POSEIDON_COL_MAP.not_padding, + ))), + ) +} + +#[derive(Copy, Clone, Debug)] +pub struct PoseidonOp(pub [F; POSEIDON_SPONGE_WIDTH]); + +#[derive(Copy, Clone, Default)] +pub struct PoseidonStark { + pub(crate) f: PhantomData, +} + +/// Information about a Poseidon operation needed for witness generation. +impl, const D: usize> PoseidonStark { + /// Generate the rows of the trace. 
Note that this does not generate the + /// permuted columns used in our lookup arguments, as those are computed + /// after transposing to column-wise form. + fn generate_trace_rows( + &self, + operations: Vec>, + min_rows: usize, + ) -> Vec<[F; NUM_COLUMNS]> { + let num_rows = operations.len().max(min_rows).next_power_of_two(); + let mut rows = Vec::with_capacity(operations.len().max(min_rows)); + + for op in operations { + rows.push(self.generate_row_for_op(op)); + } + + // We generate "actual" rows for padding to avoid having to store + // another power of x, on top of x^3 and x^6. + let padding_row: [F; NUM_COLUMNS] = { + let mut tmp_row = PoseidonColumnsView::default(); + let padding_inp = [F::ZERO; POSEIDON_SPONGE_WIDTH]; + Self::generate_perm(&mut tmp_row, padding_inp); + tmp_row + } + .into(); + rows.resize(num_rows, padding_row); + rows + } + + fn generate_row_for_op(&self, op: PoseidonOp) -> [F; NUM_COLUMNS] { + let mut row = PoseidonColumnsView::default(); + Self::generate_perm(&mut row, op.0); + row.not_padding = F::ONE; + row.into() + } + + fn generate_perm(row: &mut PoseidonColumnsView, input: [F; POSEIDON_SPONGE_WIDTH]) { + // Populate the round input for the first round. + row.input.copy_from_slice(&input); + + let mut state = input; + let mut round_ctr = 0; + + for r in 0..HALF_N_FULL_ROUNDS { + ::constant_layer_field(&mut state, round_ctr); + + for i in 0..POSEIDON_SPONGE_WIDTH { + // We do not need to store the first full_sbox_0 inputs, since they are + // the permutation's inputs. + if r != 0 { + row.full_sbox_0[reg_full_sbox_0(r, i)] = state[i]; + } + // Generate x^3 and x^6 for the SBox layer constraints. + row.cubed_full[reg_cubed_full(r, i)] = state[i].cube(); + + // Apply x^7 to the state. 
+ state[i] *= + row.cubed_full[reg_cubed_full(r, i)] * row.cubed_full[reg_cubed_full(r, i)]; + } + state = ::mds_layer_field(&state); + round_ctr += 1; + } + + ::partial_first_constant_layer(&mut state); + state = ::mds_partial_layer_init(&state); + for r in 0..(N_PARTIAL_ROUNDS - 1) { + row.partial_sbox[reg_partial_sbox(r)] = state[0]; + + // Generate x^3 for the SBox layer constraints. + row.cubed_partial[reg_cubed_partial(r)] = state[0] * state[0] * state[0]; + + state[0] *= + row.cubed_partial[reg_cubed_partial(r)] * row.cubed_partial[reg_cubed_partial(r)]; + state[0] += F::from_canonical_u64(::FAST_PARTIAL_ROUND_CONSTANTS[r]); + state = ::mds_partial_layer_fast_field(&state, r); + } + + row.partial_sbox[reg_partial_sbox(N_PARTIAL_ROUNDS - 1)] = state[0]; + // Generate x^3 and x^6 for the SBox layer constraints. + row.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)] = state[0].cube(); + + state[0] *= row.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)] + * row.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)]; + state = ::mds_partial_layer_fast_field(&state, N_PARTIAL_ROUNDS - 1); + round_ctr += N_PARTIAL_ROUNDS; + + for r in 0..HALF_N_FULL_ROUNDS { + ::constant_layer_field(&mut state, round_ctr); + for i in 0..POSEIDON_SPONGE_WIDTH { + row.full_sbox_1[reg_full_sbox_1(r, i)] = state[i]; + // Generate x^3 and x^6 for the SBox layer constraints. 
+ row.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)] = state[i].cube(); + + state[i] *= row.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)] + * row.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)]; + } + state = ::mds_layer_field(&state); + round_ctr += 1; + } + + for i in 0..POSEIDON_DIGEST { + let state_val = state[i].to_canonical_u64(); + let hi_limb = F::from_canonical_u32((state_val >> 32) as u32); + row.pinv[i] = + if let Some(inv) = (hi_limb - F::from_canonical_u32(u32::MAX)).try_inverse() { + inv + } else { + F::ZERO + }; + row.digest[2 * i] = F::from_canonical_u32(state_val as u32); + row.digest[2 * i + 1] = hi_limb; + } + row.output_partial + .copy_from_slice(&state[POSEIDON_DIGEST..POSEIDON_SPONGE_WIDTH]); + } + + pub fn generate_trace( + &self, + operations: Vec>, + min_rows: usize, + timing: &mut TimingTree, + ) -> Vec> { + // Generate the witness, except for permuted columns in the lookup argument. + let trace_rows = timed!( + timing, + "generate trace rows", + self.generate_trace_rows(operations, min_rows) + ); + let trace_polys = timed!( + timing, + "convert to PolynomialValues", + trace_rows_to_poly_values(trace_rows) + ); + trace_polys + } +} + +impl, const D: usize> Stark for PoseidonStark { + type EvaluationFrame = EvmStarkFrame + where + FE: FieldExtension, + P: PackedField; + + type EvaluationFrameTarget = EvmStarkFrame, ExtensionTarget, NUM_COLUMNS>; + + fn eval_packed_generic( + &self, + vars: &Self::EvaluationFrame, + yield_constr: &mut ConstraintConsumer

, + ) where + FE: FieldExtension, + P: PackedField, + { + let lv: &[P; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap(); + let lv: &PoseidonColumnsView

= lv.borrow(); + + // Padding flag must be boolean. + let not_padding = lv.not_padding; + yield_constr.constraint(not_padding * (not_padding - P::ONES)); + + // Compute the input layer. + let mut state = lv.input; + + let mut round_ctr = 0; + + // First set of full rounds. + for r in 0..HALF_N_FULL_ROUNDS { + ::constant_layer_packed_field(&mut state, round_ctr); + + for i in 0..POSEIDON_SPONGE_WIDTH { + if r != 0 { + let sbox_in = lv.full_sbox_0[reg_full_sbox_0(r, i)]; + yield_constr.constraint(state[i] - sbox_in); + state[i] = sbox_in; + } + + // Check that the powers were correctly generated. + let cube = state[i] * state[i] * state[i]; + yield_constr.constraint(cube - lv.cubed_full[reg_cubed_full(r, i)]); + + state[i] *= + lv.cubed_full[reg_cubed_full(r, i)] * lv.cubed_full[reg_cubed_full(r, i)]; + } + + state = ::mds_layer_packed_field(&state); + round_ctr += 1; + } + + // Partial rounds. + ::partial_first_constant_layer_packed_field(&mut state); + state = ::mds_partial_layer_init_packed_field(&state); + for r in 0..(N_PARTIAL_ROUNDS - 1) { + let sbox_in = lv.partial_sbox[reg_partial_sbox(r)]; + yield_constr.constraint(state[0] - sbox_in); + state[0] = sbox_in; + + // Check that the powers were generated correctly. + let cube = state[0] * state[0] * state[0]; + yield_constr.constraint(cube - lv.cubed_partial[reg_cubed_partial(r)]); + + state[0] = lv.cubed_partial[reg_cubed_partial(r)] + * lv.cubed_partial[reg_cubed_partial(r)] + * sbox_in; + state[0] += + P::Scalar::from_canonical_u64(::FAST_PARTIAL_ROUND_CONSTANTS[r]); + state = ::mds_partial_layer_fast_packed_field(&state, r); + } + let sbox_in = lv.partial_sbox[reg_partial_sbox(N_PARTIAL_ROUNDS - 1)]; + yield_constr.constraint(state[0] - sbox_in); + state[0] = sbox_in; + + // Check that the powers were generated correctly. 
+ let cube = state[0] * state[0] * state[0]; + yield_constr.constraint(cube - lv.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)]); + + state[0] = lv.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)] + * lv.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)] + * sbox_in; + state = ::mds_partial_layer_fast_packed_field(&state, N_PARTIAL_ROUNDS - 1); + round_ctr += N_PARTIAL_ROUNDS; + + // Second set of full rounds. + for r in 0..HALF_N_FULL_ROUNDS { + ::constant_layer_packed_field(&mut state, round_ctr); + for i in 0..POSEIDON_SPONGE_WIDTH { + let sbox_in = lv.full_sbox_1[reg_full_sbox_1(r, i)]; + yield_constr.constraint(state[i] - sbox_in); + state[i] = sbox_in; + + // Check that the powers were correctly generated. + let cube = state[i] * state[i] * state[i]; + yield_constr + .constraint(cube - lv.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)]); + + state[i] *= lv.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)] + * lv.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)]; + } + state = ::mds_layer_packed_field(&state); + round_ctr += 1; + } + + for i in 0..POSEIDON_DIGEST { + yield_constr.constraint( + state[i] + - (lv.digest[2 * i] + + lv.digest[2 * i + 1] * P::Scalar::from_canonical_u64(1 << 32)), + ); + } + for i in POSEIDON_DIGEST..POSEIDON_SPONGE_WIDTH { + yield_constr.constraint(state[i] - lv.output_partial[i - POSEIDON_DIGEST]) + } + + // Ensure that the output limbs are written in canonical form. 
+ for i in 0..POSEIDON_DIGEST { + let constr = ((lv.digest[2 * i + 1] - P::Scalar::from_canonical_u32(u32::MAX)) + * lv.pinv[i] + - P::ONES) + * lv.digest[2 * i]; + yield_constr.constraint(constr); + } + } + + fn eval_ext_circuit( + &self, + builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, + vars: &Self::EvaluationFrameTarget, + yield_constr: &mut RecursiveConstraintConsumer, + ) { + let lv: &[ExtensionTarget; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap(); + let lv: &PoseidonColumnsView> = lv.borrow(); + + // Padding flag must be boolean. + let not_padding = lv.not_padding; + let constr = builder.mul_sub_extension(not_padding, not_padding, not_padding); + yield_constr.constraint(builder, constr); + + // Compute the input layer. + let mut state = lv.input; + + let mut round_ctr = 0; + + // First set of full rounds. + for r in 0..HALF_N_FULL_ROUNDS { + ::constant_layer_circuit(builder, &mut state, round_ctr); + for i in 0..POSEIDON_SPONGE_WIDTH { + if r != 0 { + let sbox_in = lv.full_sbox_0[reg_full_sbox_0(r, i)]; + let constr = builder.sub_extension(state[i], sbox_in); + yield_constr.constraint(builder, constr); + state[i] = sbox_in; + } + + // Check that the powers were correctly generated. + let cube = builder.mul_many_extension([state[i], state[i], state[i]]); + let constr = builder.sub_extension(cube, lv.cubed_full[reg_cubed_full(r, i)]); + yield_constr.constraint(builder, constr); + + // Update the i'th element of the state. + state[i] = builder.mul_many_extension([ + state[i], + lv.cubed_full[reg_cubed_full(r, i)], + lv.cubed_full[reg_cubed_full(r, i)], + ]); + } + + state = ::mds_layer_circuit(builder, &state); + round_ctr += 1; + } + + // Partial rounds. 
+ ::partial_first_constant_layer_circuit(builder, &mut state); + state = ::mds_partial_layer_init_circuit(builder, &state); + for r in 0..(N_PARTIAL_ROUNDS - 1) { + let sbox_in = lv.partial_sbox[reg_partial_sbox(r)]; + let constr = builder.sub_extension(state[0], sbox_in); + yield_constr.constraint(builder, constr); + state[0] = sbox_in; + + // Check that the powers were generated correctly. + let cube = builder.mul_many_extension([state[0], state[0], state[0]]); + let constr = builder.sub_extension(cube, lv.cubed_partial[reg_cubed_partial(r)]); + yield_constr.constraint(builder, constr); + + // Update state[0]. + state[0] = builder.mul_many_extension([ + lv.cubed_partial[reg_cubed_partial(r)], + lv.cubed_partial[reg_cubed_partial(r)], + sbox_in, + ]); + state[0] = builder.add_const_extension( + state[0], + F::from_canonical_u64(::FAST_PARTIAL_ROUND_CONSTANTS[r]), + ); + state = ::mds_partial_layer_fast_circuit(builder, &state, r); + } + let sbox_in = lv.partial_sbox[reg_partial_sbox(N_PARTIAL_ROUNDS - 1)]; + let constr = builder.sub_extension(state[0], sbox_in); + yield_constr.constraint(builder, constr); + state[0] = sbox_in; + + // Check that the powers were generated correctly. + let mut constr = builder.mul_many_extension([state[0], state[0], state[0]]); + constr = builder.sub_extension( + constr, + lv.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)], + ); + yield_constr.constraint(builder, constr); + + state[0] = builder.mul_many_extension([ + lv.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)], + lv.cubed_partial[reg_cubed_partial(N_PARTIAL_ROUNDS - 1)], + sbox_in, + ]); + state = + ::mds_partial_layer_fast_circuit(builder, &state, N_PARTIAL_ROUNDS - 1); + round_ctr += N_PARTIAL_ROUNDS; + + // Second set of full rounds. 
+ for r in 0..HALF_N_FULL_ROUNDS { + ::constant_layer_circuit(builder, &mut state, round_ctr); + for i in 0..POSEIDON_SPONGE_WIDTH { + let sbox_in = lv.full_sbox_1[reg_full_sbox_1(r, i)]; + let constr = builder.sub_extension(state[i], sbox_in); + yield_constr.constraint(builder, constr); + state[i] = sbox_in; + + // Check that the powers were correctly generated. + let mut constr = builder.mul_many_extension([state[i], state[i], state[i]]); + constr = builder.sub_extension( + constr, + lv.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)], + ); + yield_constr.constraint(builder, constr); + + // Update the i'th element of the state. + state[i] = builder.mul_many_extension([ + lv.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)], + lv.cubed_full[reg_cubed_full(HALF_N_FULL_ROUNDS + r, i)], + state[i], + ]); + } + + state = ::mds_layer_circuit(builder, &state); + round_ctr += 1; + } + + for i in 0..POSEIDON_DIGEST { + let val = builder.mul_const_add_extension( + F::from_canonical_u64(1 << 32), + lv.digest[2 * i + 1], + lv.digest[2 * i], + ); + let constr = builder.sub_extension(state[i], val); + yield_constr.constraint(builder, constr); + } + for i in POSEIDON_DIGEST..POSEIDON_SPONGE_WIDTH { + let constr = builder.sub_extension(state[i], lv.output_partial[i - POSEIDON_DIGEST]); + yield_constr.constraint(builder, constr); + } + + // Ensure that the output limbs are written in canonical form. 
+ for i in 0..POSEIDON_DIGEST { + let mut constr = builder.arithmetic_extension( + F::ONE, + F::NEG_ONE * F::from_canonical_u32(u32::MAX), + lv.digest[2 * i + 1], + lv.pinv[i], + lv.pinv[i], + ); + constr = builder.mul_sub_extension(lv.digest[2 * i], constr, lv.digest[2 * i]); + + yield_constr.constraint(builder, constr); + } + } + + fn constraint_degree(&self) -> usize { + 3 + } + + fn requires_ctls(&self) -> bool { + true + } +} + +#[cfg(test)] +mod tests { + use std::borrow::Borrow; + + use anyhow::Result; + use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; + use plonky2::field::polynomial::PolynomialValues; + use plonky2::field::types::{Field, PrimeField64, Sample}; + use plonky2::fri::oracle::PolynomialBatch; + use plonky2::hash::poseidon::Poseidon; + use plonky2::iop::challenger::Challenger; + use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use plonky2::timed; + use plonky2::util::timing::TimingTree; + use starky::cross_table_lookup::{CtlData, CtlZData}; + use starky::lookup::{GrandProductChallenge, GrandProductChallengeSet}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; + + use crate::memory::segments::Segment; + use crate::poseidon::columns::{ + PoseidonColumnsView, POSEIDON_DIGEST, POSEIDON_SPONGE_RATE, POSEIDON_SPONGE_WIDTH, + }; + use crate::poseidon::poseidon_stark::{PoseidonOp, PoseidonStark}; + use crate::prover::prove_single_table; + use crate::witness::memory::MemoryAddress; + use crate::StarkConfig; + + #[test] + fn test_stark_degree() -> Result<()> { + const D: usize = 2; + type C = PoseidonGoldilocksConfig; + type F = >::F; + type S = PoseidonStark; + + let stark = S { + f: Default::default(), + }; + test_stark_low_degree(stark) + } + + #[test] + fn test_stark_circuit() -> Result<()> { + const D: usize = 2; + type C = PoseidonGoldilocksConfig; + type F = >::F; + type S = PoseidonStark; + + let stark = S { + f: Default::default(), + }; + 
test_stark_circuit_constraints::(stark) + } + + #[test] + fn poseidon_correctness_test() -> Result<()> { + const D: usize = 2; + type C = PoseidonGoldilocksConfig; + type F = >::F; + type S = PoseidonStark; + + let stark = S { + f: Default::default(), + }; + + let input = PoseidonOp(F::rand_array()); + let rows = stark.generate_trace_rows(vec![input], 8); + assert_eq!(rows.len(), 8); + let row: PoseidonColumnsView = rows[0].into(); + let expected = F::poseidon(input.0); + assert_eq!( + std::array::from_fn::<_, 4, _>( + |i| row.digest[2 * i] + row.digest[2 * i + 1] * F::from_canonical_u64(1 << 32) + ), + expected[0..POSEIDON_DIGEST] + ); + assert_eq!( + row.output_partial, + expected[POSEIDON_DIGEST..POSEIDON_SPONGE_WIDTH] + ); + + Ok(()) + } + + fn init_logger() { + let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "debug")); + } +} diff --git a/evm_arithmetization/src/prover.rs b/evm_arithmetization/src/prover.rs index a298e6612..d68fc35ae 100644 --- a/evm_arithmetization/src/prover.rs +++ b/evm_arithmetization/src/prover.rs @@ -295,6 +295,21 @@ where ctl_challenges, challenger, timing, + abort_signal.clone(), + )? + ); + let poseidon_proof = timed!( + timing, + "prove memory STARK", + prove_single_table( + &all_stark.poseidon_stark, + config, + &trace_poly_values[Table::Poseidon as usize], + &trace_commitments[Table::Poseidon as usize], + &ctl_data_per_table[Table::Poseidon as usize], + ctl_challenges, + challenger, + timing, abort_signal, )? 
); @@ -307,6 +322,7 @@ where keccak_sponge_proof, logic_proof, memory_proof, + poseidon_proof, ]) } diff --git a/evm_arithmetization/src/verifier.rs b/evm_arithmetization/src/verifier.rs index 52fa5304f..8819ea84f 100644 --- a/evm_arithmetization/src/verifier.rs +++ b/evm_arithmetization/src/verifier.rs @@ -42,6 +42,7 @@ where keccak_sponge_stark, logic_stark, memory_stark, + poseidon_stark, cross_table_lookups, } = all_stark; diff --git a/evm_arithmetization/src/witness/gas.rs b/evm_arithmetization/src/witness/gas.rs index 54597a3eb..199b34760 100644 --- a/evm_arithmetization/src/witness/gas.rs +++ b/evm_arithmetization/src/witness/gas.rs @@ -35,6 +35,7 @@ pub(crate) const fn gas_to_charge(op: Operation) -> u64 { TernaryArithmetic(MulMod) => G_MID, TernaryArithmetic(SubMod) => KERNEL_ONLY_INSTR, KeccakGeneral => KERNEL_ONLY_INSTR, + Poseidon => KERNEL_ONLY_INSTR, ProverInput => KERNEL_ONLY_INSTR, Pop => G_BASE, Jump => G_MID, diff --git a/evm_arithmetization/src/witness/operation.rs b/evm_arithmetization/src/witness/operation.rs index 57945ad05..9cdfff1ae 100644 --- a/evm_arithmetization/src/witness/operation.rs +++ b/evm_arithmetization/src/witness/operation.rs @@ -2,6 +2,7 @@ use ethereum_types::{BigEndianHash, U256}; use itertools::Itertools; use keccak_hash::keccak; use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; use super::transition::Transition; use super::util::{ @@ -17,6 +18,7 @@ use crate::cpu::simple_logic::eq_iszero::generate_pinv_diff; use crate::cpu::stack::MAX_USER_STACK_SIZE; use crate::extension_tower::BN_BASE; use crate::memory::segments::Segment; +use crate::poseidon::poseidon_stark::PoseidonOp; use crate::util::u256_to_usize; use crate::witness::errors::MemoryError::VirtTooLarge; use crate::witness::errors::ProgramError; @@ -38,6 +40,7 @@ pub(crate) enum Operation { BinaryArithmetic(arithmetic::BinaryOperator), TernaryArithmetic(arithmetic::TernaryOperator), KeccakGeneral, + Poseidon, ProverInput, Pop, Jump, @@ -64,7 
+67,7 @@ pub(crate) const CONTEXT_SCALING_FACTOR: usize = 64; /// operation. Generates a new logic operation and adds it to the vector of /// operation in `LogicStark`. Adds three memory read operations to /// `MemoryStark`: for the two inputs and the output. -pub(crate) fn generate_binary_logic_op>( +pub(crate) fn generate_binary_logic_op>( op: logic::Op, state: &mut T, mut row: CpuColumnsView, @@ -82,7 +85,7 @@ pub(crate) fn generate_binary_logic_op>( Ok(()) } -pub(crate) fn generate_binary_arithmetic_op>( +pub(crate) fn generate_binary_arithmetic_op>( operator: arithmetic::BinaryOperator, state: &mut T, mut row: CpuColumnsView, @@ -113,7 +116,7 @@ pub(crate) fn generate_binary_arithmetic_op>( Ok(()) } -pub(crate) fn generate_ternary_arithmetic_op>( +pub(crate) fn generate_ternary_arithmetic_op>( operator: arithmetic::TernaryOperator, state: &mut T, mut row: CpuColumnsView, @@ -132,7 +135,7 @@ pub(crate) fn generate_ternary_arithmetic_op>( Ok(()) } -pub(crate) fn generate_keccak_general>( +pub(crate) fn generate_keccak_general>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -164,7 +167,36 @@ pub(crate) fn generate_keccak_general>( Ok(()) } -pub(crate) fn generate_prover_input>( +/// Pops 3 elements `x,y,z` from the stack, and returns `Poseidon(x || y || +/// z)[0..4]`, where values are split into 64-bit limbs, and `z` is used as the +/// capacity. Limbs are range-checked to be in canonical form in the +/// PoseidonStark. 
+pub(crate) fn generate_poseidon>( + state: &mut T, + mut row: CpuColumnsView, +) -> Result<(), ProgramError> { + let generation_state = state.get_mut_generation_state(); + let [(x, _), (y, log_in1), (z, log_in2)] = + stack_pop_with_log_and_fill::<3, _>(generation_state, &mut row)?; + let mut arr = [ + x.0[0], x.0[1], x.0[2], x.0[3], y.0[0], y.0[1], y.0[2], y.0[3], z.0[0], z.0[1], z.0[2], + z.0[3], + ] + .map(F::from_canonical_u64); + let hash = F::poseidon(arr); + let hash = U256(std::array::from_fn(|i| hash[i].to_canonical_u64())); + log::debug!("Poseidon hashing {:?} -> {}", arr, hash); + push_no_write(generation_state, hash); + + state.push_poseidon(PoseidonOp(arr)); + + state.push_memory(log_in1); + state.push_memory(log_in2); + state.push_cpu(row); + Ok(()) +} + +pub(crate) fn generate_prover_input>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -192,7 +224,7 @@ pub(crate) fn generate_prover_input>( Ok(()) } -pub(crate) fn generate_pop>( +pub(crate) fn generate_pop>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -215,7 +247,7 @@ pub(crate) fn generate_pop>( Ok(()) } -pub(crate) fn generate_pc>( +pub(crate) fn generate_pc>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -228,7 +260,7 @@ pub(crate) fn generate_pc>( Ok(()) } -pub(crate) fn generate_jumpdest>( +pub(crate) fn generate_jumpdest>( state: &mut T, row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -236,7 +268,7 @@ pub(crate) fn generate_jumpdest>( Ok(()) } -pub(crate) fn generate_get_context>( +pub(crate) fn generate_get_context>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -272,7 +304,7 @@ pub(crate) fn generate_get_context>( Ok(()) } -pub(crate) fn generate_set_context>( +pub(crate) fn generate_set_context>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -352,7 +384,7 @@ pub(crate) fn generate_set_context>( Ok(()) } -pub(crate) fn generate_push>( 
+pub(crate) fn generate_push>( n: u8, state: &mut T, mut row: CpuColumnsView, @@ -397,7 +429,7 @@ pub(crate) fn generate_push>( // - Update `stack_top` with `val` and add 1 to `stack_len` // Since the write must happen before the read, the normal way of assigning // GP channels doesn't work and we must handle them manually. -pub(crate) fn generate_dup>( +pub(crate) fn generate_dup>( n: u8, state: &mut T, mut row: CpuColumnsView, @@ -465,7 +497,7 @@ pub(crate) fn generate_dup>( Ok(()) } -pub(crate) fn generate_swap>( +pub(crate) fn generate_swap>( n: u8, state: &mut T, mut row: CpuColumnsView, @@ -496,7 +528,7 @@ pub(crate) fn generate_swap>( Ok(()) } -pub(crate) fn generate_not>( +pub(crate) fn generate_not>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -520,7 +552,7 @@ pub(crate) fn generate_not>( Ok(()) } -pub(crate) fn generate_iszero>( +pub(crate) fn generate_iszero>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -539,7 +571,7 @@ pub(crate) fn generate_iszero>( Ok(()) } -fn append_shift>( +fn append_shift>( state: &mut T, mut row: CpuColumnsView, is_shl: bool, @@ -590,7 +622,7 @@ fn append_shift>( Ok(()) } -pub(crate) fn generate_shl>( +pub(crate) fn generate_shl>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -606,7 +638,7 @@ pub(crate) fn generate_shl>( append_shift(state, row, true, input0, input1, log_in1, result) } -pub(crate) fn generate_shr>( +pub(crate) fn generate_shr>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -621,7 +653,7 @@ pub(crate) fn generate_shr>( append_shift(state, row, false, input0, input1, log_in1, result) } -pub(crate) fn generate_syscall>( +pub(crate) fn generate_syscall>( opcode: u8, stack_values_read: usize, stack_len_increased: bool, @@ -711,7 +743,7 @@ pub(crate) fn generate_syscall>( Ok(()) } -pub(crate) fn generate_eq>( +pub(crate) fn generate_eq>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), 
ProgramError> { @@ -729,7 +761,7 @@ pub(crate) fn generate_eq>( Ok(()) } -pub(crate) fn generate_exit_kernel>( +pub(crate) fn generate_exit_kernel>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -759,7 +791,7 @@ pub(crate) fn generate_exit_kernel>( Ok(()) } -pub(crate) fn generate_mload_general>( +pub(crate) fn generate_mload_general>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -792,7 +824,7 @@ pub(crate) fn generate_mload_general>( Ok(()) } -pub(crate) fn generate_mload_32bytes>( +pub(crate) fn generate_mload_32bytes>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -832,7 +864,7 @@ pub(crate) fn generate_mload_32bytes>( Ok(()) } -pub(crate) fn generate_mstore_general>( +pub(crate) fn generate_mstore_general>( state: &mut T, mut row: CpuColumnsView, ) -> Result<(), ProgramError> { @@ -862,7 +894,7 @@ pub(crate) fn generate_mstore_general>( Ok(()) } -pub(crate) fn generate_mstore_32bytes>( +pub(crate) fn generate_mstore_32bytes>( n: u8, state: &mut T, mut row: CpuColumnsView, @@ -882,7 +914,7 @@ pub(crate) fn generate_mstore_32bytes>( Ok(()) } -pub(crate) fn generate_exception>( +pub(crate) fn generate_exception>( exc_code: u8, state: &mut T, mut row: CpuColumnsView, diff --git a/evm_arithmetization/src/witness/traces.rs b/evm_arithmetization/src/witness/traces.rs index 271595712..d3eecc2d9 100644 --- a/evm_arithmetization/src/witness/traces.rs +++ b/evm_arithmetization/src/witness/traces.rs @@ -1,5 +1,6 @@ use plonky2::field::extension::Extendable; use plonky2::field::polynomial::PolynomialValues; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::timed; use plonky2::util::timing::TimingTree; @@ -11,6 +12,7 @@ use crate::arithmetic::{BinaryOperator, Operation}; use crate::byte_packing::byte_packing_stark::BytePackingOp; use crate::cpu::columns::CpuColumnsView; use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeOp; +use 
crate::poseidon::poseidon_stark::PoseidonOp; use crate::witness::memory::MemoryOp; use crate::{arithmetic, keccak, keccak_sponge, logic}; @@ -23,10 +25,11 @@ pub(crate) struct TraceCheckpoint { pub(self) keccak_sponge_len: usize, pub(self) logic_len: usize, pub(self) memory_len: usize, + pub(self) poseidon_len: usize, } #[derive(Debug)] -pub(crate) struct Traces { +pub(crate) struct Traces { pub(crate) arithmetic_ops: Vec, pub(crate) byte_packing_ops: Vec, pub(crate) cpu: Vec>, @@ -34,9 +37,10 @@ pub(crate) struct Traces { pub(crate) memory_ops: Vec, pub(crate) keccak_inputs: Vec<([u64; keccak::keccak_stark::NUM_INPUTS], usize)>, pub(crate) keccak_sponge_ops: Vec, + pub(crate) poseidon_ops: Vec>, } -impl Traces { +impl Traces { pub(crate) fn new() -> Self { Traces { arithmetic_ops: vec![], @@ -46,6 +50,7 @@ impl Traces { memory_ops: vec![], keccak_inputs: vec![], keccak_sponge_ops: vec![], + poseidon_ops: vec![], } } @@ -81,6 +86,7 @@ impl Traces { // This is technically a lower-bound, as we may fill gaps, // but this gives a relatively good estimate. 
memory_len: self.memory_ops.len(), + poseidon_len: self.poseidon_ops.len(), } } @@ -94,6 +100,7 @@ impl Traces { keccak_sponge_len: self.keccak_sponge_ops.len(), logic_len: self.logic_ops.len(), memory_len: self.memory_ops.len(), + poseidon_len: self.poseidon_ops.len(), } } @@ -106,6 +113,7 @@ impl Traces { .truncate(checkpoint.keccak_sponge_len); self.logic_ops.truncate(checkpoint.logic_len); self.memory_ops.truncate(checkpoint.memory_len); + self.poseidon_ops.truncate(checkpoint.poseidon_len); } pub(crate) fn mem_ops_since(&self, checkpoint: TraceCheckpoint) -> &[MemoryOp] { @@ -134,6 +142,7 @@ impl Traces { memory_ops, keccak_inputs, keccak_sponge_ops, + poseidon_ops, } = self; let arithmetic_trace = timed!( @@ -176,6 +185,13 @@ impl Traces { "generate memory trace", all_stark.memory_stark.generate_trace(memory_ops, timing) ); + let poseidon_trace = timed!( + timing, + "generate memory trace", + all_stark + .poseidon_stark + .generate_trace(poseidon_ops, cap_elements, timing) + ); [ arithmetic_trace, @@ -185,11 +201,12 @@ impl Traces { keccak_sponge_trace, logic_trace, memory_trace, + poseidon_trace, ] } } -impl Default for Traces { +impl Default for Traces { fn default() -> Self { Self::new() } diff --git a/evm_arithmetization/src/witness/transition.rs b/evm_arithmetization/src/witness/transition.rs index 2d0d7501d..ee8eaea62 100644 --- a/evm_arithmetization/src/witness/transition.rs +++ b/evm_arithmetization/src/witness/transition.rs @@ -1,6 +1,7 @@ use ethereum_types::U256; use log::log_enabled; use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; use super::util::{mem_read_gp_with_log_and_fill, stack_pop_with_log_and_fill}; use crate::cpu::columns::CpuColumnsView; @@ -20,7 +21,7 @@ use crate::witness::state::RegistersState; use crate::witness::util::mem_read_code_with_log_and_fill; use crate::{arithmetic, logic}; -pub(crate) fn read_code_memory>( +pub(crate) fn read_code_memory>( state: &mut T, row: &mut CpuColumnsView, ) -> u8 { @@ 
-88,6 +89,7 @@ pub(crate) fn decode(registers: RegistersState, opcode: u8) -> Result Ok(Operation::Syscall(opcode, 2, false)), // SAR (0x20, _) => Ok(Operation::Syscall(opcode, 2, false)), // KECCAK256 (0x21, true) => Ok(Operation::KeccakGeneral), + (0x22, true) => Ok(Operation::Poseidon), (0x30, _) => Ok(Operation::Syscall(opcode, 0, true)), // ADDRESS (0x31, _) => Ok(Operation::Syscall(opcode, 1, false)), // BALANCE (0x32, _) => Ok(Operation::Syscall(opcode, 0, true)), // ORIGIN @@ -180,6 +182,7 @@ pub(crate) fn fill_op_flag(op: Operation, row: &mut CpuColumnsView) Operation::BinaryArithmetic(_) => &mut flags.binary_op, Operation::TernaryArithmetic(_) => &mut flags.ternary_op, Operation::KeccakGeneral | Operation::Jumpdest => &mut flags.jumpdest_keccak_general, + Operation::Poseidon => &mut flags.poseidon, Operation::ProverInput | Operation::Push(1..) => &mut flags.push_prover_input, Operation::Jump | Operation::Jumpi => &mut flags.jumps, Operation::Pc | Operation::Push(0) => &mut flags.pc_push0, @@ -212,6 +215,7 @@ pub(crate) const fn get_op_special_length(op: Operation) -> Option { Operation::BinaryArithmetic(_) => STACK_BEHAVIORS.binary_op, Operation::TernaryArithmetic(_) => STACK_BEHAVIORS.ternary_op, Operation::KeccakGeneral | Operation::Jumpdest => STACK_BEHAVIORS.jumpdest_keccak_general, + Operation::Poseidon => STACK_BEHAVIORS.poseidon, Operation::Jump => JUMP_OP, Operation::Jumpi => JUMPI_OP, Operation::GetContext | Operation::SetContext => None, @@ -251,6 +255,7 @@ pub(crate) const fn might_overflow_op(op: Operation) -> bool { Operation::BinaryArithmetic(_) => MIGHT_OVERFLOW.binary_op, Operation::TernaryArithmetic(_) => MIGHT_OVERFLOW.ternary_op, Operation::KeccakGeneral | Operation::Jumpdest => MIGHT_OVERFLOW.jumpdest_keccak_general, + Operation::Poseidon => MIGHT_OVERFLOW.poseidon, Operation::Jump | Operation::Jumpi => MIGHT_OVERFLOW.jumps, Operation::Pc | Operation::Push(0) => MIGHT_OVERFLOW.pc_push0, Operation::GetContext | Operation::SetContext => 
MIGHT_OVERFLOW.context_op, @@ -260,7 +265,7 @@ pub(crate) const fn might_overflow_op(op: Operation) -> bool { } } -pub(crate) fn log_kernel_instruction>(state: &mut S, op: Operation) { +pub(crate) fn log_kernel_instruction>(state: &mut S, op: Operation) { // The logic below is a bit costly, so skip it if debug logs aren't enabled. if !log_enabled!(log::Level::Debug) { return; @@ -289,7 +294,7 @@ pub(crate) fn log_kernel_instruction>(state: &mut S, op: O assert!(pc < KERNEL.code.len(), "Kernel PC is out of range: {}", pc); } -pub(crate) trait Transition: State { +pub(crate) trait Transition: State { /// When in jumpdest analysis, adds the offset `dst` to the jumpdest table. /// Returns a boolean indicating whether we are running the jumpdest /// analysis. @@ -305,6 +310,7 @@ pub(crate) trait Transition: State { ) -> Result where Self: Sized, + F: RichField, { self.perform_op(op, opcode, row)?; self.incr_pc(match op { @@ -466,6 +472,7 @@ pub(crate) trait Transition: State { ) -> Result<(), ProgramError> where Self: Sized, + F: RichField, { let op = self.skip_if_necessary(op)?; @@ -506,6 +513,7 @@ pub(crate) trait Transition: State { Operation::TernaryArithmetic(op) => generate_ternary_arithmetic_op(op, self, row)?, Operation::KeccakGeneral => generate_keccak_general(self, row)?, Operation::ProverInput => generate_prover_input(self, row)?, + Operation::Poseidon => generate_poseidon(self, row)?, Operation::Pop => generate_pop(self, row)?, Operation::Jump => self.generate_jump(row)?, Operation::Jumpi => self.generate_jumpi(row)?, diff --git a/evm_arithmetization/src/witness/util.rs b/evm_arithmetization/src/witness/util.rs index b90541e69..d2b2c1f4e 100644 --- a/evm_arithmetization/src/witness/util.rs +++ b/evm_arithmetization/src/witness/util.rs @@ -1,5 +1,5 @@ use ethereum_types::U256; -use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; use super::memory::DUMMY_MEMOP; use super::transition::Transition; @@ -22,7 +22,7 @@ fn to_byte_checked(n: 
U256) -> u8 { res } -fn to_bits_le(n: u8) -> [F; 8] { +fn to_bits_le(n: u8) -> [F; 8] { let mut res = [F::ZERO; 8]; for (i, bit) in res.iter_mut().enumerate() { *bit = F::from_bool(n & (1 << i) != 0); @@ -31,7 +31,7 @@ fn to_bits_le(n: u8) -> [F; 8] { } /// Peek at the stack item `i`th from the top. If `i=0` this gives the tip. -pub(crate) fn stack_peek( +pub(crate) fn stack_peek( state: &GenerationState, i: usize, ) -> Result { @@ -50,7 +50,7 @@ pub(crate) fn stack_peek( } /// Peek at kernel at specified segment and address -pub(crate) fn current_context_peek( +pub(crate) fn current_context_peek( state: &GenerationState, segment: Segment, virt: usize, @@ -61,7 +61,11 @@ pub(crate) fn current_context_peek( .get_with_init(MemoryAddress::new(context, segment, virt)) } -pub(crate) fn fill_channel_with_value(row: &mut CpuColumnsView, n: usize, val: U256) { +pub(crate) fn fill_channel_with_value( + row: &mut CpuColumnsView, + n: usize, + val: U256, +) { let channel = &mut row.mem_channels[n]; let val_limbs: [u64; 4] = val.0; for (i, limb) in val_limbs.into_iter().enumerate() { @@ -72,14 +76,14 @@ pub(crate) fn fill_channel_with_value(row: &mut CpuColumnsView, n: /// Pushes without writing in memory. This happens in opcodes where a push /// immediately follows a pop. -pub(crate) fn push_no_write(state: &mut GenerationState, val: U256) { +pub(crate) fn push_no_write(state: &mut GenerationState, val: U256) { state.registers.stack_top = val; state.registers.stack_len += 1; } /// Pushes and (maybe) writes the previous stack top in memory. This happens in /// opcodes which only push. 
-pub(crate) fn push_with_write>( +pub(crate) fn push_with_write>( state: &mut T, row: &mut CpuColumnsView, val: U256, @@ -115,7 +119,7 @@ pub(crate) fn push_with_write>( Ok(()) } -pub(crate) fn mem_read_with_log( +pub(crate) fn mem_read_with_log( channel: MemoryChannel, address: MemoryAddress, state: &GenerationState, @@ -131,7 +135,7 @@ pub(crate) fn mem_read_with_log( (val, op) } -pub(crate) fn mem_write_log( +pub(crate) fn mem_write_log( channel: MemoryChannel, address: MemoryAddress, state: &GenerationState, @@ -146,7 +150,7 @@ pub(crate) fn mem_write_log( ) } -pub(crate) fn mem_read_code_with_log_and_fill( +pub(crate) fn mem_read_code_with_log_and_fill( address: MemoryAddress, state: &GenerationState, row: &mut CpuColumnsView, @@ -159,7 +163,7 @@ pub(crate) fn mem_read_code_with_log_and_fill( (val_u8, op) } -pub(crate) fn mem_read_gp_with_log_and_fill( +pub(crate) fn mem_read_gp_with_log_and_fill( n: usize, address: MemoryAddress, state: &GenerationState, @@ -183,7 +187,7 @@ pub(crate) fn mem_read_gp_with_log_and_fill( (val, op) } -pub(crate) fn mem_write_gp_log_and_fill( +pub(crate) fn mem_write_gp_log_and_fill( n: usize, address: MemoryAddress, state: &GenerationState, @@ -208,7 +212,7 @@ pub(crate) fn mem_write_gp_log_and_fill( op } -pub(crate) fn mem_write_partial_log_and_fill( +pub(crate) fn mem_write_partial_log_and_fill( address: MemoryAddress, state: &GenerationState, row: &mut CpuColumnsView, @@ -230,7 +234,7 @@ pub(crate) fn mem_write_partial_log_and_fill( // Channel 0 already contains the top of the stack. You only need to read // from the second popped element. // If the resulting stack isn't empty, update `stack_top`. 
-pub(crate) fn stack_pop_with_log_and_fill( +pub(crate) fn stack_pop_with_log_and_fill( state: &mut GenerationState, row: &mut CpuColumnsView, ) -> Result<[(U256, MemoryOp); N], ProgramError> { @@ -267,7 +271,7 @@ pub(crate) fn stack_pop_with_log_and_fill( Ok(result) } -fn xor_into_sponge>( +fn xor_into_sponge>( state: &mut T, sponge_state: &mut [u8; KECCAK_WIDTH_BYTES], block: &[u8; KECCAK_RATE_BYTES], @@ -283,7 +287,7 @@ fn xor_into_sponge>( } } -pub(crate) fn keccak_sponge_log>( +pub(crate) fn keccak_sponge_log>( state: &mut T, base_address: MemoryAddress, input: Vec, @@ -339,7 +343,7 @@ pub(crate) fn keccak_sponge_log>( }); } -pub(crate) fn byte_packing_log>( +pub(crate) fn byte_packing_log>( state: &mut T, base_address: MemoryAddress, bytes: Vec, @@ -366,7 +370,7 @@ pub(crate) fn byte_packing_log>( }); } -pub(crate) fn byte_unpacking_log>( +pub(crate) fn byte_unpacking_log>( state: &mut T, base_address: MemoryAddress, val: U256, diff --git a/evm_arithmetization/tests/add11_yml.rs b/evm_arithmetization/tests/add11_yml.rs index 00d7e56b4..027b85d8e 100644 --- a/evm_arithmetization/tests/add11_yml.rs +++ b/evm_arithmetization/tests/add11_yml.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use std::time::Duration; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, BigEndianHash, H256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; @@ -11,12 +11,16 @@ use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use 
plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -34,16 +38,8 @@ fn add11_yml() -> anyhow::Result<()> { let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender); - let to_hashed = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x60, 0x00, 0x55, 0x00]; - let code_hash = keccak(code); + let code_hash = hash_bytecode_u256(code.to_vec()); let beneficiary_account_before = AccountRlp { nonce: 1.into(), @@ -59,19 +55,30 @@ fn add11_yml() -> anyhow::Result<()> { ..AccountRlp::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_before).to_vec(), - )?; - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, 
+ state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: Node::Empty.into(), - storage_tries: vec![(to_hashed, Node::Empty.into())], }; let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); @@ -90,10 +97,11 @@ fn add11_yml() -> anyhow::Result<()> { }; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); contract_code.insert(code_hash, code.to_vec()); - let expected_state_trie_after = { + let expected_state_smt_after = { + let mut smt = Smt::::default(); let beneficiary_account_after = AccountRlp { nonce: 1.into(), ..AccountRlp::default() @@ -106,24 +114,29 @@ fn add11_yml() -> anyhow::Result<()> { let to_account_after = AccountRlp { balance: 0xde0b6b3a76586a0u64.into(), code_hash, - // Storage map: { 0 => 2 } - storage_root: HashedPartialTrie::from(Node::Leaf { - nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), - value: vec![2], - }) - .hash(), ..AccountRlp::default() }; - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - )?; - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?; - expected_state_trie_after + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(to), + &to_account_after, + &HashMap::from([(U256::zero(), 2.into())]), // Storage map: { 0 => 2 } + ); + + smt }; let receipt_0 = LegacyReceiptRlp { @@ -144,7 +157,7 @@ 
fn add11_yml() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -175,3 +188,18 @@ fn add11_yml() -> anyhow::Result<()> { fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/basic_smart_contract.rs b/evm_arithmetization/tests/basic_smart_contract.rs index fd0948d80..430dc0d0b 100644 --- a/evm_arithmetization/tests/basic_smart_contract.rs +++ b/evm_arithmetization/tests/basic_smart_contract.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use std::time::Duration; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::cpu::kernel::opcodes::{get_opcode, get_push_opcode}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; @@ -12,12 +12,16 @@ use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use 
smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -36,20 +40,12 @@ fn test_basic_smart_contract() -> anyhow::Result<()> { let sender = hex!("2c7536e3605d9c16a7a3d7b1898e529396a65c23"); let to = hex!("a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0"); - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender); - let to_state_key = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_state_key.as_bytes()).unwrap(); - let push1 = get_push_opcode(1); let add = get_opcode("ADD"); let stop = get_opcode("STOP"); let code = [push1, 3, push1, 4, add, stop]; let code_gas = 3 + 3 + 3; - let code_hash = keccak(code); + let code_hash = hash_bytecode_u256(code.to_vec()); let beneficiary_account_before = AccountRlp { nonce: 1.into(), @@ -65,35 +61,30 @@ fn test_basic_smart_contract() -> anyhow::Result<()> { ..AccountRlp::default() }; - let state_trie_before = { - let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[beneficiary_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: beneficiary_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&beneficiary_account_before).to_vec(), - } - .into(); - children[sender_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: sender_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&sender_account_before).to_vec(), - } - .into(); - children[to_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: to_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&to_account_before).to_vec(), - } - .into(); - Node::Branch { - children, - value: vec![], - } - } - .into(); + let mut 
state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: Node::Empty.into(), - storage_tries: vec![], }; let txdata_gas = 2 * 16; @@ -117,10 +108,12 @@ fn test_basic_smart_contract() -> anyhow::Result<()> { }; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); contract_code.insert(code_hash, code.to_vec()); - let expected_state_trie_after: HashedPartialTrie = { + let expected_state_smt_after = { + let mut smt = Smt::::default(); + let beneficiary_account_after = AccountRlp { nonce: 1.into(), ..AccountRlp::default() @@ -135,28 +128,22 @@ fn test_basic_smart_contract() -> anyhow::Result<()> { ..to_account_before }; - let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[beneficiary_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: beneficiary_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&beneficiary_account_after).to_vec(), - } - .into(); - children[sender_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: sender_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&sender_account_after).to_vec(), - } - .into(); - children[to_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: to_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&to_account_after).to_vec(), - } - .into(); - Node::Branch { - children, - value: vec![], - } - } - .into(); + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( 
+ &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to), &to_account_after, &HashMap::new()); + + smt + }; let receipt_0 = LegacyReceiptRlp { status: true, @@ -176,7 +163,7 @@ fn test_basic_smart_contract() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -212,3 +199,18 @@ fn eth_to_wei(eth: U256) -> U256 { fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/empty_txn_list.rs b/evm_arithmetization/tests/empty_txn_list.rs index 1205414f6..567e39fb8 100644 --- a/evm_arithmetization/tests/empty_txn_list.rs +++ b/evm_arithmetization/tests/empty_txn_list.rs @@ -7,13 +7,16 @@ use ethereum_types::{BigEndianHash, H256}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, PublicValues, TrieRoots}; use evm_arithmetization::{AllRecursiveCircuits, AllStark, Node, StarkConfig}; -use keccak_hash::keccak; use log::info; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::serialization::{DefaultGateSerializer, DefaultGeneratorSerializer}; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::MemoryDb; 
+use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -33,40 +36,38 @@ fn test_empty_txn_list() -> anyhow::Result<()> { ..Default::default() }; - let state_trie = HashedPartialTrie::from(Node::Empty); + let state_smt = Smt::::default(); let transactions_trie = HashedPartialTrie::from(Node::Empty); let receipts_trie = HashedPartialTrie::from(Node::Empty); - let storage_tries = vec![]; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); // No transactions, so no trie roots change. let trie_roots_after = TrieRoots { - state_root: state_trie.hash(), + state_root: H256::from_uint(&hashout2u(state_smt.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; let mut initial_block_hashes = vec![H256::default(); 256]; - initial_block_hashes[255] = H256::from_uint(&0x200.into()); + initial_block_hashes[255] = H256::from_uint(&hashout2u(state_smt.root)); let inputs = GenerationInputs { signed_txn: None, withdrawals: vec![], tries: TrieInputs { - state_trie, + state_smt: state_smt.serialize(), transactions_trie, receipts_trie, - storage_tries, }, trie_roots_after, contract_code, - checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + checkpoint_state_trie_root: H256::from_uint(&hashout2u(state_smt.root)), block_metadata, txn_number_before: 0.into(), gas_used_before: 0.into(), gas_used_after: 0.into(), block_hashes: BlockHashes { - prev_hashes: initial_block_hashes, + prev_hashes: vec![H256::default(); 256], cur_hash: H256::default(), }, }; @@ -74,8 +75,8 @@ fn test_empty_txn_list() -> anyhow::Result<()> { // Initialize the preprocessed circuits for the zkEVM. 
let all_circuits = AllRecursiveCircuits::::new( &all_stark, - &[16..17, 9..11, 12..13, 14..15, 9..11, 12..13, 17..18], /* Minimal ranges to prove an - * empty list */ + // Minimal ranges to prove an empty list + &[16..17, 9..10, 12..13, 14..15, 9..10, 12..13, 17..18, 4..5], &config, ); diff --git a/evm_arithmetization/tests/erc20.rs b/evm_arithmetization/tests/erc20.rs index 609579af9..d8233068d 100644 --- a/evm_arithmetization/tests/erc20.rs +++ b/evm_arithmetization/tests/erc20.rs @@ -1,3 +1,4 @@ +use std::collections::HashMap; use std::str::FromStr; use std::time::Duration; @@ -10,12 +11,16 @@ use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -53,29 +58,30 @@ fn test_erc20() -> anyhow::Result<()> { let giver = hex!("e7f1725E7734CE288F8367e1Bb143E90bb3F0512"); let token = hex!("5FbDB2315678afecb367f032d93F642f64180aa3"); - let sender_state_key = keccak(sender); - let giver_state_key = keccak(giver); - let token_state_key = keccak(token); - - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let giver_nibbles = Nibbles::from_bytes_be(giver_state_key.as_bytes()).unwrap(); - let token_nibbles = Nibbles::from_bytes_be(token_state_key.as_bytes()).unwrap(); - - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert(sender_nibbles, 
rlp::encode(&sender_account()).to_vec())?; - state_trie_before.insert(giver_nibbles, rlp::encode(&giver_account()?).to_vec())?; - state_trie_before.insert(token_nibbles, rlp::encode(&token_account()?).to_vec())?; - - let storage_tries = vec![ - (giver_state_key, giver_storage()?), - (token_state_key, token_storage()?), - ]; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account(), + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(giver), + &giver_account(), + &giver_storage(), + ); + set_account( + &mut state_smt_before, + H160(token), + &token_account(), + &token_storage(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: HashedPartialTrie::from(Node::Empty), receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries, }; let txn = signed_tx(); @@ -96,26 +102,32 @@ fn test_erc20() -> anyhow::Result<()> { }; let contract_code = [giver_bytecode(), token_bytecode(), vec![]] - .map(|v| (keccak(v.clone()), v)) + .map(|v| (hash_bytecode_u256(v.clone()), v)) .into(); - let expected_state_trie_after: HashedPartialTrie = { - let mut state_trie_after = HashedPartialTrie::from(Node::Empty); + let expected_smt_after: Smt = { + let mut smt = Smt::default(); let sender_account = sender_account(); let sender_account_after = AccountRlp { nonce: sender_account.nonce + 1, balance: sender_account.balance - gas_used * 0xa, ..sender_account }; - state_trie_after.insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - state_trie_after.insert(giver_nibbles, rlp::encode(&giver_account()?).to_vec())?; - let token_account_after = AccountRlp { - storage_root: token_storage_after()?.hash(), - ..token_account()? 
- }; - state_trie_after.insert(token_nibbles, rlp::encode(&token_account_after).to_vec())?; - - state_trie_after + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(giver), &giver_account(), &giver_storage()); + set_account( + &mut smt, + H160(token), + &token_account(), + &token_storage_after(), + ); + + smt }; let receipt_0 = LegacyReceiptRlp { @@ -152,7 +164,7 @@ fn test_erc20() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -192,80 +204,68 @@ fn token_bytecode() -> Vec { hex!("608060405234801561001057600080fd5b50600436106100935760003560e01c8063313ce56711610066578063313ce567146100fe57806370a082311461010d57806395d89b4114610136578063a9059cbb1461013e578063dd62ed3e1461015157600080fd5b806306fdde0314610098578063095ea7b3146100b657806318160ddd146100d957806323b872dd146100eb575b600080fd5b6100a061018a565b6040516100ad919061056a565b60405180910390f35b6100c96100c43660046105d4565b61021c565b60405190151581526020016100ad565b6002545b6040519081526020016100ad565b6100c96100f93660046105fe565b610236565b604051601281526020016100ad565b6100dd61011b36600461063a565b6001600160a01b031660009081526020819052604090205490565b6100a061025a565b6100c961014c3660046105d4565b610269565b6100dd61015f36600461065c565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205490565b6060600380546101999061068f565b80601f01602080910402602001604051908101604052809291908181526020018280546101c59061068f565b80156102125780601f106101e757610100808354040283529160200191610212565b820191906000526020600020905b8154815290600101906020018083116101f557829003601f168201915b5050505050905090565b60003361022a818585610277565b60019150505b92915050565b600033610244858285610289565b61024f85858561030c565b506001949350505050565b60606004805461
01999061068f565b60003361022a81858561030c565b610284838383600161036b565b505050565b6001600160a01b03838116600090815260016020908152604080832093861683529290522054600019811461030657818110156102f757604051637dc7a0d960e11b81526001600160a01b038416600482015260248101829052604481018390526064015b60405180910390fd5b6103068484848403600061036b565b50505050565b6001600160a01b03831661033657604051634b637e8f60e11b8152600060048201526024016102ee565b6001600160a01b0382166103605760405163ec442f0560e01b8152600060048201526024016102ee565b610284838383610440565b6001600160a01b0384166103955760405163e602df0560e01b8152600060048201526024016102ee565b6001600160a01b0383166103bf57604051634a1406b160e11b8152600060048201526024016102ee565b6001600160a01b038085166000908152600160209081526040808320938716835292905220829055801561030657826001600160a01b0316846001600160a01b03167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9258460405161043291815260200190565b60405180910390a350505050565b6001600160a01b03831661046b57806002600082825461046091906106c9565b909155506104dd9050565b6001600160a01b038316600090815260208190526040902054818110156104be5760405163391434e360e21b81526001600160a01b038516600482015260248101829052604481018390526064016102ee565b6001600160a01b03841660009081526020819052604090209082900390555b6001600160a01b0382166104f957600280548290039055610518565b6001600160a01b03821660009081526020819052604090208054820190555b816001600160a01b0316836001600160a01b03167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef8360405161055d91815260200190565b60405180910390a3505050565b600060208083528351808285015260005b818110156105975785810183015185820160400152820161057b565b506000604082860101526040601f19601f8301168501019250505092915050565b80356001600160a01b03811681146105cf57600080fd5b919050565b600080604083850312156105e757600080fd5b6105f0836105b8565b946020939093013593505050565b60008060006060848603121561061357600080fd5b61061c846105b8565b925061062a602085016105b8565b9150604084013590509250925092565b60006020828403121561064c576000
80fd5b610655826105b8565b9392505050565b6000806040838503121561066f57600080fd5b610678836105b8565b9150610686602084016105b8565b90509250929050565b600181811c908216806106a357607f821691505b6020821081036106c357634e487b7160e01b600052602260045260246000fd5b50919050565b8082018082111561023057634e487b7160e01b600052601160045260246000fdfea2646970667358221220266a323ae4a816f6c6342a5be431fedcc0d45c44b02ea75f5474eb450b5d45b364736f6c63430008140033").into() } -fn insert_storage(trie: &mut HashedPartialTrie, slot: U256, value: U256) -> anyhow::Result<()> { - let mut bytes = [0; 32]; - slot.to_big_endian(&mut bytes); - let key = keccak(bytes); - let nibbles = Nibbles::from_bytes_be(key.as_bytes()).unwrap(); - let r = rlp::encode(&value); - let r = r.freeze().to_vec(); - trie.insert(nibbles, r)?; - Ok(()) -} - fn sd2u(s: &str) -> U256 { U256::from_dec_str(s).unwrap() } -fn giver_storage() -> anyhow::Result { - let mut trie = HashedPartialTrie::from(Node::Empty); - insert_storage( - &mut trie, +fn giver_storage() -> HashMap { + let mut storage = HashMap::new(); + storage.insert( U256::zero(), sd2u("546584486846459126461364135121053344201067465379"), - )?; - Ok(trie) + ); + storage } -fn token_storage() -> anyhow::Result { - let mut trie = HashedPartialTrie::from(Node::Empty); - insert_storage( - &mut trie, +fn token_storage() -> HashMap { + let mut storage = HashMap::new(); + storage.insert( sd2u("82183438603287090451672504949863617512989139203883434767553028632841710582583"), sd2u("1000000000000000000000"), - )?; - Ok(trie) + ); + storage } -fn token_storage_after() -> anyhow::Result { - let mut trie = HashedPartialTrie::from(Node::Empty); - insert_storage( - &mut trie, +fn token_storage_after() -> HashMap { + let mut storage = HashMap::new(); + storage.insert( sd2u("82183438603287090451672504949863617512989139203883434767553028632841710582583"), sd2u("900000000000000000000"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( 
sd2u("53006154680716014998529145169423020330606407246856709517064848190396281160729"), sd2u("100000000000000000000"), - )?; - Ok(trie) + ); + storage } -fn giver_account() -> anyhow::Result { - Ok(AccountRlp { +fn giver_account() -> AccountRlp { + let code = giver_bytecode(); + let len = code.len(); + AccountRlp { nonce: 1.into(), balance: 0.into(), - storage_root: giver_storage()?.hash(), - code_hash: keccak(giver_bytecode()), - }) + code_hash: hash_bytecode_u256(code), + code_length: len.into(), + } } -fn token_account() -> anyhow::Result { - Ok(AccountRlp { +fn token_account() -> AccountRlp { + let code = token_bytecode(); + let len = code.len(); + AccountRlp { nonce: 1.into(), balance: 0.into(), - storage_root: token_storage()?.hash(), - code_hash: keccak(token_bytecode()), - }) + code_hash: hash_bytecode_u256(code), + code_length: len.into(), + } } fn sender_account() -> AccountRlp { AccountRlp { nonce: 0.into(), balance: sd2u("10000000000000000000000"), - storage_root: Default::default(), - code_hash: keccak([]), + ..Default::default() } } @@ -284,3 +284,18 @@ fn bloom() -> [U256; 8] { .collect::>(); bloom.try_into().unwrap() } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/erc721.rs b/evm_arithmetization/tests/erc721.rs index 86dd34002..414729e4b 100644 --- a/evm_arithmetization/tests/erc721.rs +++ b/evm_arithmetization/tests/erc721.rs @@ -1,3 +1,4 @@ +use std::collections::HashMap; use std::str::FromStr; use std::time::Duration; @@ -16,6 +17,11 @@ use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use 
plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::{hash_serialize, Smt}; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -55,23 +61,24 @@ fn test_erc721() -> anyhow::Result<()> { let owner = hex!("5B38Da6a701c568545dCfcB03FcB875f56beddC4"); let contract = hex!("f2B1114C644cBb3fF63Bf1dD284c8Cd716e95BE9"); - let owner_state_key = keccak(owner); - let contract_state_key = keccak(contract); - - let owner_nibbles = Nibbles::from_bytes_be(owner_state_key.as_bytes()).unwrap(); - let contract_nibbles = Nibbles::from_bytes_be(contract_state_key.as_bytes()).unwrap(); - - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert(owner_nibbles, rlp::encode(&owner_account()).to_vec())?; - state_trie_before.insert(contract_nibbles, rlp::encode(&contract_account()?).to_vec())?; - - let storage_tries = vec![(contract_state_key, contract_storage()?)]; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(owner), + &owner_account(), + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(contract), + &contract_account()?, + &contract_storage(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: HashedPartialTrie::from(Node::Empty), receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries, }; let txn = signed_tx(); @@ -79,28 +86,27 @@ fn test_erc721() -> anyhow::Result<()> { let gas_used = 58_418.into(); let contract_code = [contract_bytecode(), vec![]] - .map(|v| (keccak(v.clone()), v)) + .map(|v| (hash_bytecode_u256(v.clone()), v)) .into(); - let expected_state_trie_after: HashedPartialTrie = { - let mut state_trie_after = 
HashedPartialTrie::from(Node::Empty); + let expected_state_smt_after = { + let mut smt = Smt::::default(); let owner_account = owner_account(); let owner_account_after = AccountRlp { nonce: owner_account.nonce + 1, balance: owner_account.balance - gas_used * 0xa, ..owner_account }; - state_trie_after.insert(owner_nibbles, rlp::encode(&owner_account_after).to_vec())?; - let contract_account_after = AccountRlp { - storage_root: contract_storage_after()?.hash(), - ..contract_account()? - }; - state_trie_after.insert( - contract_nibbles, - rlp::encode(&contract_account_after).to_vec(), - )?; - - state_trie_after + set_account(&mut smt, H160(owner), &owner_account_after, &HashMap::new()); + let contract_account_after = contract_account()?; + set_account( + &mut smt, + H160(contract), + &contract_account_after, + &contract_storage_after(), + ); + + smt }; let logs = vec![LogRlp { @@ -135,8 +141,10 @@ fn test_erc721() -> anyhow::Result<()> { } .into(); + hash_serialize(&expected_state_smt_after.serialize()); + dbg!("done"); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -191,18 +199,6 @@ fn contract_bytecode() -> Vec { 
hex!("608060405234801561000f575f80fd5b5060043610610109575f3560e01c8063715018a6116100a0578063a22cb4651161006f578063a22cb465146102a1578063b88d4fde146102bd578063c87b56dd146102d9578063e985e9c514610309578063f2fde38b1461033957610109565b8063715018a61461023f5780638da5cb5b1461024957806395d89b4114610267578063a14481941461028557610109565b806323b872dd116100dc57806323b872dd146101a757806342842e0e146101c35780636352211e146101df57806370a082311461020f57610109565b806301ffc9a71461010d57806306fdde031461013d578063081812fc1461015b578063095ea7b31461018b575b5f80fd5b61012760048036038101906101229190611855565b610355565b604051610134919061189a565b60405180910390f35b610145610436565b604051610152919061193d565b60405180910390f35b61017560048036038101906101709190611990565b6104c5565b60405161018291906119fa565b60405180910390f35b6101a560048036038101906101a09190611a3d565b6104e0565b005b6101c160048036038101906101bc9190611a7b565b6104f6565b005b6101dd60048036038101906101d89190611a7b565b6105f5565b005b6101f960048036038101906101f49190611990565b610614565b60405161020691906119fa565b60405180910390f35b61022960048036038101906102249190611acb565b610625565b6040516102369190611b05565b60405180910390f35b6102476106db565b005b6102516106ee565b60405161025e91906119fa565b60405180910390f35b61026f610716565b60405161027c919061193d565b60405180910390f35b61029f600480360381019061029a9190611a3d565b6107a6565b005b6102bb60048036038101906102b69190611b48565b6107bc565b005b6102d760048036038101906102d29190611cb2565b6107d2565b005b6102f360048036038101906102ee9190611990565b6107ef565b604051610300919061193d565b60405180910390f35b610323600480360381019061031e9190611d32565b610855565b604051610330919061189a565b60405180910390f35b610353600480360381019061034e9190611acb565b6108e3565b005b5f7f80ac58cd000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916827bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916148061041f57507f5b5e139f000000000000000000000000000000000000000000000000000000007bffffffffff
ffffffffffffffffffffffffffffffffffffffffffffff1916827bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916145b8061042f575061042e82610967565b5b9050919050565b60605f805461044490611d9d565b80601f016020809104026020016040519081016040528092919081815260200182805461047090611d9d565b80156104bb5780601f10610492576101008083540402835291602001916104bb565b820191905f5260205f20905b81548152906001019060200180831161049e57829003601f168201915b5050505050905090565b5f6104cf826109d0565b506104d982610a56565b9050919050565b6104f282826104ed610a8f565b610a96565b5050565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610566575f6040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161055d91906119fa565b60405180910390fd5b5f6105798383610574610a8f565b610aa8565b90508373ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16146105ef578382826040517f64283d7b0000000000000000000000000000000000000000000000000000000081526004016105e693929190611dcd565b60405180910390fd5b50505050565b61060f83838360405180602001604052805f8152506107d2565b505050565b5f61061e826109d0565b9050919050565b5f8073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610696575f6040517f89c62b6400000000000000000000000000000000000000000000000000000000815260040161068d91906119fa565b60405180910390fd5b60035f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f20549050919050565b6106e3610cb3565b6106ec5f610d3a565b565b5f60065f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff16905090565b60606001805461072590611d9d565b80601f016020809104026020016040519081016040528092919081815260200182805461075190611d9d565b801561079c5780601f106107735761010080835404028352916020019161079c565b820191905f5260205f20905b81548152906001019060200180831161077f57829003601f168201915b5050505050905090565b6107ae610cb3565b6107b88282610dfd565b5050565b6107ce6107c7610a8f565b8383610e1a
565b5050565b6107dd8484846104f6565b6107e984848484610f83565b50505050565b60606107fa826109d0565b505f610804611135565b90505f8151116108225760405180602001604052805f81525061084d565b8061082c8461114b565b60405160200161083d929190611e3c565b6040516020818303038152906040525b915050919050565b5f60055f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f9054906101000a900460ff16905092915050565b6108eb610cb3565b5f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff160361095b575f6040517f1e4fbdf700000000000000000000000000000000000000000000000000000000815260040161095291906119fa565b60405180910390fd5b61096481610d3a565b50565b5f7f01ffc9a7000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916827bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916149050919050565b5f806109db83611215565b90505f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1603610a4d57826040517f7e273289000000000000000000000000000000000000000000000000000000008152600401610a449190611b05565b60405180910390fd5b80915050919050565b5f60045f8381526020019081526020015f205f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050919050565b5f33905090565b610aa3838383600161124e565b505050565b5f80610ab384611215565b90505f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1614610af457610af381848661140d565b5b5f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614610b7f57610b335f855f8061124e565b600160035f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f82825403925050819055505b5f73ffffffffffffffffffffffffffffffffffffffff168573ffffffffffffffffffffffffffffffffffffffff1614610bfe576001
60035f8773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f82825401925050819055505b8460025f8681526020019081526020015f205f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550838573ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef60405160405180910390a4809150509392505050565b610cbb610a8f565b73ffffffffffffffffffffffffffffffffffffffff16610cd96106ee565b73ffffffffffffffffffffffffffffffffffffffff1614610d3857610cfc610a8f565b6040517f118cdaa7000000000000000000000000000000000000000000000000000000008152600401610d2f91906119fa565b60405180910390fd5b565b5f60065f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690508160065f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff167f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e060405160405180910390a35050565b610e16828260405180602001604052805f8152506114d0565b5050565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610e8a57816040517f5b08ba18000000000000000000000000000000000000000000000000000000008152600401610e8191906119fa565b60405180910390fd5b8060055f8573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f6101000a81548160ff0219169083151502179055508173ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167f17307eab39ab6107e8899845ad3d59bd9653f200f220920489ca2b5937696c3183604051610f76919061189a565b60405180910390a3505050565b5f8373ffffffffffffffffffffffffffffffffffffffff163b
111561112f578273ffffffffffffffffffffffffffffffffffffffff1663150b7a02610fc6610a8f565b8685856040518563ffffffff1660e01b8152600401610fe89493929190611eb1565b6020604051808303815f875af192505050801561102357506040513d601f19601f820116820180604052508101906110209190611f0f565b60015b6110a4573d805f8114611051576040519150601f19603f3d011682016040523d82523d5f602084013e611056565b606091505b505f81510361109c57836040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161109391906119fa565b60405180910390fd5b805181602001fd5b63150b7a0260e01b7bffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916817bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19161461112d57836040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161112491906119fa565b60405180910390fd5b505b50505050565b606060405180602001604052805f815250905090565b60605f6001611159846114eb565b0190505f8167ffffffffffffffff81111561117757611176611b8e565b5b6040519080825280601f01601f1916602001820160405280156111a95781602001600182028036833780820191505090505b5090505f82602001820190505b60011561120a578080600190039150507f3031323334353637383961626364656600000000000000000000000000000000600a86061a8153600a85816111ff576111fe611f3a565b5b0494505f85036111b6575b819350505050919050565b5f60025f8381526020019081526020015f205f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050919050565b808061128657505f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b156113b8575f611295846109d0565b90505f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff16141580156112ff57508273ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561131257506113108184610855565b155b1561135457826040517fa9fbf51f00000000000000000000000000000000000000000000000000000000815260040161134b91906119fa565b60405180910390fd5b81156113b657838573ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffff
ffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92560405160405180910390a45b505b8360045f8581526020019081526020015f205f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050505050565b61141883838361163c565b6114cb575f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff160361148c57806040517f7e2732890000000000000000000000000000000000000000000000000000000081526004016114839190611b05565b60405180910390fd5b81816040517f177e802f0000000000000000000000000000000000000000000000000000000081526004016114c2929190611f67565b60405180910390fd5b505050565b6114da83836116fc565b6114e65f848484610f83565b505050565b5f805f90507a184f03e93ff9f4daa797ed6e38ed64bf6a1f0100000000000000008310611547577a184f03e93ff9f4daa797ed6e38ed64bf6a1f010000000000000000838161153d5761153c611f3a565b5b0492506040810190505b6d04ee2d6d415b85acef81000000008310611584576d04ee2d6d415b85acef8100000000838161157a57611579611f3a565b5b0492506020810190505b662386f26fc1000083106115b357662386f26fc1000083816115a9576115a8611f3a565b5b0492506010810190505b6305f5e10083106115dc576305f5e10083816115d2576115d1611f3a565b5b0492506008810190505b61271083106116015761271083816115f7576115f6611f3a565b5b0492506004810190505b60648310611624576064838161161a57611619611f3a565b5b0492506002810190505b600a8310611633576001810190505b80915050919050565b5f8073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff16141580156116f357508273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff1614806116b457506116b38484610855565b5b806116f257508273ffffffffffffffffffffffffffffffffffffffff166116da83610a56565b73ffffffffffffffffffffffffffffffffffffffff16145b5b90509392505050565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff160361176c575f6040517f64a0ae9200000000000000000000000000000000000000000000000000000000815260040161176391906119fa565b60405180910390fd5b
5f61177883835f610aa8565b90505f73ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16146117ea575f6040517f73c6ac6e0000000000000000000000000000000000000000000000000000000081526004016117e191906119fa565b60405180910390fd5b505050565b5f604051905090565b5f80fd5b5f80fd5b5f7fffffffff0000000000000000000000000000000000000000000000000000000082169050919050565b61183481611800565b811461183e575f80fd5b50565b5f8135905061184f8161182b565b92915050565b5f6020828403121561186a576118696117f8565b5b5f61187784828501611841565b91505092915050565b5f8115159050919050565b61189481611880565b82525050565b5f6020820190506118ad5f83018461188b565b92915050565b5f81519050919050565b5f82825260208201905092915050565b5f5b838110156118ea5780820151818401526020810190506118cf565b5f8484015250505050565b5f601f19601f8301169050919050565b5f61190f826118b3565b61191981856118bd565b93506119298185602086016118cd565b611932816118f5565b840191505092915050565b5f6020820190508181035f8301526119558184611905565b905092915050565b5f819050919050565b61196f8161195d565b8114611979575f80fd5b50565b5f8135905061198a81611966565b92915050565b5f602082840312156119a5576119a46117f8565b5b5f6119b28482850161197c565b91505092915050565b5f73ffffffffffffffffffffffffffffffffffffffff82169050919050565b5f6119e4826119bb565b9050919050565b6119f4816119da565b82525050565b5f602082019050611a0d5f8301846119eb565b92915050565b611a1c816119da565b8114611a26575f80fd5b50565b5f81359050611a3781611a13565b92915050565b5f8060408385031215611a5357611a526117f8565b5b5f611a6085828601611a29565b9250506020611a718582860161197c565b9150509250929050565b5f805f60608486031215611a9257611a916117f8565b5b5f611a9f86828701611a29565b9350506020611ab086828701611a29565b9250506040611ac18682870161197c565b9150509250925092565b5f60208284031215611ae057611adf6117f8565b5b5f611aed84828501611a29565b91505092915050565b611aff8161195d565b82525050565b5f602082019050611b185f830184611af6565b92915050565b611b2781611880565b8114611b31575f80fd5b50565b5f81359050611b4281611b1e565b92915050565b5f8060408385031215611b5e
57611b5d6117f8565b5b5f611b6b85828601611a29565b9250506020611b7c85828601611b34565b9150509250929050565b5f80fd5b5f80fd5b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b611bc4826118f5565b810181811067ffffffffffffffff82111715611be357611be2611b8e565b5b80604052505050565b5f611bf56117ef565b9050611c018282611bbb565b919050565b5f67ffffffffffffffff821115611c2057611c1f611b8e565b5b611c29826118f5565b9050602081019050919050565b828183375f83830152505050565b5f611c56611c5184611c06565b611bec565b905082815260208101848484011115611c7257611c71611b8a565b5b611c7d848285611c36565b509392505050565b5f82601f830112611c9957611c98611b86565b5b8135611ca9848260208601611c44565b91505092915050565b5f805f8060808587031215611cca57611cc96117f8565b5b5f611cd787828801611a29565b9450506020611ce887828801611a29565b9350506040611cf98782880161197c565b925050606085013567ffffffffffffffff811115611d1a57611d196117fc565b5b611d2687828801611c85565b91505092959194509250565b5f8060408385031215611d4857611d476117f8565b5b5f611d5585828601611a29565b9250506020611d6685828601611a29565b9150509250929050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52602260045260245ffd5b5f6002820490506001821680611db457607f821691505b602082108103611dc757611dc6611d70565b5b50919050565b5f606082019050611de05f8301866119eb565b611ded6020830185611af6565b611dfa60408301846119eb565b949350505050565b5f81905092915050565b5f611e16826118b3565b611e208185611e02565b9350611e308185602086016118cd565b80840191505092915050565b5f611e478285611e0c565b9150611e538284611e0c565b91508190509392505050565b5f81519050919050565b5f82825260208201905092915050565b5f611e8382611e5f565b611e8d8185611e69565b9350611e9d8185602086016118cd565b611ea6816118f5565b840191505092915050565b5f608082019050611ec45f8301876119eb565b611ed160208301866119eb565b611ede6040830185611af6565b8181036060830152611ef08184611e79565b905095945050505050565b5f81519050611f098161182b565b92915050565b5f60208284031215611f2457611f236117f8565b5b5f611f3184828501611efb565b91505092915050565b7f4e
487b71000000000000000000000000000000000000000000000000000000005f52601260045260245ffd5b5f604082019050611f7a5f8301856119eb565b611f876020830184611af6565b939250505056fea2646970667358221220432b30673e00c0eb009e1718c271f4cfdfbeded17345829703b06d322360990164736f6c63430008160033").into() } -fn insert_storage(trie: &mut HashedPartialTrie, slot: U256, value: U256) -> anyhow::Result<()> { - let mut bytes = [0; 32]; - slot.to_big_endian(&mut bytes); - let key = keccak(bytes); - let nibbles = Nibbles::from_bytes_be(key.as_bytes()).unwrap(); - let r = rlp::encode(&value); - let r = r.freeze().to_vec(); - trie.insert(nibbles, r)?; - - Ok(()) -} - fn sd2u(s: &str) -> U256 { U256::from_dec_str(s).unwrap() } @@ -211,72 +207,61 @@ fn sh2u(s: &str) -> U256 { U256::from_str_radix(s, 16).unwrap() } -fn contract_storage() -> anyhow::Result { - let mut trie = HashedPartialTrie::from(Node::Empty); - insert_storage( - &mut trie, +fn contract_storage() -> HashMap { + let mut storage = HashMap::new(); + storage.insert( U256::zero(), sh2u("0x54657374546f6b656e0000000000000000000000000000000000000000000012"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( U256::one(), sh2u("0x5445535400000000000000000000000000000000000000000000000000000008"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( sd2u("6"), sh2u("0x5b38da6a701c568545dcfcb03fcb875f56beddc4"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( sh2u("0x343ff8127bd64f680be4e996254dc3528603c6ecd54364b4cf956ebdd28f0028"), sh2u("0x5b38da6a701c568545dcfcb03fcb875f56beddc4"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( sh2u("0x118c1ea466562cb796e30ef705e4db752f5c39d773d22c5efd8d46f67194e78a"), sd2u("1"), - )?; - Ok(trie) + ); + storage } -fn contract_storage_after() -> anyhow::Result { - let mut trie = HashedPartialTrie::from(Node::Empty); - insert_storage( - &mut trie, +fn contract_storage_after() -> HashMap { + let mut storage = HashMap::new(); + storage.insert( U256::zero(), 
sh2u("0x54657374546f6b656e0000000000000000000000000000000000000000000012"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( U256::one(), sh2u("0x5445535400000000000000000000000000000000000000000000000000000008"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( sd2u("6"), sh2u("0x5b38da6a701c568545dcfcb03fcb875f56beddc4"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( sh2u("0x343ff8127bd64f680be4e996254dc3528603c6ecd54364b4cf956ebdd28f0028"), sh2u("0xab8483f64d9c6d1ecf9b849ae677dd3315835cb2"), - )?; - insert_storage( - &mut trie, + ); + storage.insert( sh2u("0xf3aa6a8a9f7e3707e36cc99c499a27514922afe861ec3d80a1a314409cba92f9"), sd2u("1"), - )?; - Ok(trie) + ); + storage } fn owner_account() -> AccountRlp { AccountRlp { nonce: 2.into(), balance: 0x1000000.into(), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak([]), + ..Default::default() } } @@ -284,8 +269,8 @@ fn contract_account() -> anyhow::Result { Ok(AccountRlp { nonce: 0.into(), balance: 0.into(), - storage_root: contract_storage()?.hash(), - code_hash: keccak(contract_bytecode()), + code_hash: hash_bytecode_u256(contract_bytecode()), + ..Default::default() }) } @@ -313,3 +298,18 @@ fn add_to_bloom(bloom: &mut [u8; 256], bloom_entry: &[u8]) { bloom[byte_index as usize] |= bit_value; } } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/log_opcode.rs b/evm_arithmetization/tests/log_opcode.rs index 75cdd44f5..2a57745fc 100644 --- a/evm_arithmetization/tests/log_opcode.rs +++ b/evm_arithmetization/tests/log_opcode.rs @@ -4,16 +4,13 @@ use std::time::Duration; use bytes::Bytes; use 
env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, BigEndianHash, H256, U256}; -use evm_arithmetization::generation::mpt::transaction_testing::{ - AddressOption, LegacyTransactionRlp, -}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; -use evm_arithmetization::{AllRecursiveCircuits, AllStark, Node, StarkConfig}; +use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; @@ -21,6 +18,10 @@ use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; type F = GoldilocksField; const D: usize = 2; @@ -64,7 +65,7 @@ fn test_log_opcodes() -> anyhow::Result<()> { ; let gas_used = 21_000 + code_gas; - let code_hash = keccak(code); + let code_hash = hash_bytecode_u256(code.to_vec()); // Set accounts before the transaction. let beneficiary_account_before = AccountRlp { @@ -84,13 +85,25 @@ fn test_log_opcodes() -> anyhow::Result<()> { }; // Initialize the state trie with three accounts. 
- let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_before).to_vec(), - )?; - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); // We now add two receipts with logs and data. This updates the receipt trie as // well. @@ -122,10 +135,9 @@ fn test_log_opcodes() -> anyhow::Result<()> { )?; let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: receipts_trie.clone(), - storage_tries: vec![(to_hashed, Node::Empty.into())], }; // Prove a transaction which carries out two LOG opcodes. @@ -146,7 +158,7 @@ fn test_log_opcodes() -> anyhow::Result<()> { }; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); contract_code.insert(code_hash, code.to_vec()); // Update the state and receipt tries after the transaction, so that we have the @@ -253,532 +265,613 @@ fn test_log_opcodes() -> anyhow::Result<()> { verify_proof(&all_stark, proof, &config) } +// TODO: fix // Tests proving two transactions, one of which with logs, and aggregating them. -#[test] -#[ignore] // Too slow to run on CI. 
-fn test_log_with_aggreg() -> anyhow::Result<()> { - init_logger(); - - let code = [ - 0x64, 0xA1, 0xB2, 0xC3, 0xD4, 0xE5, 0x60, 0x0, 0x52, // MSTORE(0x0, 0xA1B2C3D4E5) - 0x60, 0x0, 0x60, 0x0, 0xA0, // LOG0(0x0, 0x0) - 0x60, 99, 0x60, 98, 0x60, 5, 0x60, 27, 0xA2, // LOG2(27, 5, 98, 99) - 0x00, - ]; - - let code_gas = 3 + 3 + 3 // PUSHs and MSTORE - + 3 + 3 + 375 // PUSHs and LOG0 - + 3 + 3 + 3 + 3 + 375 + 375*2 + 8*5 // PUSHs and LOG2 - + 3 // Memory expansion - ; - - let gas_used = 21_000 + code_gas; - - let code_hash = keccak(code); - - // First transaction. - let all_stark = AllStark::::default(); - let config = StarkConfig::standard_fast_config(); - - let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); - let sender_first = hex!("af1276cbb260bb13deddb4209ae99ae6e497f446"); - let to_first = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552e89"); - - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender_first); - let to_hashed = keccak(to_first); - let to_hashed_2 = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - let to_second_nibbles = Nibbles::from_bytes_be(to_hashed_2.as_bytes()).unwrap(); - - let beneficiary_account_before = AccountRlp { - nonce: 1.into(), - ..AccountRlp::default() - }; - let sender_balance_before = 1000000000000000000u64.into(); - let sender_account_before = AccountRlp { - balance: sender_balance_before, - ..AccountRlp::default() - }; - let to_account_before = AccountRlp { - ..AccountRlp::default() - }; - let to_account_second_before = AccountRlp { - code_hash, - ..AccountRlp::default() - }; - - // In the first transaction, the sender account sends `txn_value` to - // `to_account`. 
- let gas_price = 10; - let txn_value = 0xau64; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_before).to_vec(), - )?; - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; - state_trie_before.insert( - to_second_nibbles, - rlp::encode(&to_account_second_before).to_vec(), - )?; - let checkpoint_state_trie_root = state_trie_before.hash(); - - let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries: vec![], - }; - - let txn = hex!("f85f800a82520894095e7baea6a6c7c4c2dfeb977efac326af552d870a8026a0122f370ed4023a6c253350c6bfb87d7d7eb2cd86447befee99e0a26b70baec20a07100ab1b3977f2b4571202b9f4b68850858caf5469222794600b5ce1cfb348ad"); - - let block_1_metadata = BlockMetadata { - block_beneficiary: Address::from(beneficiary), - block_timestamp: 0x03e8.into(), - block_number: 1.into(), - block_difficulty: 0x020000.into(), - block_gaslimit: 0x445566u32.into(), - block_chain_id: 1.into(), - block_base_fee: 0xa.into(), - block_gas_used: (22570 + 21000).into(), - block_bloom: [ - 0.into(), - 0.into(), - U256::from_dec_str( - "55213970774324510299479508399853534522527075462195808724319849722937344", - ) - .unwrap(), - U256::from_dec_str("1361129467683753853853498429727072845824").unwrap(), - 33554432.into(), - U256::from_dec_str("9223372036854775808").unwrap(), - U256::from_dec_str( - "3618502788666131106986593281521497120414687020801267626233049500247285563392", - ) - .unwrap(), - U256::from_dec_str("2722259584404615024560450425766186844160").unwrap(), - ], - block_random: Default::default(), - }; - - let beneficiary_account_after = AccountRlp { - nonce: 1.into(), - ..AccountRlp::default() - }; - - let sender_balance_after = sender_balance_before - gas_price 
* 21000 - txn_value; - let sender_account_after = AccountRlp { - balance: sender_balance_after, - nonce: 1.into(), - ..AccountRlp::default() - }; - let to_account_after = AccountRlp { - balance: txn_value.into(), - ..AccountRlp::default() - }; - - let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); - contract_code.insert(code_hash, code.to_vec()); - - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - )?; - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?; - expected_state_trie_after.insert( - to_second_nibbles, - rlp::encode(&to_account_second_before).to_vec(), - )?; - - // Compute new receipt trie. - let mut receipts_trie = HashedPartialTrie::from(Node::Empty); - let receipt_0 = LegacyReceiptRlp { - status: true, - cum_gas_used: 21000u64.into(), - bloom: [0x00; 256].to_vec().into(), - logs: vec![], - }; - receipts_trie.insert( - Nibbles::from_str("0x80").unwrap(), - rlp::encode(&receipt_0).to_vec(), - )?; - - let mut transactions_trie: HashedPartialTrie = Node::Leaf { - nibbles: Nibbles::from_str("0x80").unwrap(), - value: txn.to_vec(), - } - .into(); - - let tries_after = TrieRoots { - state_root: expected_state_trie_after.hash(), - transactions_root: transactions_trie.hash(), - receipts_root: receipts_trie.clone().hash(), - }; - - let block_1_hash = - H256::from_str("0x0101010101010101010101010101010101010101010101010101010101010101")?; - let mut block_hashes = vec![H256::default(); 256]; - - let inputs_first = GenerationInputs { - signed_txn: Some(txn.to_vec()), - withdrawals: vec![], - tries: tries_before, - trie_roots_after: tries_after, - contract_code, - checkpoint_state_trie_root, - block_metadata: block_1_metadata.clone(), - txn_number_before: 
0.into(), - gas_used_before: 0.into(), - gas_used_after: 21000u64.into(), - block_hashes: BlockHashes { - prev_hashes: block_hashes.clone(), - cur_hash: block_1_hash, - }, - }; - - // Preprocess all circuits. - let all_circuits = AllRecursiveCircuits::::new( - &all_stark, - &[16..17, 12..15, 14..18, 14..15, 9..10, 12..13, 17..20], - &config, - ); - - let mut timing = TimingTree::new("prove root first", log::Level::Info); - let (root_proof_first, public_values_first) = - all_circuits.prove_root(&all_stark, &config, inputs_first, &mut timing, None)?; - - timing.filter(Duration::from_millis(100)).print(); - all_circuits.verify_root(root_proof_first.clone())?; - - // The gas used and transaction number are fed to the next transaction, so the - // two proofs can be correctly aggregated. - let gas_used_second = public_values_first.extra_block_data.gas_used_after; - - // Prove second transaction. In this second transaction, the code with logs is - // executed. - - let state_trie_before = expected_state_trie_after; - - let tries_before = TrieInputs { - state_trie: state_trie_before, - transactions_trie: transactions_trie.clone(), - receipts_trie: receipts_trie.clone(), - storage_tries: vec![], - }; - - // Prove a transaction which carries out two LOG opcodes. - let txn_gas_price = 10; - let txn_2 = hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b"); - - let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); - contract_code.insert(code_hash, code.to_vec()); - - // Update the state and receipt tries after the transaction, so that we have the - // correct expected tries: Update accounts. 
- let beneficiary_account_after = AccountRlp { - nonce: 1.into(), - ..AccountRlp::default() - }; - - let sender_balance_after = sender_balance_after - gas_used * txn_gas_price; - let sender_account_after = AccountRlp { - balance: sender_balance_after, - nonce: 2.into(), - ..AccountRlp::default() - }; - let balance_after = to_account_after.balance; - let to_account_after = AccountRlp { - balance: balance_after, - ..AccountRlp::default() - }; - let to_account_second_after = AccountRlp { - balance: to_account_second_before.balance, - code_hash, - ..AccountRlp::default() - }; - - // Update the receipt trie. - let first_log = LogRlp { - address: to.into(), - topics: vec![], - data: Bytes::new(), - }; - - let second_log = LogRlp { - address: to.into(), - topics: vec![ - hex!("0000000000000000000000000000000000000000000000000000000000000062").into(), /* dec: 98 */ - hex!("0000000000000000000000000000000000000000000000000000000000000063").into(), /* dec: 99 */ - ], - data: hex!("a1b2c3d4e5").to_vec().into(), - }; - - let receipt = LegacyReceiptRlp { - status: true, - cum_gas_used: (22570 + 21000).into(), - bloom: hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000001000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000800000000000000008000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000800002000000000000000000000000000").to_vec().into(), - logs: vec![first_log, second_log], - }; - - let receipt_nibbles = Nibbles::from_str("0x01").unwrap(); // RLP(1) = 0x1 - - receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec())?; - - // Update the state trie. 
- let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - )?; - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?; - expected_state_trie_after.insert( - to_second_nibbles, - rlp::encode(&to_account_second_after).to_vec(), - )?; - - transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), txn_2.to_vec())?; - - let block_1_state_root = expected_state_trie_after.hash(); - - let trie_roots_after = TrieRoots { - state_root: block_1_state_root, - transactions_root: transactions_trie.hash(), - receipts_root: receipts_trie.hash(), - }; - - let inputs = GenerationInputs { - signed_txn: Some(txn_2.to_vec()), - withdrawals: vec![], - tries: tries_before, - trie_roots_after: trie_roots_after.clone(), - contract_code, - checkpoint_state_trie_root, - block_metadata: block_1_metadata, - txn_number_before: 1.into(), - gas_used_before: gas_used_second, - gas_used_after: receipt.cum_gas_used, - block_hashes: BlockHashes { - prev_hashes: block_hashes.clone(), - cur_hash: block_1_hash, - }, - }; - - let mut timing = TimingTree::new("prove root second", log::Level::Info); - let (root_proof_second, public_values_second) = - all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, None.clone())?; - timing.filter(Duration::from_millis(100)).print(); - - all_circuits.verify_root(root_proof_second.clone())?; - - let (agg_proof, updated_agg_public_values) = all_circuits.prove_aggregation( - false, - &root_proof_first, - public_values_first, - false, - &root_proof_second, - public_values_second, - )?; - all_circuits.verify_aggregation(&agg_proof)?; - let (first_block_proof, _block_public_values) = - all_circuits.prove_block(None, &agg_proof, updated_agg_public_values)?; - 
all_circuits.verify_block(&first_block_proof)?; - - // Prove the next, empty block. - - let block_2_hash = - H256::from_str("0x0123456789101112131415161718192021222324252627282930313233343536")?; - block_hashes[255] = block_1_hash; - - let block_2_metadata = BlockMetadata { - block_beneficiary: Address::from(beneficiary), - block_timestamp: 0x03e8.into(), - block_number: 2.into(), - block_difficulty: 0x020000.into(), - block_gaslimit: 0x445566u32.into(), - block_chain_id: 1.into(), - block_base_fee: 0xa.into(), - ..Default::default() - }; - - let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); - - let inputs = GenerationInputs { - signed_txn: None, - withdrawals: vec![], - tries: TrieInputs { - state_trie: expected_state_trie_after, - transactions_trie: Node::Empty.into(), - receipts_trie: Node::Empty.into(), - storage_tries: vec![], - }, - trie_roots_after: TrieRoots { - state_root: trie_roots_after.state_root, - transactions_root: HashedPartialTrie::from(Node::Empty).hash(), - receipts_root: HashedPartialTrie::from(Node::Empty).hash(), - }, - contract_code, - checkpoint_state_trie_root: block_1_state_root, // We use block 1 as new checkpoint. - block_metadata: block_2_metadata, - txn_number_before: 0.into(), - gas_used_before: 0.into(), - gas_used_after: 0.into(), - block_hashes: BlockHashes { - prev_hashes: block_hashes, - cur_hash: block_2_hash, - }, - }; - - let (root_proof, public_values) = - all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, None)?; - all_circuits.verify_root(root_proof.clone())?; - - // We can just duplicate the initial proof as the state didn't change. 
- let (agg_proof, updated_agg_public_values) = all_circuits.prove_aggregation( - false, - &root_proof, - public_values.clone(), - false, - &root_proof, - public_values, - )?; - all_circuits.verify_aggregation(&agg_proof)?; - - let (second_block_proof, _block_public_values) = all_circuits.prove_block( - None, // We don't specify a previous proof, considering block 1 as the new checkpoint. - &agg_proof, - updated_agg_public_values, - )?; - all_circuits.verify_block(&second_block_proof) -} - -/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 -#[test] -fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> { - // This test checks that inserting into the transaction and receipt - // `HashedPartialTrie`s works as expected. - let mut example_txn_trie = HashedPartialTrie::from(Node::Empty); - - // We consider two transactions, with one log each. - let transaction_0 = LegacyTransactionRlp { - nonce: 157823u64.into(), - gas_price: 1000000000u64.into(), - gas: 250000u64.into(), - to: AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), - value: 0u64.into(), - data: hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000bd9fe6f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") - .to_vec() - .into(), - v: 0x1c.into(), - r: hex!("d0eeac4841caf7a894dd79e6e633efc2380553cdf8b786d1aa0b8a8dee0266f4").into(), - s: hex!("740710eed9696c663510b7fb71a553112551121595a54ec6d2ec0afcec72a973").into(), - }; - - // Insert the first transaction into the transaction trie. 
- example_txn_trie.insert( - Nibbles::from_str("0x80").unwrap(), // RLP(0) = 0x80 - rlp::encode(&transaction_0).to_vec(), - )?; - - let transaction_1 = LegacyTransactionRlp { - nonce: 157824u64.into(), - gas_price: 1000000000u64.into(), - gas: 250000u64.into(), - to: AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), - value: 0u64.into(), - data: hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000004920eaa814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .to_vec() - .into(), - v: 0x1b.into(), - r: hex!("a3ff39967683fc684dc7b857d6f62723e78804a14b091a058ad95cc1b8a0281f").into(), - s: hex!("51b156e05f21f499fa1ae47ebf536b15a237208f1d4a62e33956b6b03cf47742").into(), - }; - - // Insert the second transaction into the transaction trie. - example_txn_trie.insert( - Nibbles::from_str("0x01").unwrap(), - rlp::encode(&transaction_1).to_vec(), - )?; - - // Receipts: - let mut example_receipt_trie = HashedPartialTrie::from(Node::Empty); - - let log_0 = LogRlp { - address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), - topics: vec![ - hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(), - hex!("000000000000000000000000000000000000000000000000000000000000002a").into(), - hex!("0000000000000000000000000000000000000000000000000000000000bd9fe6").into(), - ], - data: hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") - .to_vec() - .into(), - }; - - let receipt_0 = LegacyReceiptRlp { - status: true, - cum_gas_used: 0x016e5bu64.into(), - bloom: 
hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000020000000000008000000000000000000000000").to_vec().into(), - logs: vec![log_0], - }; - - // Insert the first receipt into the receipt trie. - example_receipt_trie.insert( - Nibbles::from_str("0x80").unwrap(), // RLP(0) is 0x80 - rlp::encode(&receipt_0).to_vec(), - )?; - - let log_1 = LogRlp { - address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), - topics: vec![ - hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(), - hex!("0000000000000000000000000000000000000000000000000000000000000004").into(), - hex!("00000000000000000000000000000000000000000000000000000000004920ea").into(), - ], - data: hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") - .to_vec() - .into(), - }; - - let receipt_1 = LegacyReceiptRlp { - status: true, - cum_gas_used: 0x02dcb6u64.into(), - bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000").to_vec().into(), - logs: vec![log_1], - }; - - // Insert the second receipt into the receipt trie. 
- example_receipt_trie.insert( - Nibbles::from_str("0x01").unwrap(), - rlp::encode(&receipt_1).to_vec(), - )?; - - // Check that the trie hashes are correct. - assert_eq!( - example_txn_trie.hash(), - hex!("3ab7120d12e1fc07303508542602beb7eecfe8f262b83fd71eefe7d6205242ce").into() - ); - - assert_eq!( - example_receipt_trie.hash(), - hex!("da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98").into() - ); - - Ok(()) -} +// #[test] +// #[ignore] // Too slow to run on CI. +// fn test_log_with_aggreg() -> anyhow::Result<()> { +// init_logger(); +// +// let code = [ +// 0x64, 0xA1, 0xB2, 0xC3, 0xD4, 0xE5, 0x60, 0x0, 0x52, // MSTORE(0x0, +// 0xA1B2C3D4E5) 0x60, 0x0, 0x60, 0x0, 0xA0, // LOG0(0x0, 0x0) +// 0x60, 99, 0x60, 98, 0x60, 5, 0x60, 27, 0xA2, // LOG2(27, 5, 98, 99) +// 0x00, +// ]; +// +// let code_gas = 3 + 3 + 3 // PUSHs and MSTORE +// + 3 + 3 + 375 // PUSHs and LOG0 +// + 3 + 3 + 3 + 3 + 375 + 375*2 + 8*5 // PUSHs and LOG2 +// + 3 // Memory expansion +// ; +// +// let gas_used = 21_000 + code_gas; +// +// let code_hash = hashout2u(hash_contract_bytecode(code.to_vec())); +// +// // First transaction. 
+// let all_stark = AllStark::::default(); +// let config = StarkConfig::standard_fast_config(); +// +// let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); +// let sender_first = hex!("af1276cbb260bb13deddb4209ae99ae6e497f446"); +// let to_first = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); +// let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552e89"); +// +// let beneficiary_state_key = keccak(beneficiary); +// let sender_state_key = keccak(sender_first); +// let to_hashed = keccak(to_first); +// let to_hashed_2 = keccak(to); +// +// let beneficiary_nibbles = +// Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); +// let sender_nibbles = +// Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); +// let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); +// let to_second_nibbles = +// Nibbles::from_bytes_be(to_hashed_2.as_bytes()).unwrap(); +// +// let beneficiary_account_before = AccountRlp { +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let sender_balance_before = 1000000000000000000u64.into(); +// let sender_account_before = AccountRlp { +// balance: sender_balance_before, +// ..AccountRlp::default() +// }; +// let to_account_before = AccountRlp { +// ..AccountRlp::default() +// }; +// let to_account_second_before = AccountRlp { +// code_hash, +// ..AccountRlp::default() +// }; +// +// // In the first transaction, the sender account sends `txn_value` to +// `to_account`. 
let gas_price = 10; +// let txn_value = 0xau64; +// let mut state_smt_before = Smt::::default(); +// set_account( +// &mut state_smt_before, +// H160(beneficiary), +// &beneficiary_account_before, +// &HashMap::new(), +// ); +// set_account( +// &mut state_smt_before, +// H160(sender_first), +// &sender_account_before, +// &HashMap::new(), +// ); +// set_account( +// &mut state_smt_before, +// H160(to_first), +// &to_account_before, +// &HashMap::new(), +// ); +// set_account( +// &mut state_smt_before, +// H160(to), +// &to_account_second_before, +// &HashMap::new(), +// ); +// let checkpoint_state_trie_root = +// H256::from_uint(&hashout2u(state_smt_before.root)); +// +// let tries_before = TrieInputs { +// state_smt: state_smt_before.serialize(), +// transactions_trie: Node::Empty.into(), +// receipts_trie: Node::Empty.into(), +// }; +// +// let txn = +// hex!("f85f800a82520894095e7baea6a6c7c4c2dfeb977efac326af552d870a8026a0122f370ed4023a6c253350c6bfb87d7d7eb2cd86447befee99e0a26b70baec20a07100ab1b3977f2b4571202b9f4b68850858caf5469222794600b5ce1cfb348ad" +// ); +// +// let block_1_metadata = BlockMetadata { +// block_beneficiary: Address::from(beneficiary), +// block_timestamp: 0x03e8.into(), +// block_number: 1.into(), +// block_difficulty: 0x020000.into(), +// block_gaslimit: 0x445566u32.into(), +// block_chain_id: 1.into(), +// block_base_fee: 0xa.into(), +// block_gas_used: (22570 + 21000).into(), +// block_bloom: [ +// 0.into(), +// 0.into(), +// U256::from_dec_str( +// +// "55213970774324510299479508399853534522527075462195808724319849722937344", +// ) +// .unwrap(), +// +// U256::from_dec_str("1361129467683753853853498429727072845824").unwrap(), +// 33554432.into(), +// U256::from_dec_str("9223372036854775808").unwrap(), +// U256::from_dec_str( +// +// "3618502788666131106986593281521497120414687020801267626233049500247285563392" +// , ) +// .unwrap(), +// +// U256::from_dec_str("2722259584404615024560450425766186844160").unwrap(), +// ], +// block_random: 
Default::default(), +// }; +// +// let beneficiary_account_after = AccountRlp { +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// +// let sender_balance_after = sender_balance_before - gas_price * 21000 - +// txn_value; let sender_account_after = AccountRlp { +// balance: sender_balance_after, +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// let to_account_after = AccountRlp { +// balance: txn_value.into(), +// ..AccountRlp::default() +// }; +// +// let mut contract_code = HashMap::new(); +// contract_code.insert(hashout2u(hash_contract_bytecode(vec![])), vec![]); +// contract_code.insert(code_hash, code.to_vec()); +// +// let mut expected_state_trie_after = Smt::::default(); +// set_account( +// &mut expected_state_trie_after, +// H160(beneficiary), +// &beneficiary_account_after, +// &HashMap::new(), +// ); +// set_account( +// &mut expected_state_trie_after, +// H160(sender_first), +// &sender_account_after, +// &HashMap::new(), +// ); +// set_account( +// &mut expected_state_trie_after, +// H160(to_first), +// &to_account_after, +// &HashMap::new(), +// ); +// set_account( +// &mut expected_state_trie_after, +// H160(to), +// &to_account_second_before, +// &HashMap::new(), +// ); +// +// // Compute new receipt trie. 
+// let mut receipts_trie = HashedPartialTrie::from(Node::Empty); +// let receipt_0 = LegacyReceiptRlp { +// status: true, +// cum_gas_used: 21000u64.into(), +// bloom: [0x00; 256].to_vec().into(), +// logs: vec![], +// }; +// receipts_trie.insert( +// Nibbles::from_str("0x80").unwrap(), +// rlp::encode(&receipt_0).to_vec(), +// ); +// +// let mut transactions_trie: HashedPartialTrie = Node::Leaf { +// nibbles: Nibbles::from_str("0x80").unwrap(), +// value: txn.to_vec(), +// } +// .into(); +// +// let tries_after = TrieRoots { +// state_root: +// H256::from_uint(&hashout2u(expected_state_smt_after.root)), +// transactions_root: transactions_trie.hash(), +// receipts_root: receipts_trie.clone().hash(), +// }; +// +// let block_1_hash = +// H256::from_str(" +// 0x0101010101010101010101010101010101010101010101010101010101010101")?; +// let mut block_hashes = vec![H256::default(); 256]; +// +// let inputs_first = GenerationInputs { +// signed_txn: Some(txn.to_vec()), +// withdrawals: vec![], +// tries: tries_before, +// trie_roots_after: tries_after, +// contract_code, +// checkpoint_state_trie_root, +// block_metadata: block_1_metadata.clone(), +// txn_number_before: 0.into(), +// gas_used_before: 0.into(), +// gas_used_after: 21000u64.into(), +// block_hashes: BlockHashes { +// prev_hashes: block_hashes.clone(), +// cur_hash: block_1_hash, +// }, +// }; +// +// // Preprocess all circuits. 
+// let all_circuits = AllRecursiveCircuits::::new( +// &all_stark, +// &[16..17, 12..15, 14..18, 14..15, 9..10, 12..13, 17..20], +// &config, +// ); +// +// let mut timing = TimingTree::new("prove root first", log::Level::Info); +// let (root_proof_first, public_values_first) = +// all_circuits.prove_root(&all_stark, &config, inputs_first, &mut +// timing, None)?; +// +// timing.filter(Duration::from_millis(100)).print(); +// all_circuits.verify_root(root_proof_first.clone())?; +// +// // The gas used and transaction number are fed to the next transaction, +// so the two proofs can be correctly aggregated. let gas_used_second = +// public_values_first.extra_block_data.gas_used_after; +// +// // Prove second transaction. In this second transaction, the code with +// logs is executed. +// +// let state_trie_before = expected_state_trie_after; +// +// let tries_before = TrieInputs { +// state_smt: state_trie_before.serialize(), +// transactions_trie: transactions_trie.clone(), +// receipts_trie: receipts_trie.clone(), +// }; +// +// // Prove a transaction which carries out two LOG opcodes. +// let txn_gas_price = 10; +// let txn_2 = +// hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b" +// ); +// +// let mut contract_code = HashMap::new(); +// contract_code.insert(keccak(vec![]), vec![]); +// contract_code.insert(code_hash, code.to_vec()); +// +// // Update the state and receipt tries after the transaction, so that we +// have the correct expected tries: // Update accounts. 
+// let beneficiary_account_after = AccountRlp { +// nonce: 1.into(), +// ..AccountRlp::default() +// }; +// +// let sender_balance_after = sender_balance_after - gas_used * +// txn_gas_price; let sender_account_after = AccountRlp { +// balance: sender_balance_after, +// nonce: 2.into(), +// ..AccountRlp::default() +// }; +// let balance_after = to_account_after.balance; +// let to_account_after = AccountRlp { +// balance: balance_after, +// ..AccountRlp::default() +// }; +// let to_account_second_after = AccountRlp { +// balance: to_account_second_before.balance, +// code_hash, +// ..AccountRlp::default() +// }; +// +// // Update the receipt trie. +// let first_log = LogRlp { +// address: to.into(), +// topics: vec![], +// data: Bytes::new(), +// }; +// +// let second_log = LogRlp { +// address: to.into(), +// topics: vec![ +// +// hex!("0000000000000000000000000000000000000000000000000000000000000062"). +// into(), // dec: 98 +// hex!("0000000000000000000000000000000000000000000000000000000000000063"). +// into(), // dec: 99 ], +// data: hex!("a1b2c3d4e5").to_vec().into(), +// }; +// +// let receipt = LegacyReceiptRlp { +// status: true, +// cum_gas_used: (22570 + 21000).into(), +// bloom: +// hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000001000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000800000000000000008000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000800002000000000000000000000000000" +// ).to_vec().into(), logs: vec![first_log, second_log], +// }; +// +// let receipt_nibbles = Nibbles::from_str("0x01").unwrap(); // RLP(1) = 0x1 +// +// receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec()); +// +// // Update the state trie. 
+// let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); +// expected_state_trie_after.insert( +// beneficiary_nibbles, +// rlp::encode(&beneficiary_account_after).to_vec(), +// ); +// expected_state_trie_after.insert(sender_nibbles, +// rlp::encode(&sender_account_after).to_vec()); expected_state_trie_after. +// insert(to_nibbles, rlp::encode(&to_account_after).to_vec()); +// expected_state_trie_after.insert( +// to_second_nibbles, +// rlp::encode(&to_account_second_after).to_vec(), +// ); +// +// transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), +// txn_2.to_vec()); +// +// let block_1_state_root = expected_state_trie_after.hash(); +// +// let trie_roots_after = TrieRoots { +// state_root: block_1_state_root, +// transactions_root: transactions_trie.hash(), +// receipts_root: receipts_trie.hash(), +// }; +// +// let inputs = GenerationInputs { +// signed_txn: Some(txn_2.to_vec()), +// withdrawals: vec![], +// tries: tries_before, +// trie_roots_after: trie_roots_after.clone(), +// contract_code, +// checkpoint_state_trie_root, +// block_metadata: block_1_metadata, +// txn_number_before: 1.into(), +// gas_used_before: gas_used_second, +// gas_used_after: receipt.cum_gas_used, +// block_hashes: BlockHashes { +// prev_hashes: block_hashes.clone(), +// cur_hash: block_1_hash, +// }, +// }; +// +// let mut timing = TimingTree::new("prove root second", log::Level::Info); +// let (root_proof_second, public_values_second) = +// all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, +// None.clone())?; timing.filter(Duration::from_millis(100)).print(); +// +// all_circuits.verify_root(root_proof_second.clone())?; +// +// let (agg_proof, updated_agg_public_values) = +// all_circuits.prove_aggregation( false, +// &root_proof_first, +// public_values_first, +// false, +// &root_proof_second, +// public_values_second, +// )?; +// all_circuits.verify_aggregation(&agg_proof)?; +// let (first_block_proof, _block_public_values) = +// 
all_circuits.prove_block(None, &agg_proof, +// updated_agg_public_values)?; all_circuits.verify_block(& +// first_block_proof)?; +// +// // Prove the next, empty block. +// +// let block_2_hash = +// H256::from_str(" +// 0x0123456789101112131415161718192021222324252627282930313233343536")?; +// block_hashes[255] = block_1_hash; +// +// let block_2_metadata = BlockMetadata { +// block_beneficiary: Address::from(beneficiary), +// block_timestamp: 0x03e8.into(), +// block_number: 2.into(), +// block_difficulty: 0x020000.into(), +// block_gaslimit: 0x445566u32.into(), +// block_chain_id: 1.into(), +// block_base_fee: 0xa.into(), +// ..Default::default() +// }; +// +// let mut contract_code = HashMap::new(); +// contract_code.insert(keccak(vec![]), vec![]); +// +// let inputs = GenerationInputs { +// signed_txn: None, +// withdrawals: vec![], +// tries: TrieInputs { +// state_trie: expected_state_trie_after, +// transactions_trie: Node::Empty.into(), +// receipts_trie: Node::Empty.into(), +// storage_tries: vec![], +// }, +// trie_roots_after: TrieRoots { +// state_root: trie_roots_after.state_root, +// transactions_root: HashedPartialTrie::from(Node::Empty).hash(), +// receipts_root: HashedPartialTrie::from(Node::Empty).hash(), +// }, +// contract_code, +// checkpoint_state_trie_root: block_1_state_root, // We use block 1 as +// new checkpoint. block_metadata: block_2_metadata, +// txn_number_before: 0.into(), +// gas_used_before: 0.into(), +// gas_used_after: 0.into(), +// block_hashes: BlockHashes { +// prev_hashes: block_hashes, +// cur_hash: block_2_hash, +// }, +// }; +// +// let (root_proof, public_values) = +// all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, +// None)?; all_circuits.verify_root(root_proof.clone())?; +// +// // We can just duplicate the initial proof as the state didn't change. 
+// let (agg_proof, updated_agg_public_values) = +// all_circuits.prove_aggregation( false, +// &root_proof, +// public_values.clone(), +// false, +// &root_proof, +// public_values, +// )?; +// all_circuits.verify_aggregation(&agg_proof)?; +// +// let (second_block_proof, _block_public_values) = +// all_circuits.prove_block( None, // We don't specify a previous proof, +// considering block 1 as the new checkpoint. &agg_proof, +// updated_agg_public_values, +// )?; +// all_circuits.verify_block(&second_block_proof) +// } +// +// /// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 +// #[test] +// fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> { +// // This test checks that inserting into the transaction and receipt +// `HashedPartialTrie`s works as expected. let mut example_txn_trie = +// HashedPartialTrie::from(Node::Empty); +// +// // We consider two transactions, with one log each. +// let transaction_0 = LegacyTransactionRlp { +// nonce: 157823u64.into(), +// gas_price: 1000000000u64.into(), +// gas: 250000u64.into(), +// to: +// AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), +// value: 0u64.into(), +// data: +// hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000bd9fe6f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58" +// ) .to_vec() +// .into(), +// v: 0x1c.into(), +// r: +// hex!("d0eeac4841caf7a894dd79e6e633efc2380553cdf8b786d1aa0b8a8dee0266f4"). +// into(), s: +// hex!("740710eed9696c663510b7fb71a553112551121595a54ec6d2ec0afcec72a973"). +// into(), }; +// +// // Insert the first transaction into the transaction trie. 
+// example_txn_trie.insert( +// Nibbles::from_str("0x80").unwrap(), // RLP(0) = 0x80 +// rlp::encode(&transaction_0).to_vec(), +// ); +// +// let transaction_1 = LegacyTransactionRlp { +// nonce: 157824u64.into(), +// gas_price: 1000000000u64.into(), +// gas: 250000u64.into(), +// to: +// AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), +// value: 0u64.into(), +// data: +// hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000004920eaa814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243" +// ) .to_vec() +// .into(), +// v: 0x1b.into(), +// r: +// hex!("a3ff39967683fc684dc7b857d6f62723e78804a14b091a058ad95cc1b8a0281f"). +// into(), s: +// hex!("51b156e05f21f499fa1ae47ebf536b15a237208f1d4a62e33956b6b03cf47742"). +// into(), }; +// +// // Insert the second transaction into the transaction trie. +// example_txn_trie.insert( +// Nibbles::from_str("0x01").unwrap(), +// rlp::encode(&transaction_1).to_vec(), +// ); +// +// // Receipts: +// let mut example_receipt_trie = HashedPartialTrie::from(Node::Empty); +// +// let log_0 = LogRlp { +// address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), +// topics: vec![ +// +// hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d"). +// into(), +// hex!("000000000000000000000000000000000000000000000000000000000000002a"). +// into(), +// hex!("0000000000000000000000000000000000000000000000000000000000bd9fe6"). 
+// into(), ], +// data: +// hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") +// .to_vec() +// .into(), +// }; +// +// let receipt_0 = LegacyReceiptRlp { +// status: true, +// cum_gas_used: 0x016e5bu64.into(), +// bloom: +// hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000020000000000008000000000000000000000000" +// ).to_vec().into(), logs: vec![log_0], +// }; +// +// // Insert the first receipt into the receipt trie. +// example_receipt_trie.insert( +// Nibbles::from_str("0x80").unwrap(), // RLP(0) is 0x80 +// rlp::encode(&receipt_0).to_vec(), +// ); +// +// let log_1 = LogRlp { +// address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), +// topics: vec![ +// +// hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d"). +// into(), +// hex!("0000000000000000000000000000000000000000000000000000000000000004"). +// into(), +// hex!("00000000000000000000000000000000000000000000000000000000004920ea"). 
+// into(), ], +// data: +// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") +// .to_vec() +// .into(), +// }; +// +// let receipt_1 = LegacyReceiptRlp { +// status: true, +// cum_gas_used: 0x02dcb6u64.into(), +// bloom: +// hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000" +// ).to_vec().into(), logs: vec![log_1], +// }; +// +// // Insert the second receipt into the receipt trie. +// example_receipt_trie.insert( +// Nibbles::from_str("0x01").unwrap(), +// rlp::encode(&receipt_1).to_vec(), +// ); +// +// // Check that the trie hashes are correct. 
+// assert_eq!( +// example_txn_trie.hash(), +// hex!(" +// 3ab7120d12e1fc07303508542602beb7eecfe8f262b83fd71eefe7d6205242ce").into() +// ); +// +// assert_eq!( +// example_receipt_trie.hash(), +// hex!(" +// da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98").into() +// ); +// +// Ok(()) +// } fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/self_balance_gas_cost.rs b/evm_arithmetization/tests/self_balance_gas_cost.rs index 510c33cb3..04a2fa475 100644 --- a/evm_arithmetization/tests/self_balance_gas_cost.rs +++ b/evm_arithmetization/tests/self_balance_gas_cost.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use std::time::Duration; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; @@ -11,12 +11,16 @@ use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use 
smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -35,14 +39,6 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let to = hex!("1000000000000000000000000000000000000000"); - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender); - let to_hashed = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - let code = [ 0x5a, 0x47, 0x5a, 0x90, 0x50, 0x90, 0x03, 0x60, 0x02, 0x90, 0x03, 0x60, 0x01, 0x55, 0x00, ]; @@ -58,7 +54,7 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { + 3 // SUB + 3 // PUSH1 + 22100; // SSTORE - let code_hash = keccak(code); + let code_hash = hash_bytecode_u256(code.to_vec()); let beneficiary_account_before = AccountRlp { nonce: 1.into(), @@ -73,19 +69,30 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { ..AccountRlp::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_before).to_vec(), - )?; - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); let tries_before = 
TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: Node::Empty.into(), - storage_tries: vec![(to_hashed, Node::Empty.into())], }; let txn = hex!("f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b"); @@ -106,10 +113,11 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { }; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); contract_code.insert(code_hash, code.to_vec()); - let expected_state_trie_after = { + let expected_state_smt_after = { + let mut smt = Smt::::default(); let beneficiary_account_after = AccountRlp { nonce: 1.into(), ..AccountRlp::default() @@ -121,28 +129,29 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { }; let to_account_after = AccountRlp { code_hash, - // Storage map: { 1 => 5 } - storage_root: HashedPartialTrie::from(Node::Leaf { - // TODO: Could do keccak(pad32(1)) - nibbles: Nibbles::from_str( - "0xb10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6", - ) - .unwrap(), - value: vec![5], - }) - .hash(), ..AccountRlp::default() }; - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - )?; - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?; - expected_state_trie_after + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(to), + &to_account_after, + 
&HashMap::from([(1.into(), 5.into())]), // Storage map: { 1 => 5 } + ); + + smt }; let receipt_0 = LegacyReceiptRlp { @@ -163,7 +172,7 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -194,3 +203,18 @@ fn self_balance_gas_cost() -> anyhow::Result<()> { fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/selfdestruct.rs b/evm_arithmetization/tests/selfdestruct.rs index 0ef48d2f4..1d29b7fc8 100644 --- a/evm_arithmetization/tests/selfdestruct.rs +++ b/evm_arithmetization/tests/selfdestruct.rs @@ -1,8 +1,9 @@ +use std::collections::HashMap; use std::str::FromStr; use std::time::Duration; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, BigEndianHash, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; @@ -10,12 +11,16 @@ use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, 
PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -33,17 +38,10 @@ fn test_selfdestruct() -> anyhow::Result<()> { let sender = hex!("5eb96AA102a29fAB267E12A40a5bc6E9aC088759"); let to = hex!("a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0"); - let sender_state_key = keccak(sender); - let to_state_key = keccak(to); - - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_state_key.as_bytes()).unwrap(); - let sender_account_before = AccountRlp { nonce: 5.into(), balance: eth_to_wei(100_000.into()), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak([]), + ..Default::default() }; let code = vec![ 0x32, // ORIGIN @@ -52,19 +50,28 @@ fn test_selfdestruct() -> anyhow::Result<()> { let to_account_before = AccountRlp { nonce: 12.into(), balance: eth_to_wei(10_000.into()), - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak(&code), + code_hash: hash_bytecode_u256(code.clone()), + ..Default::default() }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: 
state_smt_before.serialize(), transactions_trie: HashedPartialTrie::from(Node::Empty), receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries: vec![], }; // Generated using a little py-evm script. @@ -83,18 +90,26 @@ fn test_selfdestruct() -> anyhow::Result<()> { block_bloom: [0.into(); 8], }; - let contract_code = [(keccak(&code), code), (keccak([]), vec![])].into(); + let contract_code = [ + (hash_bytecode_u256(code.clone()), code), + (hash_bytecode_u256(vec![]), vec![]), + ] + .into(); - let expected_state_trie_after: HashedPartialTrie = { - let mut state_trie_after = HashedPartialTrie::from(Node::Empty); + let expected_state_smt_after = { + let mut smt = Smt::::default(); let sender_account_after = AccountRlp { nonce: 6.into(), balance: eth_to_wei(110_000.into()) - 26_002 * 0xa, - storage_root: HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak([]), + ..Default::default() }; - state_trie_after.insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - state_trie_after + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + smt }; let receipt_0 = LegacyReceiptRlp { @@ -115,7 +130,7 @@ fn test_selfdestruct() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -151,3 +166,18 @@ fn eth_to_wei(eth: U256) -> U256 { fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + 
} +} diff --git a/evm_arithmetization/tests/simple_transfer.rs b/evm_arithmetization/tests/simple_transfer.rs index 8adbc1c42..595c24e85 100644 --- a/evm_arithmetization/tests/simple_transfer.rs +++ b/evm_arithmetization/tests/simple_transfer.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use std::time::Duration; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, BigEndianHash, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; @@ -11,12 +11,16 @@ use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -34,31 +38,32 @@ fn test_simple_transfer() -> anyhow::Result<()> { let sender = hex!("2c7536e3605d9c16a7a3d7b1898e529396a65c23"); let to = hex!("a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0"); - let sender_state_key = keccak(sender); - let to_state_key = keccak(to); - - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_state_key.as_bytes()).unwrap(); - let sender_account_before = AccountRlp { nonce: 5.into(), balance: eth_to_wei(100_000.into()), - storage_root: 
HashedPartialTrie::from(Node::Empty).hash(), - code_hash: keccak([]), + ..Default::default() }; + let to_account_before = AccountRlp::default(); - let state_trie_before = Node::Leaf { - nibbles: sender_nibbles, - value: rlp::encode(&sender_account_before).to_vec(), - } - .into(); + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: HashedPartialTrie::from(Node::Empty), receipts_trie: HashedPartialTrie::from(Node::Empty), - storage_tries: vec![], }; // Generated using a little py-evm script. @@ -79,9 +84,10 @@ fn test_simple_transfer() -> anyhow::Result<()> { }; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); - let expected_state_trie_after: HashedPartialTrie = { + let expected_state_smt_after = { + let mut smt = Smt::::default(); let txdata_gas = 2 * 16; let gas_used = 21_000 + txdata_gas; @@ -95,22 +101,15 @@ fn test_simple_transfer() -> anyhow::Result<()> { ..to_account_before }; - let mut children = core::array::from_fn(|_| Node::Empty.into()); - children[sender_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: sender_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&sender_account_after).to_vec(), - } - .into(); - children[to_nibbles.get_nibble(0) as usize] = Node::Leaf { - nibbles: to_nibbles.truncate_n_nibbles_front(1), - value: rlp::encode(&to_account_after).to_vec(), - } - .into(); - Node::Branch { - children, - value: vec![], - } - .into() + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to), &to_account_after, &HashMap::new()); + + smt }; let 
receipt_0 = LegacyReceiptRlp { @@ -131,7 +130,7 @@ fn test_simple_transfer() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -167,3 +166,18 @@ fn eth_to_wei(eth: U256) -> U256 { fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } + +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/evm_arithmetization/tests/withdrawals.rs b/evm_arithmetization/tests/withdrawals.rs index 7f133518b..9286b1eff 100644 --- a/evm_arithmetization/tests/withdrawals.rs +++ b/evm_arithmetization/tests/withdrawals.rs @@ -2,20 +2,23 @@ use std::collections::HashMap; use std::time::Duration; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{H160, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::generation::mpt::AccountRlp; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; use evm_arithmetization::{AllStark, Node, StarkConfig}; -use keccak_hash::keccak; -use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; use rand::random; +use 
smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -31,31 +34,28 @@ fn test_withdrawals() -> anyhow::Result<()> { let block_metadata = BlockMetadata::default(); - let state_trie_before = HashedPartialTrie::from(Node::Empty); + let state_smt_before = Smt::::default(); let transactions_trie = HashedPartialTrie::from(Node::Empty); let receipts_trie = HashedPartialTrie::from(Node::Empty); - let storage_tries = vec![]; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); // Just one withdrawal. let withdrawals = vec![(H160(random()), U256(random()))]; - let state_trie_after = { - let mut trie = HashedPartialTrie::from(Node::Empty); - let addr_state_key = keccak(withdrawals[0].0); - let addr_nibbles = Nibbles::from_bytes_be(addr_state_key.as_bytes()).unwrap(); + let state_smt_after = { + let mut smt = Smt::::default(); let account = AccountRlp { balance: withdrawals[0].1, ..AccountRlp::default() }; - trie.insert(addr_nibbles, rlp::encode(&account).to_vec())?; - trie + set_account(&mut smt, withdrawals[0].0, &account, &HashMap::new()); + smt }; let trie_roots_after = TrieRoots { - state_root: state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -64,10 +64,9 @@ fn test_withdrawals() -> anyhow::Result<()> { signed_txn: None, withdrawals, tries: TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie, receipts_trie, - storage_tries, }, trie_roots_after, contract_code, @@ -92,3 +91,17 @@ fn test_withdrawals() -> anyhow::Result<()> { fn init_logger() { let _ = 
try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} diff --git a/proof_gen/Cargo.toml b/proof_gen/Cargo.toml index 9a613bcc9..c5e5d350b 100644 --- a/proof_gen/Cargo.toml +++ b/proof_gen/Cargo.toml @@ -17,5 +17,5 @@ plonky2 = { workspace = true } serde = { workspace = true } # Local dependencies -trace_decoder = { version = "0.2.0", path = "../trace_decoder" } -evm_arithmetization = { version = "0.1.2", path = "../evm_arithmetization" } +trace_decoder = "0.2.0" +evm_arithmetization = "0.1.2" # TODO: adapt with type2 and bring back paths diff --git a/smt_trie/Cargo.toml b/smt_trie/Cargo.toml new file mode 100644 index 000000000..7c01f23bb --- /dev/null +++ b/smt_trie/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "smt_trie" +description = "Types and utility functions for building/working with Polygon Hermez Sparse Merkle Trees." 
+version = "0.1.0" +authors = ["William Borgeaud "] +readme = "README.md" +categories = ["cryptography"] +edition.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true +keywords.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bytes = { workspace = true } +enum-as-inner = { workspace = true } +ethereum-types = { workspace = true } +hex = { workspace = true } +hex-literal = { workspace = true } +keccak-hash = { workspace = true } +thiserror = "1.0.40" +log = { workspace = true } +num-traits = "0.2.15" +uint = "0.9.5" +rlp = { workspace = true } +parking_lot = { version = "0.12.1", features = ["serde"] } +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2", rev = "c1728d4e43e9ff434f9297e4f6171ddf28ec8fca" } +rand = "0.8.5" +serde = { workspace = true, features = ["derive", "rc"] } + + +[dev-dependencies] +eth_trie = "0.1.0" +pretty_env_logger = "0.4.0" +rlp-derive = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = "1.0.96" diff --git a/smt_trie/README.md b/smt_trie/README.md new file mode 100644 index 000000000..54707f9fe --- /dev/null +++ b/smt_trie/README.md @@ -0,0 +1,2 @@ +Types and functions to work with the Hermez/Polygon zkEVM sparse Merkle tree (SMT) format. +See https://github.com/0xPolygonHermez/zkevm-commonjs for reference implementation. diff --git a/smt_trie/src/bits.rs b/smt_trie/src/bits.rs new file mode 100644 index 000000000..4d2d2ed91 --- /dev/null +++ b/smt_trie/src/bits.rs @@ -0,0 +1,103 @@ +use std::ops::Add; + +use ethereum_types::{BigEndianHash, H256, U256}; +use serde::{Deserialize, Serialize}; + +pub type Bit = bool; + +#[derive( + Copy, Clone, Deserialize, Default, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Debug, +)] +pub struct Bits { + /// The number of bits in this sequence. + pub count: usize, + /// A packed encoding of these bits. 
Only the first (least significant) + /// `count` bits are used. The rest are unused and should be zero. + pub packed: U256, +} + +impl From for Bits { + fn from(packed: U256) -> Self { + Bits { count: 256, packed } + } +} + +impl From for Bits { + fn from(packed: H256) -> Self { + Bits { + count: 256, + packed: packed.into_uint(), + } + } +} + +impl Add for Bits { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + assert!(self.count + rhs.count <= 256, "Overflow"); + Self { + count: self.count + rhs.count, + packed: self.packed * (U256::one() << rhs.count) + rhs.packed, + } + } +} + +impl Bits { + pub fn empty() -> Self { + Bits { + count: 0, + packed: U256::zero(), + } + } + + pub fn is_empty(&self) -> bool { + self.count == 0 + } + + pub fn pop_next_bit(&mut self) -> Bit { + assert!(!self.is_empty(), "Cannot pop from empty bits"); + let b = !(self.packed & U256::one()).is_zero(); + self.packed >>= 1; + self.count -= 1; + b + } + + pub fn get_bit(&self, i: usize) -> Bit { + assert!(i < self.count, "Index out of bounds"); + !(self.packed & (U256::one() << (self.count - 1 - i))).is_zero() + } + + pub fn push_bit(&mut self, bit: Bit) { + self.packed = self.packed * 2 + U256::from(bit as u64); + self.count += 1; + } + + pub fn add_bit(&self, bit: Bit) -> Self { + let mut x = *self; + x.push_bit(bit); + x + } + + pub fn common_prefix(&self, k: &Bits) -> (Self, Option<(Bit, Bit)>) { + let mut a = *self; + let mut b = *k; + while a.count > b.count { + a.pop_next_bit(); + } + while a.count < b.count { + b.pop_next_bit(); + } + if a == b { + return (a, None); + } + let mut a_bit = a.pop_next_bit(); + let mut b_bit = b.pop_next_bit(); + while a != b { + a_bit = a.pop_next_bit(); + b_bit = b.pop_next_bit(); + } + assert_ne!(a_bit, b_bit, "Sanity check."); + (a, Some((a_bit, b_bit))) + } +} diff --git a/smt_trie/src/code.rs b/smt_trie/src/code.rs new file mode 100644 index 000000000..a7973e2c9 --- /dev/null +++ b/smt_trie/src/code.rs @@ -0,0 +1,75 @@ +/// 
Functions to hash contract bytecode using Poseidon. +/// See `hashContractBytecode()` in https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt-utils.js for reference implementation. +use ethereum_types::U256; +use plonky2::field::types::Field; +use plonky2::hash::poseidon::Poseidon; + +use crate::smt::{HashOut, F}; +use crate::utils::hashout2u; + +pub fn hash_contract_bytecode(mut code: Vec) -> HashOut { + code.push(0x01); + while code.len() % 56 != 0 { + code.push(0x00); + } + *code.last_mut().unwrap() |= 0x80; + + let mut capacity = [F::ZERO; 4]; + for i in 0..code.len() / 56 { + let mut block = [0u8; 56]; + block.copy_from_slice(&code[i * 56..(i + 1) * 56]); + let mut arr = [F::ZERO; 12]; + for j in 0..8 { + arr[j] = block[j * 7..(j + 1) * 7] + .iter() + .enumerate() + .fold(F::ZERO, |acc, (k, x)| { + acc + (F::from_canonical_u64((*x as u64) << (k * 8))) + }); + } + arr[8..12].copy_from_slice(&capacity); + capacity = F::poseidon(arr)[0..4].try_into().unwrap(); + } + HashOut { elements: capacity } +} + +pub fn hash_bytecode_u256(code: Vec) -> U256 { + hashout2u(hash_contract_bytecode(code)) +} + +#[cfg(test)] +mod tests { + use hex_literal::hex; + + use super::*; + + #[test] + fn test_empty_code() { + assert_eq!( + hash_contract_bytecode(vec![]).elements, + [ + 10052403398432742521, + 15195891732843337299, + 2019258788108304834, + 4300613462594703212, + ] + .map(F::from_canonical_u64) + ); + } + + #[test] + fn test_some_code() { + let code = 
hex!("60806040526004361061003f5760003560e01c80632b68b9c6146100445780633fa4f2451461005b5780635cfb28e714610086578063718da7ee14610090575b600080fd5b34801561005057600080fd5b506100596100b9565b005b34801561006757600080fd5b506100706100f2565b60405161007d9190610195565b60405180910390f35b61008e6100f8565b005b34801561009c57600080fd5b506100b760048036038101906100b29190610159565b610101565b005b60008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16ff5b60015481565b34600181905550565b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050565b600081359050610153816101f1565b92915050565b60006020828403121561016f5761016e6101ec565b5b600061017d84828501610144565b91505092915050565b61018f816101e2565b82525050565b60006020820190506101aa6000830184610186565b92915050565b60006101bb826101c2565b9050919050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000819050919050565b600080fd5b6101fa816101b0565b811461020557600080fd5b5056fea26469706673582212207ae6e5d5feddef608b24cca98990c37cf78f8b377163a7c4951a429d90d6120464736f6c63430008070033"); + + assert_eq!( + hash_contract_bytecode(code.to_vec()).elements, + [ + 13311281292453978464, + 8384462470517067887, + 14733964407220681187, + 13541155386998871195 + ] + .map(F::from_canonical_u64) + ); + } +} diff --git a/smt_trie/src/db.rs b/smt_trie/src/db.rs new file mode 100644 index 000000000..f71fad29a --- /dev/null +++ b/smt_trie/src/db.rs @@ -0,0 +1,23 @@ +use std::collections::HashMap; + +use crate::smt::{Key, Node}; + +pub trait Db: Default { + fn get_node(&self, key: &Key) -> Option<&Node>; + fn set_node(&mut self, key: Key, value: Node); +} + +#[derive(Debug, Clone, Default)] +pub struct MemoryDb { + pub db: HashMap, +} + +impl Db for MemoryDb { + fn get_node(&self, key: &Key) -> Option<&Node> { + self.db.get(key) + } + + fn set_node(&mut self, key: Key, value: Node) { + self.db.insert(key, value); + } +} diff --git 
a/smt_trie/src/keys.rs b/smt_trie/src/keys.rs new file mode 100644 index 000000000..1f122adbb --- /dev/null +++ b/smt_trie/src/keys.rs @@ -0,0 +1,99 @@ +#![allow(clippy::needless_range_loop)] + +/// This module contains functions to generate keys for the SMT. +/// See https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt-utils.js for reference implementation. +use ethereum_types::{Address, U256}; +use plonky2::{field::types::Field, hash::poseidon::Poseidon}; + +use crate::smt::{Key, F}; + +const HASH_ZEROS: [u64; 4] = [ + 4330397376401421145, + 14124799381142128323, + 8742572140681234676, + 14345658006221440202, +]; + +const SMT_KEY_BALANCE: u64 = 0; +const SMT_KEY_NONCE: u64 = 1; +const SMT_KEY_CODE: u64 = 2; +const SMT_KEY_STORAGE: u64 = 3; +const SMT_KEY_LENGTH: u64 = 4; + +pub fn key_balance(addr: Address) -> Key { + let mut arr = [F::ZERO; 12]; + for i in 0..5 { + arr[i] = F::from_canonical_u32(u32::from_be_bytes( + addr.0[16 - 4 * i..16 - 4 * i + 4].try_into().unwrap(), + )); + } + + arr[6] = F::from_canonical_u64(SMT_KEY_BALANCE); + arr[8..12].copy_from_slice(&HASH_ZEROS.map(F::from_canonical_u64)); + + Key(F::poseidon(arr)[0..4].try_into().unwrap()) +} + +pub fn key_nonce(addr: Address) -> Key { + let mut arr = [F::ZERO; 12]; + for i in 0..5 { + arr[i] = F::from_canonical_u32(u32::from_be_bytes( + addr.0[16 - 4 * i..16 - 4 * i + 4].try_into().unwrap(), + )); + } + + arr[6] = F::from_canonical_u64(SMT_KEY_NONCE); + arr[8..12].copy_from_slice(&HASH_ZEROS.map(F::from_canonical_u64)); + + Key(F::poseidon(arr)[0..4].try_into().unwrap()) +} + +pub fn key_code(addr: Address) -> Key { + let mut arr = [F::ZERO; 12]; + for i in 0..5 { + arr[i] = F::from_canonical_u32(u32::from_be_bytes( + addr.0[16 - 4 * i..16 - 4 * i + 4].try_into().unwrap(), + )); + } + + arr[6] = F::from_canonical_u64(SMT_KEY_CODE); + arr[8..12].copy_from_slice(&HASH_ZEROS.map(F::from_canonical_u64)); + + Key(F::poseidon(arr)[0..4].try_into().unwrap()) +} + +pub fn key_storage(addr: 
Address, slot: U256) -> Key { + let mut arr = [F::ZERO; 12]; + for i in 0..5 { + arr[i] = F::from_canonical_u32(u32::from_be_bytes( + addr.0[16 - 4 * i..16 - 4 * i + 4].try_into().unwrap(), + )); + } + + arr[6] = F::from_canonical_u64(SMT_KEY_STORAGE); + let capacity: [F; 4] = { + let mut arr = [F::ZERO; 12]; + for i in 0..4 { + arr[2 * i] = F::from_canonical_u32(slot.0[i] as u32); + arr[2 * i + 1] = F::from_canonical_u32((slot.0[i] >> 32) as u32); + } + F::poseidon(arr)[0..4].try_into().unwrap() + }; + arr[8..12].copy_from_slice(&capacity); + + Key(F::poseidon(arr)[0..4].try_into().unwrap()) +} + +pub fn key_code_length(addr: Address) -> Key { + let mut arr = [F::ZERO; 12]; + for i in 0..5 { + arr[i] = F::from_canonical_u32(u32::from_be_bytes( + addr.0[16 - 4 * i..16 - 4 * i + 4].try_into().unwrap(), + )); + } + + arr[6] = F::from_canonical_u64(SMT_KEY_LENGTH); + arr[8..12].copy_from_slice(&HASH_ZEROS.map(F::from_canonical_u64)); + + Key(F::poseidon(arr)[0..4].try_into().unwrap()) +} diff --git a/smt_trie/src/lib.rs b/smt_trie/src/lib.rs new file mode 100644 index 000000000..11315f12c --- /dev/null +++ b/smt_trie/src/lib.rs @@ -0,0 +1,8 @@ +pub mod bits; +pub mod code; +pub mod db; +pub mod keys; +pub mod smt; +#[cfg(test)] +mod smt_test; +pub mod utils; diff --git a/smt_trie/src/smt.rs b/smt_trie/src/smt.rs new file mode 100644 index 000000000..acf77d995 --- /dev/null +++ b/smt_trie/src/smt.rs @@ -0,0 +1,423 @@ +#![allow(clippy::needless_range_loop)] + +use ethereum_types::U256; +use plonky2::field::goldilocks_field::GoldilocksField; +use plonky2::field::types::{Field, PrimeField64}; +use plonky2::hash::poseidon::{Poseidon, PoseidonHash}; +use plonky2::plonk::config::Hasher; + +use crate::bits::Bits; +use crate::db::Db; +use crate::utils::{ + f2limbs, get_unique_sibling, hash0, hash_key_hash, hashout2u, key2u, limbs2f, u2h, u2k, +}; + +const HASH_TYPE: u8 = 0; +const INTERNAL_TYPE: u8 = 1; +const LEAF_TYPE: u8 = 2; + +pub type F = GoldilocksField; 
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Key(pub [F; 4]); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Node(pub [F; 12]); +pub type Hash = PoseidonHash; +pub type HashOut = >::Hash; + +impl Key { + pub fn split(&self) -> Bits { + let mut bits = Bits::empty(); + let mut arr: [_; 4] = std::array::from_fn(|i| self.0[i].to_canonical_u64()); + for _ in 0..64 { + for j in 0..4 { + bits.push_bit(arr[j] & 1 == 1); + arr[j] >>= 1; + } + } + bits + } + + pub fn join(bits: Bits, rem_key: Self) -> Self { + let mut n = [0; 4]; + let mut accs = [0; 4]; + for i in 0..bits.count { + if bits.get_bit(i) { + accs[i % 4] |= 1 << n[i % 4]; + } + n[i % 4] += 1; + } + let key = std::array::from_fn(|i| { + F::from_canonical_u64((rem_key.0[i].to_canonical_u64() << n[i]) | accs[i]) + }); + Key(key) + } + + fn remove_key_bits(&self, nbits: usize) -> Self { + let full_levels = nbits / 4; + let mut auxk = self.0.map(|x| x.to_canonical_u64()); + for i in 0..4 { + let mut n = full_levels; + if full_levels * 4 + i < nbits { + n += 1; + } + auxk[i] >>= n; + } + Key(auxk.map(F::from_canonical_u64)) + } +} + +impl Node { + pub fn is_one_siblings(&self) -> bool { + self.0[8].is_one() + } +} + +/// Sparse Merkle tree (SMT). +/// Represented as a map from keys to leaves and a map from keys to internal +/// nodes. Leaves hold either a value node, representing an account in the state +/// SMT or a value in the storage SMT, or a hash node, representing a hash of a +/// subtree. Internal nodes hold the hashes of their children. +/// The root is the hash of the root internal node. +/// Leaves are hashed using a prefix of 0, internal nodes using a prefix of 1. +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub struct Smt { + pub db: D, + pub root: HashOut, +} + +impl Smt { + /// Returns `Poseidon(x, [0,0,0,0])` and save it in DB. 
+ pub fn hash0(&mut self, x: [F; 8]) -> [F; 4] { + let h = hash0(x); + let a = std::array::from_fn(|i| if i < 8 { x[i] } else { F::ZERO }); + self.db.set_node(Key(h), Node(a)); + h + } + + /// Returns `Poseidon(key || h, [1,0,0,0])` and save it in DB. + pub fn hash_key_hash(&mut self, k: Key, h: [F; 4]) -> [F; 4] { + let a: [_; 8] = std::array::from_fn(|i| if i < 4 { k.0[i] } else { h[i - 4] }); + let a = std::array::from_fn(|i| match i { + j if j < 8 => a[i], + 8 => F::ONE, + _ => F::ZERO, + }); + let h = hash_key_hash(k, h); + self.db.set_node(Key(h), Node(a)); + h + } + + /// Returns the value associated with the key if it is in the SMT, otherwise + /// returns 0. + pub fn get(&self, key: Key) -> U256 { + let keys = key.split(); + let mut level = 0; + let mut acc_key = Bits::empty(); + let mut r = Key(self.root.elements); + + while !r.0.iter().all(F::is_zero) { + let sibling = self.db.get_node(&r).unwrap(); + if sibling.is_one_siblings() { + let found_val_a: [F; 8] = self + .db + .get_node(&Key(sibling.0[4..8].try_into().unwrap())) + .unwrap() + .0[0..8] + .try_into() + .unwrap(); + let found_rem_key = Key(sibling.0[0..4].try_into().unwrap()); + let found_val = limbs2f(found_val_a); + let found_key = Key::join(acc_key, found_rem_key); + if found_key == key { + return found_val; + } else { + return U256::zero(); + } + } else { + let b = keys.get_bit(level as usize); + r = Key(sibling.0[b as usize * 4..(b as usize + 1) * 4] + .try_into() + .unwrap()); + acc_key.push_bit(b); + level += 1; + } + } + unreachable!() + } + + /// Set the value associated with the key in the SMT. + /// If the value is 0 and the key is in the SMT, the key is removed from the + /// SMT. Reference implementation in https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt.js. 
+ pub fn set(&mut self, key: Key, value: U256) { + let mut r = Key(self.root.elements); + let mut new_root = self.root; + let keys = key.split(); + let mut level = 0isize; + let mut acc_key = Bits::empty(); + let mut found_key = None; + let mut found_rem_key = None; + let mut found_old_val_h = None; + let mut siblings = vec![]; + + while !r.0.iter().all(F::is_zero) { + let sibling = self.db.get_node(&r).unwrap(); + siblings.push(*sibling); + if sibling.is_one_siblings() { + found_old_val_h = Some(sibling.0[4..8].try_into().unwrap()); + let found_val_a: [F; 8] = + self.db.get_node(&Key(found_old_val_h.unwrap())).unwrap().0[0..8] + .try_into() + .unwrap(); + found_rem_key = Some(Key(sibling.0[0..4].try_into().unwrap())); + let _found_val = limbs2f(found_val_a); + found_key = Some(Key::join(acc_key, found_rem_key.unwrap())); + break; + } else { + let b = keys.get_bit(level as usize); + r = Key(sibling.0[b as usize * 4..(b as usize + 1) * 4] + .try_into() + .unwrap()); + acc_key.push_bit(b); + level += 1; + } + } + + level -= 1; + if !acc_key.is_empty() { + acc_key.pop_next_bit(); + } + + if value.is_zero() { + if let Some(found_key) = found_key { + if key == found_key { + if level >= 0 { + let i = (keys.get_bit(level as usize) as usize) * 4; + siblings[level as usize].0[i..i + 4].copy_from_slice(&[F::ZERO; 4]); + let mut u_key = get_unique_sibling(siblings[level as usize]); + + if u_key >= 0 { + let k = siblings[level as usize].0 + [u_key as usize * 4..u_key as usize * 4 + 4] + .try_into() + .unwrap(); + siblings[(level + 1) as usize] = *self.db.get_node(&Key(k)).unwrap(); + if siblings[(level + 1) as usize].is_one_siblings() { + let val_h = + siblings[(level + 1) as usize].0[4..8].try_into().unwrap(); + let val_a = self.db.get_node(&Key(val_h)).unwrap().0[0..8] + .try_into() + .unwrap(); + let r_key = + siblings[(level + 1) as usize].0[0..4].try_into().unwrap(); + + let _val = limbs2f(val_a); + + assert!(u_key == 0 || u_key == 1); + let ins_key = 
Key::join(acc_key.add_bit(u_key != 0), Key(r_key)); + while (u_key >= 0) && (level >= 0) { + level -= 1; + if level >= 0 { + u_key = get_unique_sibling(siblings[level as usize]); + } + } + + let old_key = ins_key.remove_key_bits((level + 1) as usize); + let old_leaf_hash = self.hash_key_hash(old_key, val_h); + + if level >= 0 { + let b = keys.get_bit(level as usize) as usize * 4; + siblings[level as usize].0[b..b + 4] + .copy_from_slice(&old_leaf_hash); + } else { + new_root = HashOut { + elements: old_leaf_hash, + }; + } + } + } else { + panic!() + } + } else { + new_root = HashOut { + elements: [F::ZERO; 4], + }; + } + } + } + } else if let Some(found_key) = found_key { + if key == found_key { + let new_val_h = self.hash0(f2limbs(value)); + let new_leaf_hash = self.hash_key_hash(found_rem_key.unwrap(), new_val_h); + if level >= 0 { + let i = (keys.get_bit(level as usize) as usize) * 4; + siblings[level as usize].0[i..i + 4].copy_from_slice(&new_leaf_hash); + } else { + new_root = HashOut { + elements: new_leaf_hash, + }; + } + } else { + let mut node = [F::ZERO; 8]; + let mut level2 = level + 1; + let found_keys = found_key.split(); + while keys.get_bit(level2 as usize) == found_keys.get_bit(level2 as usize) { + level2 += 1; + } + let old_key = found_key.remove_key_bits(level2 as usize + 1); + let old_leaf_hash = self.hash_key_hash(old_key, found_old_val_h.unwrap()); + + let new_key = key.remove_key_bits(level2 as usize + 1); + let new_val_h = self.hash0(f2limbs(value)); + let new_leaf_hash = self.hash_key_hash(new_key, new_val_h); + + let b = keys.get_bit(level2 as usize) as usize * 4; + let bb = found_keys.get_bit(level2 as usize) as usize * 4; + node[b..b + 4].copy_from_slice(&new_leaf_hash); + node[bb..bb + 4].copy_from_slice(&old_leaf_hash); + + let mut r2 = self.hash0(node); + level2 -= 1; + + while level2 != level { + node = [F::ZERO; 8]; + let b = keys.get_bit(level2 as usize) as usize * 4; + node[b..b + 4].copy_from_slice(&r2); + + r2 = self.hash0(node); 
+ level2 -= 1; + } + + if level >= 0 { + let b = keys.get_bit(level as usize) as usize * 4; + siblings[level as usize].0[b..b + 4].copy_from_slice(&r2); + } else { + new_root = HashOut { elements: r2 }; + } + } + } else { + let new_key = key.remove_key_bits((level + 1) as usize); + let new_val_h = self.hash0(f2limbs(value)); + let new_leaf_hash = self.hash_key_hash(new_key, new_val_h); + + if level >= 0 { + let b = keys.get_bit(level as usize) as usize * 4; + siblings[level as usize].0[b..b + 4].copy_from_slice(&new_leaf_hash); + } else { + new_root = HashOut { + elements: new_leaf_hash, + }; + } + } + siblings.truncate((level + 1) as usize); + + while level >= 0 { + new_root = F::poseidon(siblings[level as usize].0)[0..4] + .try_into() + .unwrap(); + self.db + .set_node(Key(new_root.elements), siblings[level as usize]); + level -= 1; + if level >= 0 { + let b = keys.get_bit(level as usize) as usize * 4; + siblings[level as usize].0[b..b + 4].copy_from_slice(&new_root.elements); + } + } + self.root = new_root; + } + + /// Serialize the SMT into a vector of U256. + /// Starts with a [0, 0] for convenience, that way `ptr=0` is a canonical + /// empty node. Therefore the root of the SMT is at `ptr=2`. + /// Serialization rules: + /// ```pseudocode + /// serialize( HashNode { h } ) = [HASH_TYPE, h] + /// serialize( InternalNode { left, right } ) = [INTERNAL_TYPE, serialize(left).ptr, serialize(right).ptr] + /// serialize( LeafNode { rem_key, value } ) = [LEAF_TYPE, rem_key, value] + /// ``` + pub fn serialize(&self) -> Vec { + let mut v = vec![U256::zero(); 2]; // For empty hash node. + let key = Key(self.root.elements); + serialize(self, key, &mut v); + if v.len() == 2 { + v.extend([U256::zero(); 2]); + } + v + } +} + +fn serialize(smt: &Smt, key: Key, v: &mut Vec) -> usize { + if key.0.iter().all(F::is_zero) { + return 0; // `ptr=0` is an empty node. 
+ } + + if let Some(node) = smt.db.get_node(&key) { + if node.0.iter().all(F::is_zero) { + panic!("wtf?"); + } + + if node.is_one_siblings() { + let val_h = node.0[4..8].try_into().unwrap(); + let val_a = smt.db.get_node(&Key(val_h)).unwrap().0[0..8] + .try_into() + .unwrap(); + let rem_key = Key(node.0[0..4].try_into().unwrap()); + let val = limbs2f(val_a); + let index = v.len(); + v.push(LEAF_TYPE.into()); + v.push(key2u(rem_key)); + v.push(val); + index + } else { + let key_left = Key(node.0[0..4].try_into().unwrap()); + let key_right = Key(node.0[4..8].try_into().unwrap()); + let index = v.len(); + v.push(INTERNAL_TYPE.into()); + v.push(U256::zero()); + v.push(U256::zero()); + let i_left = serialize(smt, key_left, v).into(); + v[index + 1] = i_left; + let i_right = serialize(smt, key_right, v).into(); + v[index + 2] = i_right; + index + } + } else { + todo!("Add a hash node here."); + } +} + +/// Hash a serialized state SMT, i.e., one where leaves hold accounts. +pub fn hash_serialize(v: &[U256]) -> HashOut { + _hash_serialize(v, 2) +} + +pub fn hash_serialize_u256(v: &[U256]) -> U256 { + hashout2u(hash_serialize(v)) +} + +fn _hash_serialize(v: &[U256], ptr: usize) -> HashOut { + assert!(v[ptr] <= u8::MAX.into()); + match v[ptr].as_u64() as u8 { + HASH_TYPE => u2h(v[ptr + 1]), + + INTERNAL_TYPE => { + let mut node = Node([F::ZERO; 12]); + for b in 0..2 { + let child_index = v[ptr + 1 + b]; + let child_hash = _hash_serialize(v, child_index.as_usize()); + node.0[b * 4..(b + 1) * 4].copy_from_slice(&child_hash.elements); + } + F::poseidon(node.0)[0..4].try_into().unwrap() + } + LEAF_TYPE => { + let rem_key = u2k(v[ptr + 1]); + let value = f2limbs(v[ptr + 2]); + let value_h = hash0(value); + let mut node = Node([F::ZERO; 12]); + node.0[8] = F::ONE; + node.0[0..4].copy_from_slice(&rem_key.0); + node.0[4..8].copy_from_slice(&value_h); + F::poseidon(node.0)[0..4].try_into().unwrap() + } + _ => panic!("Should not happen"), + } +} diff --git a/smt_trie/src/smt_test.rs 
b/smt_trie/src/smt_test.rs new file mode 100644 index 000000000..fca18375f --- /dev/null +++ b/smt_trie/src/smt_test.rs @@ -0,0 +1,274 @@ +use ethereum_types::U256; +use plonky2::field::types::{Field, Sample}; +use rand::{thread_rng, Rng}; + +use crate::{ + db::MemoryDb, + smt::{hash_serialize, Key, Smt, F}, +}; + +#[test] +fn test_add_and_rem() { + let mut smt = Smt::::default(); + + let k = Key(F::rand_array()); + let v = U256(thread_rng().gen()); + smt.set(k, v); + assert_eq!(v, smt.get(k)); + + smt.set(k, U256::zero()); + assert_eq!(smt.root.elements, [F::ZERO; 4]); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_add_and_rem_hermez() { + let mut smt = Smt::::default(); + + let k = Key([F::ONE, F::ZERO, F::ZERO, F::ZERO]); + let v = U256::from(2); + smt.set(k, v); + assert_eq!(v, smt.get(k)); + assert_eq!( + smt.root.elements, + [ + 16483217357039062949, + 6830539605347455377, + 6826288191577443203, + 8219762152026661456 + ] + .map(F::from_canonical_u64) + ); + + smt.set(k, U256::zero()); + assert_eq!(smt.root.elements, [F::ZERO; 4]); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_update_element_1() { + let mut smt = Smt::::default(); + + let k = Key(F::rand_array()); + let v1 = U256(thread_rng().gen()); + let v2 = U256(thread_rng().gen()); + smt.set(k, v1); + let root = smt.root; + smt.set(k, v2); + smt.set(k, v1); + assert_eq!(smt.root, root); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_add_shared_element_2() { + let mut smt = Smt::::default(); + + let k1 = Key(F::rand_array()); + let k2 = Key(F::rand_array()); + assert_ne!(k1, k2, "Unlucky"); + let v1 = U256(thread_rng().gen()); + let v2 = U256(thread_rng().gen()); + smt.set(k1, v1); + smt.set(k2, v2); + smt.set(k1, U256::zero()); + smt.set(k2, U256::zero()); + assert_eq!(smt.root.elements, [F::ZERO; 4]); + + let ser = smt.serialize(); + 
assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_add_shared_element_3() { + let mut smt = Smt::::default(); + + let k1 = Key(F::rand_array()); + let k2 = Key(F::rand_array()); + let k3 = Key(F::rand_array()); + let v1 = U256(thread_rng().gen()); + let v2 = U256(thread_rng().gen()); + let v3 = U256(thread_rng().gen()); + smt.set(k1, v1); + smt.set(k2, v2); + smt.set(k3, v3); + smt.set(k1, U256::zero()); + smt.set(k2, U256::zero()); + smt.set(k3, U256::zero()); + assert_eq!(smt.root.elements, [F::ZERO; 4]); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_add_remove_128() { + let mut smt = Smt::::default(); + + let kvs = (0..128) + .map(|_| { + let k = Key(F::rand_array()); + let v = U256(thread_rng().gen()); + smt.set(k, v); + (k, v) + }) + .collect::>(); + for &(k, v) in &kvs { + smt.set(k, v); + } + for &(k, _) in &kvs { + smt.set(k, U256::zero()); + } + assert_eq!(smt.root.elements, [F::ZERO; 4]); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_should_read_random() { + let mut smt = Smt::::default(); + + let kvs = (0..128) + .map(|_| { + let k = Key(F::rand_array()); + let v = U256(thread_rng().gen()); + smt.set(k, v); + (k, v) + }) + .collect::>(); + for &(k, v) in &kvs { + smt.set(k, v); + } + for &(k, v) in &kvs { + assert_eq!(smt.get(k), v); + } + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_add_element_similar_key() { + let mut smt = Smt::::default(); + + let k1 = Key([F::ZERO; 4]); + let k2 = Key([F::from_canonical_u16(15), F::ZERO, F::ZERO, F::ZERO]); + let k3 = Key([F::from_canonical_u16(31), F::ZERO, F::ZERO, F::ZERO]); + let v1 = U256::from(2); + let v2 = U256::from(3); + smt.set(k1, v1); + smt.set(k2, v1); + smt.set(k3, v2); + + let expected_root = [ + 442750481621001142, + 12174547650106208885, + 10730437371575329832, + 4693848817100050981, + ] + .map(F::from_canonical_u64); + 
assert_eq!(smt.root.elements, expected_root); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_leaf_one_level_depth() { + let mut smt = Smt::::default(); + + let k0 = Key([ + 15508201873038097485, + 13226964191399612151, + 16289586894263066011, + 5039894867879804772, + ] + .map(F::from_canonical_u64)); + let k1 = Key([ + 844617937539064431, + 8280782215217712600, + 776954566881514913, + 1946423943169448778, + ] + .map(F::from_canonical_u64)); + let k2 = Key([ + 15434611863279822111, + 11975487827769517766, + 15368078704174133449, + 1970673199824226969, + ] + .map(F::from_canonical_u64)); + let k3 = Key([ + 4947646911082557289, + 4015479196169929139, + 8997983193975654297, + 9607383237755583623, + ] + .map(F::from_canonical_u64)); + let k4 = Key([ + 15508201873038097485, + 13226964191399612151, + 16289586894263066011, + 5039894867879804772, + ] + .map(F::from_canonical_u64)); + + let v0 = U256::from_dec_str( + "8163644824788514136399898658176031121905718480550577527648513153802600646339", + ) + .unwrap(); + let v1 = U256::from_dec_str( + "115792089237316195423570985008687907853269984665640564039457584007913129639934", + ) + .unwrap(); + let v2 = U256::from_dec_str( + "115792089237316195423570985008687907853269984665640564039457584007913129639935", + ) + .unwrap(); + let v3 = U256::from_dec_str("7943875943875408").unwrap(); + let v4 = U256::from_dec_str( + "35179347944617143021579132182092200136526168785636368258055676929581544372820", + ) + .unwrap(); + + smt.set(k0, v0); + smt.set(k1, v1); + smt.set(k2, v2); + smt.set(k3, v3); + smt.set(k4, v4); + + let expected_root = [ + 13590506365193044307, + 13215874698458506886, + 4743455437729219665, + 1933616419393621600, + ] + .map(F::from_canonical_u64); + assert_eq!(smt.root.elements, expected_root); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} + +#[test] +fn test_no_write_0() { + let mut smt = Smt::::default(); + + let k1 = 
Key(F::rand_array()); + let k2 = Key(F::rand_array()); + let v = U256(thread_rng().gen()); + smt.set(k1, v); + let root = smt.root; + smt.set(k2, U256::zero()); + assert_eq!(smt.root, root); + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); +} diff --git a/smt_trie/src/utils.rs b/smt_trie/src/utils.rs new file mode 100644 index 000000000..267b6b8e9 --- /dev/null +++ b/smt_trie/src/utils.rs @@ -0,0 +1,89 @@ +use ethereum_types::U256; +use plonky2::field::types::{Field, PrimeField64}; +use plonky2::hash::poseidon::Poseidon; + +use crate::smt::{HashOut, Key, Node, F}; + +/// Returns `Poseidon(x, [0,0,0,0])`. +pub(crate) fn hash0(x: [F; 8]) -> [F; 4] { + F::poseidon(std::array::from_fn(|i| if i < 8 { x[i] } else { F::ZERO }))[0..4] + .try_into() + .unwrap() +} + +/// Returns `Poseidon(x, [1,0,0,0])`. +pub(crate) fn hash1(x: [F; 8]) -> [F; 4] { + F::poseidon(std::array::from_fn(|i| match i { + j if j < 8 => x[i], + 8 => F::ONE, + _ => F::ZERO, + }))[0..4] + .try_into() + .unwrap() +} + +/// Returns `Poseidon(key || h, [1,0,0,0])`. +pub(crate) fn hash_key_hash(k: Key, h: [F; 4]) -> [F; 4] { + hash1(std::array::from_fn( + |i| if i < 4 { k.0[i] } else { h[i - 4] }, + )) +} + +/// Split a U256 into 8 32-bit limbs in little-endian order. +pub(crate) fn f2limbs(x: U256) -> [F; 8] { + std::array::from_fn(|i| F::from_canonical_u32((x >> (32 * i)).low_u32())) +} + +/// Pack 8 32-bit limbs in little-endian order into a U256. +pub(crate) fn limbs2f(limbs: [F; 8]) -> U256 { + limbs + .into_iter() + .enumerate() + .fold(U256::zero(), |acc, (i, x)| { + acc + (U256::from(x.to_canonical_u64()) << (i * 32)) + }) +} + +/// Convert a `HashOut` to a `U256`. +pub fn hashout2u(h: HashOut) -> U256 { + key2u(Key(h.elements)) +} + +/// Convert a `Key` to a `U256`. +pub fn key2u(key: Key) -> U256 { + U256(key.0.map(|x| x.to_canonical_u64())) +} + +/// Convert a `U256` to a `Hashout`. 
+pub(crate) fn u2h(x: U256) -> HashOut { + HashOut { + elements: x.0.map(F::from_canonical_u64), + } +} + +/// Convert a `U256` to a `Key`. +pub(crate) fn u2k(x: U256) -> Key { + Key(x.0.map(F::from_canonical_u64)) +} + +/// Given a node, return the index of the unique non-zero sibling, or -1 if +/// there is no such sibling. +pub(crate) fn get_unique_sibling(node: Node) -> isize { + let mut nfound = 0; + let mut fnd = 0; + for i in (0..12).step_by(4) { + if !(node.0[i].is_zero() + && node.0[i + 1].is_zero() + && node.0[i + 2].is_zero() + && node.0[i + 3].is_zero()) + { + nfound += 1; + fnd = i as isize / 4; + } + } + if nfound == 1 { + fnd + } else { + -1 + } +} diff --git a/trace_decoder/Cargo.toml b/trace_decoder/Cargo.toml index 2d4dab80f..447325ea0 100644 --- a/trace_decoder/Cargo.toml +++ b/trace_decoder/Cargo.toml @@ -27,8 +27,10 @@ serde_with = "3.4.0" thiserror = { workspace = true } # Local dependencies -mpt_trie = { version = "0.2.0", path = "../mpt_trie" } -evm_arithmetization = { version = "0.1.2", path = "../evm_arithmetization" } +# TODO: update decoder to take local versions again +mpt_trie = { git = "https://github.com/0xPolygonZero/zk_evm", branch = "develop" } +evm_arithmetization = { git = "https://github.com/0xPolygonZero/zk_evm", branch = "develop" } [dev-dependencies] pretty_env_logger = "0.5.0" + From 7ee36ed903712221b4ec9e43ee16f49d80a7ac5c Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Thu, 4 Apr 2024 18:55:39 +0900 Subject: [PATCH 02/19] Skip trie loading when being passed default inputs (#141) --- evm_arithmetization/src/generation/mpt.rs | 10 ++++++++++ evm_arithmetization/src/generation/state.rs | 4 ++++ 2 files changed, 14 insertions(+) diff --git a/evm_arithmetization/src/generation/mpt.rs b/evm_arithmetization/src/generation/mpt.rs index 8e1a775e1..89dcc2fc3 100644 --- a/evm_arithmetization/src/generation/mpt.rs +++ b/evm_arithmetization/src/generation/mpt.rs @@ -32,6 +32,16 @@ pub 
struct TrieRootPtrs { pub receipt_root_ptr: usize, } +impl Default for TrieRootPtrs { + fn default() -> Self { + Self { + state_root_ptr: 2, + txn_root_ptr: 0, + receipt_root_ptr: 0, + } + } +} + impl Default for AccountRlp { fn default() -> Self { Self { diff --git a/evm_arithmetization/src/generation/state.rs b/evm_arithmetization/src/generation/state.rs index 82ab141a6..7ea2fd4d6 100644 --- a/evm_arithmetization/src/generation/state.rs +++ b/evm_arithmetization/src/generation/state.rs @@ -307,6 +307,10 @@ pub(crate) struct GenerationState { impl GenerationState { fn preinitialize_mpts(&mut self, trie_inputs: &TrieInputs) -> TrieRootPtrs { + if trie_inputs.state_smt == TrieInputs::default().state_smt { + return TrieRootPtrs::default(); + } + let (trie_roots_ptrs, trie_data) = load_all_mpts(trie_inputs).expect("Invalid MPT data for preinitialization"); From ca06c9dd48616f6ea67880f2a0a7280db68f03d3 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 4 Apr 2024 14:32:55 +0200 Subject: [PATCH 03/19] Fix SELFDESTRUCT in the type2 using EIP-6780 (#130) * Cherry-pick https://github.com/0xPolygonZero/plonky2/commit/1d8a8416a8146675bbe639157554bcd8c6780620 * Add new segment * Delete slots * Newline * Minor * Remove ignore and rlp alloc --- .../asm/core/create_contract_account.asm | 20 ++- .../cpu/kernel/asm/core/selfdestruct_list.asm | 9 +- .../src/cpu/kernel/asm/core/terminate.asm | 70 ++++++-- .../src/cpu/kernel/asm/core/transfer.asm | 4 +- .../kernel/asm/journal/account_created.asm | 18 +- .../kernel/asm/mpt/storage/storage_write.asm | 38 ++++ .../src/cpu/kernel/asm/smt/delete.asm | 40 ++++- .../cpu/kernel/asm/transactions/type_0.asm | 1 - .../cpu/kernel/constants/global_metadata.rs | 14 +- evm_arithmetization/src/memory/segments.rs | 13 +- evm_arithmetization/tests/selfdestruct.rs | 162 ++++++++++++++++++ 11 files changed, 365 insertions(+), 24 deletions(-) diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm 
b/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm index 512dd37a0..fb547aea3 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/create_contract_account.asm @@ -23,7 +23,13 @@ %%add_account: // stack: address - DUP1 %journal_add_account_created + DUP1 PUSH 1 + // stack: is_contract, address, address + %journal_add_account_created + // stack: address + DUP1 + %append_created_contracts + // stack: address PUSH 0 %%do_insert: // stack: new_acct_value, address @@ -49,3 +55,15 @@ %%end: // stack: status %endmacro + +%macro append_created_contracts + // stack: address + %mload_global_metadata(@GLOBAL_METADATA_CREATED_CONTRACTS_LEN) + // stack: nb_created_contracts, address + SWAP1 DUP2 + // stack: nb_created_contracts, address, nb_created_contracts + %mstore_kernel(@SEGMENT_CREATED_CONTRACTS) + // stack: nb_created_contracts + %increment + %mstore_global_metadata(@GLOBAL_METADATA_CREATED_CONTRACTS_LEN) +%endmacro diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/selfdestruct_list.asm b/evm_arithmetization/src/cpu/kernel/asm/core/selfdestruct_list.asm index 258f79405..13d411a72 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/selfdestruct_list.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/selfdestruct_list.asm @@ -14,7 +14,7 @@ %endmacro /// Remove one occurrence of the address from the list. -/// Panics if the address is not in the list. +/// No effect if the address is not in the list. 
global remove_selfdestruct_list: // stack: addr, retdest %mload_global_metadata(@GLOBAL_METADATA_SELFDESTRUCT_LIST_LEN) @@ -24,7 +24,7 @@ global remove_selfdestruct_list: remove_selfdestruct_list_loop: // `i` and `len` are both scaled by SEGMENT_SELFDESTRUCT_LIST %stack (i, len, addr, retdest) -> (i, len, i, len, addr, retdest) - EQ %jumpi(panic) + EQ %jumpi(remove_selfdestruct_not_found) // stack: i, len, addr, retdest DUP1 MLOAD_GENERAL // stack: loaded_addr, i, len, addr, retdest @@ -46,6 +46,11 @@ remove_selfdestruct_list_found: MSTORE_GENERAL // Store the last address at the position of the removed address. JUMP +remove_selfdestruct_not_found: + // stack: i, len, addr, retdest + %pop3 + JUMP + global delete_all_selfdestructed_addresses: // stack: retdest %mload_global_metadata(@GLOBAL_METADATA_SELFDESTRUCT_LIST_LEN) diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm b/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm index 5528eb7e4..029755f59 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/terminate.asm @@ -85,24 +85,22 @@ global sys_selfdestruct: %charge_gas %stack (kexit_info, balance, address, recipient) -> (balance, address, recipient, kexit_info) - // Insert address into the selfdestruct set. - // stack: balance, address, recipient, kexit_info - DUP2 %insert_selfdestruct_list - // Set the balance of the address to 0. // stack: balance, address, recipient, kexit_info DUP1 ISZERO %jumpi(selfdestruct_balance_is_zero) DUP2 %key_balance %smt_delete_state // stack: balance, address, recipient, kexit_info selfdestruct_balance_is_zero: - %stack (balance, address, recipient, kexit_info) -> - (address, recipient, address, recipient, balance, kexit_info) + // EIP-6780: insert address into the selfdestruct set only if contract has been created + // during the current transaction. 
+ // stack: balance, address, recipient, kexit_info + DUP2 %contract_just_created + // stack: is_just_created, balance, address, recipient, kexit_info + %jumpi(sys_selfdestruct_just_created) - // If the recipient is the same as the address, then we're done. - // Otherwise, send the balance to the recipient. - // stack: address, recipient, address, recipient, balance, kexit_info - EQ %jumpi(sys_selfdestruct_journal_add) - %stack (address, recipient, balance, kexit_info) -> (recipient, balance, address, recipient, balance, kexit_info) + // Send the balance to the recipient. + %stack (balance, address, recipient, kexit_info) -> + (recipient, balance, address, recipient, balance, kexit_info) %add_eth sys_selfdestruct_journal_add: @@ -115,6 +113,21 @@ sys_selfdestruct_journal_add: PUSH 1 // success %jump(terminate_common) +sys_selfdestruct_just_created: + // Send funds to beneficiary only if the recipient isn't the same as the address. + %stack (balance, address, recipient, kexit_info) -> (address, recipient, balance, address, recipient, balance, kexit_info) + EQ ISZERO + // stack: address ≠ recipient, balance, address, recipient, balance, kexit_info + MUL + // stack: maybe_balance, address, recipient, balance, kexit_info + DUP3 + // stack: recipient, maybe_balance, address, recipient, balance, kexit_info + %add_eth + // stack: address, recipient, balance, kexit_info + DUP1 + %insert_selfdestruct_list + %jump(sys_selfdestruct_journal_add) + global sys_revert: // stack: kexit_info, offset, size %stack (kexit_info, offset, size) -> (offset, size, kexit_info, offset, size) @@ -219,3 +232,38 @@ global terminate_common: // stack: parent_pc, success, leftover_gas JUMP + +// Returns 1 if the address is present in SEGMENT_CREATED_CONTRACTS, meaning that it has been +// created during this transaction. Returns 0 otherwise. 
+// Pre stack: addr +// Post stack: is_just_created +%macro contract_just_created + // stack: addr + %mload_global_metadata(@GLOBAL_METADATA_CREATED_CONTRACTS_LEN) + // stack: nb_created_contracts, addr + PUSH 0 +%%contract_just_created_loop: + %stack (i, nb_created_contracts, addr) -> (i, nb_created_contracts, i, nb_created_contracts, addr) + EQ %jumpi(%%contract_just_created_false) + // stack: i, nb_created_contracts, addr + DUP1 %mload_kernel(@SEGMENT_CREATED_CONTRACTS) + // stack: addr_created_contract, i, nb_created_contracts, addr + DUP4 + // stack: addr, addr_created_contract, i, nb_created_contracts, addr + EQ %jumpi(%%contract_just_created_true) + // stack: i, nb_created_contracts, addr + %increment + %jump(%%contract_just_created_loop) +%%contract_just_created_true: + // stack: i, nb_created_contracts, addr + %pop3 + PUSH 1 + // stack: 1 + %jump(%%after) +%%contract_just_created_false: + // stack: i, nb_created_contracts, addr + %pop3 + PUSH 0 + // stack: 0 +%%after: +%endmacro diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm b/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm index 148d37d1a..247bd3dc3 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/transfer.asm @@ -96,7 +96,9 @@ add_eth_zero_balance: global add_eth_new_account: // stack: addr, amount, retdest - DUP1 %journal_add_account_created + DUP1 PUSH 0 + // stack: is_eoa, addr, amount, retdest + %journal_add_account_created // stack: addr, amount, retdest DUP1 %key_code %stack (key_code) -> (key_code, @EMPTY_STRING_POSEIDON_HASH) diff --git a/evm_arithmetization/src/cpu/kernel/asm/journal/account_created.asm b/evm_arithmetization/src/cpu/kernel/asm/journal/account_created.asm index 4748d5cbc..2fd4c15fa 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/journal/account_created.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/journal/account_created.asm @@ -1,13 +1,23 @@ -// struct AccountCreated { 
address } +// struct AccountCreated { account_type, address } +// account_type is 0 for an EOA, 1 for a contract. %macro journal_add_account_created - %journal_add_1(@JOURNAL_ENTRY_ACCOUNT_CREATED) + %journal_add_2(@JOURNAL_ENTRY_ACCOUNT_CREATED) %endmacro global revert_account_created: // stack: entry_type, ptr, retdest POP - %journal_load_1 - // stack: address, retdest + %journal_load_2 + // stack: account_type, address, retdest + %jumpi(decrement_created_contracts_len) + +revert_account_finish: %delete_account JUMP + +decrement_created_contracts_len: + %mload_global_metadata(@GLOBAL_METADATA_CREATED_CONTRACTS_LEN) + %decrement + %mstore_global_metadata(@GLOBAL_METADATA_CREATED_CONTRACTS_LEN) + %jump(revert_account_finish) diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm index a46375894..bb2796675 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/storage/storage_write.asm @@ -103,6 +103,13 @@ sstore_after_refund: %stack (kexit_info, current_value, slot, value) -> (value, current_value, current_value, slot, value, kexit_info) EQ %jumpi(sstore_noop) + // stack: current_value, slot, value, kexit_info + DUP1 ISZERO + // stack: current_value==0, current_value, slot, value, kexit_info + DUP4 MUL + // stack: value & current_value==0, current_value, slot, value, kexit_info + %jumpi(new_storage_slot) +global not_new_storage_slot: // stack: current_value, slot, value, kexit_info DUP2 %address %journal_add_storage_change // stack: slot, value, kexit_info @@ -135,3 +142,34 @@ sstore_delete: %smt_delete_state // stack: value, kexit_info POP EXIT_KERNEL + +%macro insert_new_storage_slot + // stack: address, slot + %mload_global_metadata(@GLOBAL_METADATA_NEW_STORAGE_SLOTS_LEN) + // stack: list_len, address, slot + DUP1 %add_const(@SEGMENT_NEW_STORAGE_SLOTS) + // stack: index, list_len, address, 
slot + DUP1 %add_const(1) + %stack (index_plus_1, index, list_len, address, slot) -> (address, index, slot, index_plus_1, list_len) + MSTORE_GENERAL MSTORE_GENERAL + // stack: list_len + %add_const(2) + // stack: list_len+2 + %mstore_global_metadata(@GLOBAL_METADATA_NEW_STORAGE_SLOTS_LEN) + // stack: (empty) +%endmacro + +new_storage_slot: + // stack: current_value, slot, value, kexit_info + %address DUP1 %contract_just_created + // stack: contract_just_created, address, current_value, slot, value, kexit_info + %jumpi(new_storage_slot_new_contract) + // stack: address, current_value, slot, value, kexit_info + POP %jump(not_new_storage_slot) +new_storage_slot_new_contract: + // stack: address, current_value, slot, value, kexit_info + DUP3 SWAP1 + // stack: address, slot, current_value, slot, value, kexit_info + %insert_new_storage_slot + // stack: current_value, slot, value, kexit_info + %jump(not_new_storage_slot) diff --git a/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm b/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm index 3f131704a..701bd4193 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/smt/delete.asm @@ -245,7 +245,45 @@ zero_code: zero_code_length: // N.B.: We don't delete the storage, since there's no way of knowing keys used. 
// stack: key_code_length, address, retdest - %pop2 JUMP + POP + // stack: address, retdest + %mload_global_metadata(@GLOBAL_METADATA_NEW_STORAGE_SLOTS_LEN) + // stack: slots_len, address, retdest + PUSH 0 + // stack: i, slots_len, address, retdest +delete_storage_slots_loop: + // stack: i, slots_len, address, retdest + DUP2 DUP2 EQ %jumpi(delete_storage_slots_loop_end) + // stack: i, slots_len, address, retdest + DUP1 %add_const(@SEGMENT_NEW_STORAGE_SLOTS) + // stack: addr_index, i, slots_len, address, retdest + MLOAD_GENERAL + // stack: slot_addr, i, slots_len, address, retdest + DUP4 EQ + // stack: address==slot_addr, i, slots_len, address, retdest + %jumpi(delete_storage_slot) + // stack: i, slots_len, address, retdest + %add_const(2) %jump(delete_storage_slots_loop) +delete_storage_slot: + // stack: i, slots_len, address, retdest + DUP1 %increment %add_const(@SEGMENT_NEW_STORAGE_SLOTS) + // stack: slot_index, i, slots_len, address, retdest + MLOAD_GENERAL + // stack: slot, i, slots_len, address, retdest + DUP4 %key_storage + // stack: key_storage, i, slots_len, address, retdest + DUP1 %smt_read_state ISZERO %jumpi(zero_slot) + // stack: key_storage, i, slots_len, address, retdest + DUP1 %smt_delete_state +zero_slot: + // stack: key_storage, i, slots_len, address, retdest + POP + // stack: i, slots_len, address, retdest + %add_const(2) %jump(delete_storage_slots_loop) + +delete_storage_slots_loop_end: + // stack: i, slots_len, address, retdest + %pop3 JUMP %macro delete_account %stack (address) -> (address, %%after) diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm index 6eaf019c1..12d105b8a 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm @@ -84,7 +84,6 @@ type_0_compute_signed_data: // otherwise, it is // keccak256(rlp([nonce, gas_price, gas_limit, to, value, data])) - 
%alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. %alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_NONCE) diff --git a/evm_arithmetization/src/cpu/kernel/constants/global_metadata.rs b/evm_arithmetization/src/cpu/kernel/constants/global_metadata.rs index 4baa18f48..9e8bb0f7e 100644 --- a/evm_arithmetization/src/cpu/kernel/constants/global_metadata.rs +++ b/evm_arithmetization/src/cpu/kernel/constants/global_metadata.rs @@ -85,7 +85,7 @@ pub(crate) enum GlobalMetadata { ContractCreation, IsPrecompileFromEoa, CallStackDepth, - /// Transaction logs list length + /// Transaction logs list length. LogsLen, LogsDataLen, LogsPayloadLen, @@ -94,10 +94,16 @@ pub(crate) enum GlobalMetadata { KernelHash, KernelLen, + + /// Number of created contracts during the current transaction. + CreatedContractsLen, + + /// Number of used storage slots in newly created contracts. + NewStorageSlotsLen, } impl GlobalMetadata { - pub(crate) const COUNT: usize = 47; + pub(crate) const COUNT: usize = 49; /// Unscales this virtual offset by their respective `Segment` value. 
pub(crate) const fn unscale(&self) -> usize { @@ -153,6 +159,8 @@ impl GlobalMetadata { Self::TxnNumberAfter, Self::KernelHash, Self::KernelLen, + Self::CreatedContractsLen, + Self::NewStorageSlotsLen, ] } @@ -206,6 +214,8 @@ impl GlobalMetadata { Self::TxnNumberAfter => "GLOBAL_METADATA_TXN_NUMBER_AFTER", Self::KernelHash => "GLOBAL_METADATA_KERNEL_HASH", Self::KernelLen => "GLOBAL_METADATA_KERNEL_LEN", + Self::CreatedContractsLen => "GLOBAL_METADATA_CREATED_CONTRACTS_LEN", + Self::NewStorageSlotsLen => "GLOBAL_METADATA_NEW_STORAGE_SLOTS_LEN", } } } diff --git a/evm_arithmetization/src/memory/segments.rs b/evm_arithmetization/src/memory/segments.rs index 407fa0977..555022a09 100644 --- a/evm_arithmetization/src/memory/segments.rs +++ b/evm_arithmetization/src/memory/segments.rs @@ -73,10 +73,15 @@ pub(crate) enum Segment { ContextCheckpoints = 32 << SEGMENT_SCALING_FACTOR, /// List of 256 previous block hashes. BlockHashes = 33 << SEGMENT_SCALING_FACTOR, + /// List of contracts which have been created during the current + /// transaction. + CreatedContracts = 34 << SEGMENT_SCALING_FACTOR, + /// List of used storage slots in newly created contracts. + NewStorageSlots = 35 << SEGMENT_SCALING_FACTOR, } impl Segment { - pub(crate) const COUNT: usize = 34; + pub(crate) const COUNT: usize = 36; /// Unscales this segment by `SEGMENT_SCALING_FACTOR`. 
pub(crate) const fn unscale(&self) -> usize { @@ -119,6 +124,8 @@ impl Segment { Self::TouchedAddresses, Self::ContextCheckpoints, Self::BlockHashes, + Self::CreatedContracts, + Self::NewStorageSlots, ] } @@ -159,6 +166,8 @@ impl Segment { Segment::TouchedAddresses => "SEGMENT_TOUCHED_ADDRESSES", Segment::ContextCheckpoints => "SEGMENT_CONTEXT_CHECKPOINTS", Segment::BlockHashes => "SEGMENT_BLOCK_HASHES", + Segment::CreatedContracts => "SEGMENT_CREATED_CONTRACTS", + Segment::NewStorageSlots => "SEGMENT_NEW_STORAGE_SLOTS", } } @@ -198,6 +207,8 @@ impl Segment { Segment::TouchedAddresses => 256, Segment::ContextCheckpoints => 256, Segment::BlockHashes => 256, + Segment::CreatedContracts => 256, + Segment::NewStorageSlots => 256, } } } diff --git a/evm_arithmetization/tests/selfdestruct.rs b/evm_arithmetization/tests/selfdestruct.rs index 1d29b7fc8..924afea54 100644 --- a/evm_arithmetization/tests/selfdestruct.rs +++ b/evm_arithmetization/tests/selfdestruct.rs @@ -103,12 +103,17 @@ fn test_selfdestruct() -> anyhow::Result<()> { balance: eth_to_wei(110_000.into()) - 26_002 * 0xa, ..Default::default() }; + let to_account_after = AccountRlp { + balance: U256::zero(), + ..to_account_before + }; set_account( &mut smt, H160(sender), &sender_account_after, &HashMap::new(), ); + set_account(&mut smt, H160(to), &to_account_after, &HashMap::new()); smt }; @@ -158,6 +163,163 @@ fn test_selfdestruct() -> anyhow::Result<()> { verify_proof(&all_stark, proof, &config) } +#[test] +fn test_selfdestruct_with_storage() -> anyhow::Result<()> { + init_logger(); + + let all_stark = AllStark::::default(); + let config = StarkConfig::standard_fast_config(); + + let beneficiary = hex!("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"); + let sender = hex!("5eb96AA102a29fAB267E12A40a5bc6E9aC088759"); + let to = hex!("a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0"); + + let sender_account_before = AccountRlp { + nonce: 5.into(), + balance: eth_to_wei(100_000.into()), + ..Default::default() + }; + let 
initcode = vec![ + 0x42, // TIMESTAMP + 0x60, 0x01, // PUSH1 1 + 0x80, // DUP1 + 0x55, // SSTORE + 0x32, // ORIGIN + 0xFF, // SELFDESTRUCT + ]; + let code = [ + vec![ + 0x66, // PUSH7 + ], + initcode, + vec![ + 0x5F, // PUSH0 + 0x52, // MSTORE + 0x60, 0x07, // PUSH1 7 + 0x60, 0x19, // PUSH1 25 + 0x5F, // PUSH0 + 0xF0, // CREATE + ], + ] + .concat(); + let to_account_before = AccountRlp { + nonce: 12.into(), + balance: eth_to_wei(10_000.into()), + code_hash: hash_bytecode_u256(code.clone()), + ..Default::default() + }; + + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); + + let tries_before = TrieInputs { + state_smt: state_smt_before.serialize(), + transactions_trie: HashedPartialTrie::from(Node::Empty), + receipts_trie: HashedPartialTrie::from(Node::Empty), + }; + + // Generated using a little py-evm script. 
+ let txn = hex!("f868050a831e848094a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0880de0b6b3a76400008025a09bab8db7d72e4b42cba8b117883e16872966bae8e4570582de6ed0065e8c36a1a01256d44d982c75e0ab7a19f61ab78afa9e089d51c8686fdfbee085a5ed5d8ff8"); + + let block_metadata = BlockMetadata { + block_beneficiary: Address::from(beneficiary), + block_timestamp: 0x03e8.into(), + block_number: 1.into(), + block_difficulty: 0x020000.into(), + block_random: H256::from_uint(&0x020000.into()), + block_gaslimit: 0xff112233u32.into(), + block_chain_id: 1.into(), + block_base_fee: 0xa.into(), + block_gas_used: 80131.into(), + block_bloom: [0.into(); 8], + }; + + let contract_code = [ + (hash_bytecode_u256(code.clone()), code), + (hash_bytecode_u256(vec![]), vec![]), + ] + .into(); + + let value = eth_to_wei(1.into()); + let expected_state_smt_after = { + let mut smt = Smt::::default(); + let sender_account_after = AccountRlp { + nonce: sender_account_before.nonce + 1, + balance: sender_account_before.balance - 80131 * 0xa - value, + ..sender_account_before + }; + let to_account_after = AccountRlp { + nonce: to_account_before.nonce + 1, + balance: to_account_before.balance + value, + ..to_account_before + }; + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to), &to_account_after, &HashMap::new()); + smt + }; + + let receipt_0 = LegacyReceiptRlp { + status: true, + cum_gas_used: 80131.into(), + bloom: vec![0; 256].into(), + logs: vec![], + }; + let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + receipts_trie.insert( + Nibbles::from_str("0x80").unwrap(), + rlp::encode(&receipt_0).to_vec(), + )?; + let transactions_trie: HashedPartialTrie = Node::Leaf { + nibbles: Nibbles::from_str("0x80").unwrap(), + value: txn.to_vec(), + } + .into(); + + let trie_roots_after = TrieRoots { + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), + transactions_root: transactions_trie.hash(), + receipts_root: 
receipts_trie.hash(), + }; + let inputs = GenerationInputs { + signed_txn: Some(txn.to_vec()), + withdrawals: vec![], + tries: tries_before, + trie_roots_after, + contract_code, + checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + block_metadata, + txn_number_before: 0.into(), + gas_used_before: 0.into(), + gas_used_after: 80131.into(), + block_hashes: BlockHashes { + prev_hashes: vec![H256::default(); 256], + cur_hash: H256::default(), + }, + }; + + let mut timing = TimingTree::new("prove", log::Level::Debug); + let proof = prove::(&all_stark, &config, inputs, &mut timing, None)?; + timing.filter(Duration::from_millis(100)).print(); + + verify_proof(&all_stark, proof, &config) +} + fn eth_to_wei(eth: U256) -> U256 { // 1 ether = 10^18 wei. eth * U256::from(10).pow(18.into()) From 3384aeba547841207282fee6981cf14409226fda Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 4 Apr 2024 16:27:06 +0200 Subject: [PATCH 04/19] Add rlp alloc in type0 (#142) --- evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm | 1 + 1 file changed, 1 insertion(+) diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm index 12d105b8a..6eaf019c1 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm @@ -84,6 +84,7 @@ type_0_compute_signed_data: // otherwise, it is // keccak256(rlp([nonce, gas_price, gas_limit, to, value, data])) + %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. 
%alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_NONCE) From 58219f4f9234a2aca47d4074794ed08d59b96b47 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Fri, 5 Apr 2024 15:14:19 +0900 Subject: [PATCH 05/19] Fix data overwrite in RLP segment (#145) --- evm_arithmetization/src/cpu/kernel/asm/main.asm | 4 ++++ .../src/cpu/kernel/asm/transactions/type_0.asm | 1 - .../src/cpu/kernel/asm/transactions/type_1.asm | 1 - .../src/cpu/kernel/asm/transactions/type_2.asm | 1 - 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/evm_arithmetization/src/cpu/kernel/asm/main.asm b/evm_arithmetization/src/cpu/kernel/asm/main.asm index 45e573856..810e799ba 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/main.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/main.asm @@ -17,7 +17,11 @@ global main: // Initialize the RLP DATA pointer to its initial position, // skipping over the preinitialized empty node. + // Because hashing with the SMT doesn't require RLP encoding, + // we shift the initial pointer by MAX_RLP_BLOB_SIZE to not + // overwrite any transaction field. PUSH @INITIAL_TXN_RLP_ADDR + %add_const(@MAX_RLP_BLOB_SIZE) %mstore_global_metadata(@GLOBAL_METADATA_RLP_DATA_SIZE) // Encode constant nodes diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm index 6eaf019c1..12d105b8a 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_0.asm @@ -84,7 +84,6 @@ type_0_compute_signed_data: // otherwise, it is // keccak256(rlp([nonce, gas_price, gas_limit, to, value, data])) - %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. 
%alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_NONCE) diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm index ecbe37333..f8a7a556e 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_1.asm @@ -38,7 +38,6 @@ global process_type_1_txn: // The signatureYParity, signatureR, signatureS elements of this transaction represent a secp256k1 signature // over keccak256(0x01 || rlp([chainId, nonce, gasPrice, gasLimit, to, value, data, accessList])). type_1_compute_signed_data: - %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. %alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_CHAIN_ID) diff --git a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm index e1b6bfcd5..41bdfd4ed 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/transactions/type_2.asm @@ -41,7 +41,6 @@ global process_type_2_txn: // The signature_y_parity, signature_r, signature_s elements of this transaction represent a secp256k1 signature over // keccak256(0x02 || rlp([chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, destination, amount, data, access_list])) type_2_compute_signed_data: - %alloc_rlp_block POP // Doesn't work otherwise. TODO: Figure out why. 
%alloc_rlp_block // stack: rlp_addr_start, retdest %mload_txn_field(@TXN_FIELD_CHAIN_ID) From afdf9ade0f759959ab84987f483925291ec60ca3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alonso=20Gonz=C3=A1lez?= Date: Mon, 8 Apr 2024 11:08:13 +0200 Subject: [PATCH 06/19] Add `POSEIDON_GENERAL` (#129) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Working with dummy hash * Use keccak * Add account * Clippy * Support storage trie * Use remaining key and clean * Write pointer to leaf data in serialization * Minor * Comments * Cleaning * Cleaning * readme * Fix plonky2 dep * Fix dep * minor * Remove dbg * Squashed 'evm_arithmetization/' changes from d547f8d5..5ec32fcb 5ec32fcb FMT c1728d4e Merge remote-tracking branch 'mir/main' into tmp-type2/hermez 01bb7974 Add Poseidon constraints e695ae18 Tests passing 207c54df Poseidon STARK 02d6435f Poseidon files 710225c9 Simulate jumpdest data with the interpreter (#1489) 3ec1bfdd Update `starky` and leverage it as dependency for `plonky2_evm` (#1503) f0ffb811 Fix tests 9f68d71d Fix tests 5a385392 Fix interpreter b6fec06c Fix nightly build (ahash issue) (#1524) b600142c Cleanup `alloc` / `std` imports for plonky2 (#1518) 6b39fc90 Remove risk of panics in interpreter (#1519) d4213223 Fix dep 756d3a27 Clippy e46112e3 Cleaning 061e70ce All tests passing d2598bde Revert "Remove StarkProofWithMetadata (#1497)" (#1502) 4ea4807c Passing erc20 test 1a08e783 Fix no-std tests and add corresponding jobs in the CI (#1501) 06444eaa Switch permutation argument for logUp in `starky` (#1496) fb8452de Merge pull request #1500 from topos-protocol/ci 246c2b62 Fix workflow 8f919133 Fix nightly version 212f29cf Add missing constraints mentioned by auditors (#1499) af0259c5 Remove StarkProofWithMetadata (#1497) f3f7433c Some cleanup (#1498) 63579636 Reorganize lookup / ctl modules (#1495) e502a0df Make CTLs more generic (#1493) 1dc22b76 Fix BaseSumGenerator and BaseSplitGenerator Ids (#1494) f76245e2 Cleanup imports 
(#1492) a9060e61 Add LA audit report 36e62a13 Use usize::BITS and wrapping_shr in reverse_index_bits_in_place_small (#1478) 14bb5bdb Use web_time crate instead of std::time (#1481) eff7cc0f Fix circuit sizes (#1484) ca2e56e2 Fix bugs in jumpdest analysis (#1474) c0700065 Improve `BIGNUM` operations (#1482) acc59c35 Speed-up `bn254` pairing operation (#1476) ae0907ff Merge pull request #1462 from 0xPolygonZero/pg/evm-licensing 8cb80e09 Improve `blake2f` call (#1477) 319fc6a2 Improve SHA2 precompile (#1480) 8e1969db Fix interpreter jumps (#1471) b8a16b39 Fix typos (#1479) 265d46a9 chore: update ci workflow (#1475) 39a2d62d Fix touched_addresses removal (#1473) 10fc9660 Remove some more CPU cycles (#1472) 990eb34d Remove some CPU cycles (#1469) 30b47998 Fix simulation for jumpdest analysis (#1467) 5c1ec524 proofreading (#1466) c4319dce fix: make add_generators public (#1463) bb48cabd Add math rendering with Katex (#1459) 70483050 Fix fill_gaps (#1465) c2a73ad8 Fix clippy (#1464) 219365d6 Packed rlp prover inputs (#1460) ccd4ff87 Update Cargo.toml de2709d8 Update README.md 7f5fae84 Add files via upload b119e96f Merge pull request #1461 from 0xPolygonZero/eth_trie_utils_bug_fix ab05d181 Bumped `eth_trie_utils` f80ebe77 Remove full memory channel (#1450) a78a29a6 Merge pull request #1423 from topos-protocol/jumpdest_nd b05e84dd Interpreter GenerationInputs (#1455) 3b1ed824 Merge pull request #1447 from topos-protocol/plonky2_doc cda30847 Apply review ac9f704f Fix comment f9c3ad66 Update empty_txn_list fdedf3e3 Merge remote-tracking branch 'public/main' into jumpdest_nd bead1d60 Adress review comments 233ddd4e Constrain syscall/exceptions filter to be boolean (#1458) 6ef0a3c7 Apply suggestions from code review 606732a8 Free up some CPU cycles (#1457) 99a1eb5c Missing review comments ae4a720a Address comments 92aaa404 Apply suggestions from code review 0bf9cd2f Use current context in ecrecover (#1456) c329b368 chore(evm,field,plonky2):fix typos (#1454) 85524cfa Intra doc 
link 33def084 Merge branch 'main' into plonky2_doc aedfe5df Implement CTL bundling (#1439) c8430dac Add Boolean constraints for `ArithmeticStark` (#1453) 54a13135 Improve some calls to `%mstore_rlp` (#1452) 1715573c Fix problems after address bundling 95c83add Merge pull request #1399 from topos-protocol/refactor_encode_funcs 5b71eb4e Address review comments 3c699be7 Merge remote-tracking branch 'public/main' into refactor_encode_funcs 22e267c3 Address bundling bd02117c Fix `after_mpt_delete_extension_branch` (#1449) f4be34dc Some more dcbfef6d chore: fix typos (#1451) 82804e42 Add some more + module doc 80917cbe Merge remote-tracking branch 'public/main' into refactor_encode_funcs 7fc6b86d Minor 77f51095 Adress reviewer comments cb19f219 Add crate-level documentation (#1444) 47b42856 Remove unused macro 2dacbfe2 Address bundling (#1426) 3e61f06a Remove gas check in sys_stop (#1448) ed2e1bc7 Add comment 3c8b150f Rustdoc 20db596e Add some more explicit doc on plonky2 crate 18a14bf2 Remove assertion 897ba585 Remove assertion in packed verif 1c994737 Address comments f4713c44 Apply suggestions from code review 247d655b Minor 8f1efa15 Fix minor error ab4508fc Add packed verification f46cf4ef Prevent some lints from being allowed (#1443) 6cf4df7d Add initial constraint z polynomial (#1440) ef07eabf Pacify latest clippy (#1442) 1a95f7aa Clippy 9c573a07 Restore simple_transfer and Clippy a85f9872 Fix bug in jumpdest proof generation and check that jumpdest addr < code_len 48b9769e Remove duplicated label a8340496 Rebase to main ae3003a9 Add alternative method to prove txs without pre-loaded table circuits (#1438) c3d707c1 Constrain partial_channel (#1436) dfcf276d Refactor encode_empty_node and encode_branch_node cb3f91a0 add Debug trait to PartitionWitness to enable trace information output (#1437) 24ae0d9d Clippy 3e78865d Remove aborts for invalid jumps and Rebase 0ae56db0 Reabse to main 11d668f5 Remove aborts for invalid jumps 4e569484 Improve proof generation c4025063 
Clippy aaa38b33 Fix fmt 08982498 Remove U256::as_u8 in comment 5acabad7 Eliminate nested simulations 0bec6278 Apply suggestions from code review ed260980 Fix jumpdest analisys test ff3dc2e5 Refactor run_next_jumpdest_table_proof 9e39d88a Rebase to main 6ababc96 Remove aborts for invalid jumps 7eff4e27 Constrain first offset of a segment (#1397) 829ae64f Improve proof generation a291d92c Merge pull request #1392 from 0xPolygonZero/dp-from_values-take-ref 7cb04884 Minor cleanup (#1435) 096c7456 Constrain new top to loaded value in MLOAD_GENERAL (#1434) 18e08f4f Filter range checks (#1433) f67ee258 Add exceptions handling to the interpreter (#1393) 536cd1c8 Regenerate tries upon Kernel failure during `hash_final_tries` (#1424) ee91b67c Merge pull request #1432 from 0xPolygonZero/discord_badge 0b56ab75 Added a Discord badge to `README.md` f8f6b07a Change context to current context for BN precompiles (#1428) 68b9f0ad Add ERC721 test (#1425) a64311cf Add aborting signal (#1429) 77f1cd34 Clippy 5a0c1ad8 Fix fmt ad8c2df8 Remove U256::as_u8 in comment 81f13f3f Eliminate nested simulations fdd7ee46 fix: make `from_noncanonical_biguint` work for zero (#1427) 2c5347c4 Apply suggestions from code review 71dff6e9 Constrain MSTORE_32BYTES new offset limbs (#1415) 746e1344 Fix jumpdest analisys test f76ab777 Refactor run_next_jumpdest_table_proof 3e8ad086 Rebase to main bc1a3c48 Merge `push` and `prover_input` flags (#1417) 51ff8c5b Merge pull request #1420 from dzizazda/main 837434cf Fix a minor typo in evm/spec/cpulogic.tex 4e4e61c2 typo fix 00ed16fc minor typo fix 724437d0 typo fix 942e43ab typo fix bfcfcdb4 Add `Checkpoint` heights (#1418) 5607faf3 Check that limbs after the length are 0 (#1419) 3195c205 Merge MSTORE_32BYTES and MLOAD_32BYTES columns (#1414) 56e83956 Merge pull request #1416 from AdventureSeeker987/main 43ecf1df chore: fix some comment typos 7efd147e Use mstore_32bytes to optimize decode_int_given_len (#1413) edfc86c3 Remove is_keccak_sponge (#1410) 170ce5f2 
Preinitialize all code segments (#1409) a90aa40b Implement MPT preinitialization (#1406) 4ba7718e Optimize asserts (#1411) 47e24306 Remove GenerationOutputs (#1408) 46b6aa10 Implement degree 2 filters (#1404) 2d36559d Make some functions const (#1407) 7ac6bf2c Implement `PublicValues` retrieval from public inputs (#1405) 6c3e3c0e Use logUp for CTLs (#1398) cb2a22a5 Update stack op cost (#1402) d28ba240 Pacify clippy (#1403) d682769b Fix set_context constraints (#1401) 2d0df393 Merge pull request #1391 from succinctlabs/chris/recursion 32d00967 Fix kernel codehash discrepancy (#1400) 5572da30 Remove intermediary block bloom filters (#1395) 30c944f7 Remove bootstrapping (#1390) e68195fc chore: Remove TODOs about `from_values` taking a reference 37918ccc Revert "chore: from_values takes ref" 471ff68d Optimize `num_bytes` and `hex_prefix_rlp` (#1384) 7cc123e0 chore: from_values takes ref b598e6ff VerifierCircuitData Clone,PartialEq,Eq 64cc1000 Move stack_len_bounds_aux to general columns (#1360) 96f3faf2 Changes in interpreter and implement interpreter version for add11 (#1359) 6dd2e313 Add upgradeability to `AllRecursiveCircuits` and output verifier data (#1387) 3440ba94 Remove extra rows in BytePackingStark (#1388) 2a6065b4 comment ab70bc53 Fix run_syscall in interpreter. 
(#1351) 2039e18f Fix genesis block number in `prove_block` (#1382) acd3b1ad Merge pull request #1383 from topos-protocol/mpt_specs 110a8eff Update evm/spec/mpts.tex ee450d6d Remove redundant sect about MPT 36e31c53 Address comment 08e0afe4 Fix typo in evm/spec/mpts.tex c7951fcc Update evm/spec/mpts.tex 12b522cb Update evm/spec/mpts.tex 98bed16a Update evm/spec/mpts.tex 0c0818c7 Update evm/spec/mpts.tex c6178a45 Update evm/spec/mpts.tex a3b5e13e Update evm/spec/mpts.tex 3af316f3 Add MPT specs d4b05f37 Add specs for stack handling (#1381) a7364586 Backporting gas handling to the specs (#1379) 2d5a84a1 Add specs for the CPU table (#1375) bec1073c Explain difference between simple opcodes and syscalls (#1378) 48e2b24b Add range check constraints for the looked table (#1380) 8d473168 Add specs for BytePackingStark (#1373) fe311c7f Check is_kernel_mode when halting (#1369) 98b5e5be Initialize blockhashes (#1370) 06933b1d Starting the specs for the CPU logic (#1377) 1f65a8a8 Add exceptions to specs (#1372) 0c4d9a8e CTL and range-check documentation (#1368) f1be8409 Update README.md (#1371) 7a50292a Update README.md f2b75fa5 Update README.md edeb2c76 Merge pull request #1367 from 0xPolygonZero/avm-readme 29762c85 Update README.md d98305b3 Create README.md 398b15c1 wip 79c6de14 Update Keccak-f specs. 
(#1365) b44fc0d6 Add specs for KeccakSponge (#1366) b9328815 Reduce visibility (#1364) ddecf8bd Update specs for Logic and Arithmetic Tables (#1363) f879d925 Add doc for privileged instructions (#1355) 2c951de4 Update Memory in specs (#1362) 24aa9668 Revert "Make gas fit in 2 limbs (#1261)" (#1361) 40d3c6dd Merge pull request #1294 from matthiasgoergens/matthias/make-clippy-happy eda7fd65 Constrain is_keccak_sponge (#1357) 4dc42c51 Merge public values inside prove_aggreg (#1358) 0e63e661 Implement out of gas exception (#1328) 01f229a8 Add push constraints (#1352) a0876d73 Refactor JUMPDEST analysis (#1347) 6f2b3349 Fix parsing of non-legacy receipts (#1356) f7d5e7c4 Fix MSTORE_32BYTES in interpreter (#1354) 6d751b13 Remove values of last memory channel (#1291) 75ae0eb5 Merge pull request #1346 from 0xPolygonZero/dp-unwrap-hunting 88fcc329 Reduce visibility for a bunch of structs and methods in EVM crate (#1289) 5800e6ad Add run_syscall and tests for sload and sstore (#1344) cc0cdd07 Remove unnecessary code duplication (#1349) 3810bd1a Charge gas for native instructions in interpreter (#1348) 5c41dc4d Range-check keccak sponge inputs to bytes (#1342) d2b5882a Root out some unwraps ec41b754 Fix ranges in AllRecursiveCircuits initialization for log_opcode aggregation test (#1345) 01bbf1a0 Constrain clock (#1343) 954d1a77 Remove logic for multiple txns at once (#1341) 5d5628b5 Move empty_check inside final iteration 605932d1 restore `no-std` support (#1335) e41435e9 Add memory checks for prover_input, as well as range_checks for prover_input, syscalls/exceptions (#1168) fa93454c Add withdrawals (#1322) 19178072 Remove `len` column in `KeccakSpongeStark` (#1334) 099994ab Add test for ERC20 transfer (#1331) d8f6de25 Merge pull request #1329 from shuoer86/main d941539b Fix typos in comments a0ea26f4 Fix typos in comments d2b549af Constrain uninitialized memory to 0 (#1318) f71f227d Add test for selfdestruct (#1321) 1e8ed78f Merge pull request #1320 from 
topos-protocol/fix-combine-keccak-jumpdest 85b38bec Fix merging of jumpdest and keccak_general. 41362075 Combine JUMPDEST and KECCAK_GENERAL flags. (#1259) 20501d9b Add context constraints (#1260) 3ca16620 Merge pull request #1317 from topos-protocol/more_memcpy_bytes 6332900d Combine PUSH0 and PC flags. (#1256) 0d97b93a Add some documentation in EVM crate (#1295) afd357f4 More of memcpy_bytes 0f299d4c Merge pull request #1316 from topos-protocol/memcpy_tiny af4935cd Merge NOT and POP flags. (#1257) 4b40bc03 Remerge context flags (#1292) c96a8700 Move empty check inside final iteration 0258ad4a Merge pull request #1314 from topos-protocol/refactor_wcopy 6f52b76d Review 7447959f Fix kexit_info in test 666a155d Remove new_stack_top_channel from StackBehavior (#1296) 15a9e992 Fix test on interpreter side 75fdd342 Fix calldatacopy ade5b8c3 Fix a9e47afc Refactor codecopy a1b178f6 Refactor memcpy 3feb04d2 Refactor wcopy syscalls 3aeec83a Add missing constraints for DUP/SWAP (#1310) 44af80f2 Merge pull request #1309 from topos-protocol/fix_padding_in_wcopy c1c1ab6d Fix wcopy and extcodecopy for ranges over code limit ed5ec3ca Merge pull request #1307 from topos-protocol/fmt b31f7061 Apply rustfmt with latest nightly b212fff7 Merge pull request #1306 from topos-protocol/fix_empty_last_chunk 8326db60 refactor: remove usage of unstable `generic_const_exprs` in starky (#1300) 60811d08 Also for memset 83054b0f Handle empty case for memcpy_bytes 8af189b9 Merge pull request #1305 from topos-protocol/memset 385ab3c6 Remove redundant d185d30e Speed-up memset and fix it to write 0 values 0300a322 Merge pull request #1304 from topos-protocol/memcpy_bytes 4140eb54 Fix 503a31b6 Reviews e2b66206 Merge pull request #1302 from topos-protocol/remove_kernel_memory_zeroing 9607a41b Merge pull request #1303 from topos-protocol/amortize_receipt_reset 595dfa6b Merge pull request #1301 from topos-protocol/submod_kernel ba61d15c Add macro for copying sequences of bytes 6d2586ef Amortize bloom 
reset 40de5059 Remove outdated code 07ffe4f5 Combine stack macros for fewer operations 6ca9970a Alter stack to remove SWAPs for SUBMOD 29005dc5 Use SUBMOD in Kernel 05006deb Pad according to RATE rather than WIDTH (#1299) 49976ea2 Check gas in sys_stop (#1297) 817e3e78 Combine DUP and SWAP (#1254) 29fdd3e3 minor: use explicit builder.assert_zero for readability (#1293) c9391be0 Update check_ctls with extra looking values (#1290) f9242702 Make clippy happy d89966b0 Merge pull request #1288 from 0xPolygonZero/git_dep_org_update a02a2ecb Updated `mir-protocol` --> `0xPolygonZero` 1d604319 Store top of the stack in memory channel 0 (#1215) 762e6f07 Fix hash node case in `mpt_delete_branch` (#1278) d7990ee1 Add journal entry for logs (#1286) 9fd0425f Fix journal order in `sys_selfdestruct` (#1287) 49ca63ee Fix sys_blockhash (#1285) 2aeecc3d Fix failed receipt. (#1284) b4203c3d Make sure success is 0 in contract failure (#1283) 41a29f06 Remove some dead_code in EVM crate (#1281) 8a5eed9d Fix shift constraint (#1280) e58d7795 Remove reg_preimage columns in KeccakStark (#1279) 0de6f949 Remove extra SHL/SHR CTL. (#1270) 51eb7c0b Merge pull request #1276 from topos-protocol/fix_empty_to_encoding 571dc14f Fix encoding for empty recipient 3ac0c4ae Fix genesis state trie root when calling `prove_root` (#1271) cd36e96c Derive clone for txn RLP structs (#1264) 0f19cd0d Make gas fit in 2 limbs (#1261) 4e2cba56 Merge pull request #1262 from topos-protocol/fix_rc_doc 8afd06cf Fix description of Range-Check columns in STARK modules 30005957 Optimize lookup builder (#1258) 1ff6d4a2 Merge pull request #1235 from topos-protocol/new-logup f49fbc8e Transactions trie support (#1232) acc659da Add type 1 and 2 txn for RLP encoding support (#1255) 916ce0dd Merge pull request #1228 from topos-protocol/constrain-genesis-state 5694af79 Merge remote-tracking branch 'mir-plonky2/main' into constrain-genesis-state 75c0e47a Apply comments. 
03a95581 Handle additional panics (#1250) 72241ca7 Connect block_gas_used (#1253) a24cd4f3 Merge pull request #1251 from topos-protocol/fix-observe-challenges 043d12c2 Fix observe_block_metadata 8c78271f Add `random` value to block metadata and fix `sys_prevrandao` (#1207) bbc6fe76 Merge branch 'main' into 'new-logup' 3983969c Use function for genesis block connection. 9d0101d6 Merge branch 'main' into 'constrain-genesis-state' 0abc3b92 Apply comments (#1248) d6be2b98 Remove `generic_const_exprs` feature from EVM crate (#1246) 70d6dd97 Merge branch 'main' into new-logup f438d45f Merge branch 'main' into 'new-logup'. 6618cfad Remove SEQUENCE_LEN in BytePackingStark (#1241) 1b7207ee Merge pull request #1244 from topos-protocol/block_metadata_doc 459e9b3d Merge pull request #1245 from topos-protocol/indexing_tables d8874c83 Update ranges indices c468465c Merge pull request #1243 from tamirhemo/main edd3f383 Add some doc for BlockMetadata / ExtraBlockData 8a19f436 Merge pull request #1242 from topos-protocol/fix_multi_row_ctl 8839285f add trait bound a44379b5 fmt and clippy 7d7f01da refactor prove method 4eb6a3b5 Fix eval_table ca441872 Merge branch 'main' into new-logup b60a3d4b Merge pull request #1239 from topos-protocol/mload_mstore_with_packing 053553d4 Reuse new packing instructions for MLOAD and MSTORE 696377ba Merge pull request #1231 from topos-protocol/error_vs_panic b5c28bd6 Rename utility methods for U256 conversion f07351fc Merge pull request #1238 from topos-protocol/cleanup f3ea95ca Merge branch 'main' into error_vs_panic c4be838a Typo d1c00767 Cleanup lookup_test module and reduce module visibility 15064b3a Merge pull request #1229 from topos-protocol/next_row_ctls 1a4caaa0 Move next row logic inside Column ffa71787 Merge remote-tracking branch 'mir/main' into new-logup a9b7b5a6 Revert "Remove where clauses: [(); CpuStark::::COLUMNS]" 8903aec1 Change padding rule for CPU (#1234) 865f185b Merge branch 'main' of github.com:mir-protocol/plonky2 into 
new-logup 1afcafad Merge pull request #1236 from topos-protocol/ci 5db6abf0 Update clippy in CI 4f0330ad Update clippy in CI ec9e6196 Fix range 66f935a7 Remove where clauses: [(); CpuStark::::COLUMNS] 91000591 Merge branch 'main' of github.com:mir-protocol/plonky2 into new-logup 9697c906 Clippy 7dc2a774 Cleanup c5af894e Add assert with char(F). Cleanup. Fix recursive challenges. 9ab8a118 Remove one helper function 17f661f9 Fix BytePacking range-check. Fix lookup challenges c9c0f8b7 Use CTL challenges for logUP + change comments + add assert f65ad58a Implement logUp d4a8026b Combine mstore_general and mload_general into one flag (#1188) 27d9113f Merge branch 'main' into next_row_ctls 0b5ac312 Merge pull request #1203 from topos-protocol/constrain_nv_stack_len 19220b21 Remove redundant Keccak sponge cols (#1233) 06bc73f7 Combine arithmetic flags on the CPU side (#1187) 61a1c246 Fix CTLs c27fc96a Merge branch 'main' into next_row_ctls f944a08b Fix self_balance_gas_cost and basic_smart_contract. (#1227) 7ebbb47f Swap ordering in stack macro (#1230) 5a1b05ac Remove risks of panic 9508b490 Move byte packing / unpacking to a distinct table (#1212) 3c4f938f Make next row available to CTLs 4d7d9ffa Constrain genesis block's state trie. 
d1c395ef Merge pull request #1202 from mir-protocol/keccak-preimage 3571f097 Merge pull request #1224 from mir-protocol/latest-nightly 9a8a769d more clippy suggestions 55d05147 clippy suggestions a4e6c6ae clippy suggestions 7415810f clippy suggestions 8af3b0fe clippy suggestions ed8bcf9d clippy suggestions 1dd77d6d fmt e947a624 suppress incorrect Clippy error 967f7b12 latest nightly in CI and rust-toolchain 65917f5f Merge pull request #1222 from mir-protocol/internal_crate_path_stablization 2f1ed951 Merge pull request #1220 from mir-protocol/latest_nightly_fix 90ea0318 Merge pull request #1223 from succinctlabs/uma/change-witness-visibility faa70e07 Merge pull request #1219 from succinctlabs/uma/add-mock-feature-flag a184f09b Made visibilities outside of crate to allow for forking partial witness gen outside of crate 1be1ca4d clippy a6433071 Fixes 5936c67f Now refers to sub-crates using paths (and removed `patch` section) 180c2094 Merge pull request #1208 from topos-protocol/blockhash_opcode 71b2ece1 Merge pull request #1216 from topos-protocol/checkpoint_lengths 0b7c4082 Merge pull request #1218 from topos-protocol/keccak_col 05e9fc0b Apply Nick's comment d0379e94 Apply Nick's comment 4716fe7d Also included clippy fixes introduced by new nightly 6d3d2cb2 Now builds on the latest nightly 5a3c8b26 clippy 0ca796e1 Removed mock feature flag and added mock_build 170f7d83 Fix Clippy 9a06fc9b Fix memop reads, from_prover_inputs and cleanup. ddf2b817 Clippy 1c01d682 Fix overflow check and test. Remove [..8] when using h256_limbs. 
c30b1834 Change h256_ulimbs 4e0fe74a Apply comments 42f70380 Add blockhash sys opcode 4782519d remove spurious 18d31412 Added mock feature flag and test 258b075f Remove filter column for KeccakStark e6ca4606 Merge pull request #1214 from jtguibas/jtguibas/serde-target 5690b951 Merge pull request #1217 from topos-protocol/cleanup_duplicates fa9aae1f Remove duplicate code 6207f446 Merge pull request #1206 from topos-protocol/missing-public-value-links 8dcb29e5 Display actual trace lengths instead of number of ops 800603d6 feat: serde for targets a7096546 Merge pull request #1209 from topos-protocol/receipts-all-types d4b71c56 Replace genesis state trie check with TODO 6bd17e29 Apply comments dd3b61a3 Merge pull request #1211 from mir-protocol/comment-fix ac89c7cd Fix comment in `proof.rs` bf21b278 Apply comments 9ba2b895 Implement receipts of types 1 and 2 b0764436 Add missing links between public values 8beba569 Constrain next row's stack length ea03e418 Keccak STARK: constraint preimage to equal A on first round 760f09a8 Merge pull request #1201 from shuklaayush/fix/keccak-stark-reg-preimage 301aedf0 fix: constrain higher bits of reg_preimage a0b2b489 Merge pull request #1200 from topos-protocol/fix_empty_txn_list 71967147 Update range from ReceiptTrie PR 6e7fcc9e Merge pull request #1199 from jtguibas/john/make-generate-partial-witness-pub d3f33bae make generate partial_witness pub 62f271a8 Merge pull request #1198 from mir-protocol/public_values_serde 975fd451 Made `PublicValues` serializable 86fb6aa0 Merge pull request #1097 from topos-protocol/receipts_and_logs 6a2e2423 Clippy caae038c Cleanup 5b962f3c Change receipts_trie in basic_smart_contract and self_balance_gas_cost ad9796cb Fix tests and address comments 925cdd53 Cleanup c0b4f155 Implement receipts and logs 44115de7 Merge pull request #1174 from topos-protocol/merge-context-flags a881c70e Merge pull request #1191 from mir-protocol/eth_trie_utils_patch 18ca89f0 Patched plonky2 to use a patch for 
eth_trie_utils 4e5f6e7e Apply comment 10bbda03 Remove unnecessary changes in the Operation enum c3cb2278 Combine get_context and set_context into one flag 74212a29 Merge pull request #1192 from topos-protocol/misc_constraints f6f9fa31 Merge pull request #1190 from topos-protocol/mpt-remove-cow 06e20f87 Apply comment a94d9282 Merge pull request #1194 from topos-protocol/block_basefee 8476fdcd Refactor 9a450068 Update BlockBaseFee to fit in 2 limbs c138f2d6 Merge pull request #1193 from topos-protocol/observe_pv 68bb4967 Update tests to have a blockgaslimit fitting u32s 976d7521 Observe public values 0b78c43f Remove filtering in membus 91e8d52d Reduce overconstraining in decode module b711e527 Combine a few constraints d96c6491 Merge pull request #1165 from topos-protocol/ci-test d70d67fa Remove copy on write for mpt_insert and mpt_delete 1997bf24 Implement inverse from Fermat little theorem (#1176) eb7bb461 Merge pull request #1189 from topos-protocol/remove_is_bootstrap_kernel_flag 49d92cb8 Remove is_bootstrap_kernel column 683501cc Merge pull request #1183 from topos-protocol/remove_is_cpu_cycle_flag 815a02ab Remove is_cpu_cycle 89e62e55 Use Keccak config in simple tests 7b07229b Add guidance for external contributors to README.md 830fdf53 Merge pull request #1184 from topos-protocol/combine_jump_flags 12f379f9 Combine jump flags 470788d0 Merge pull request #1185 from topos-protocol/combine_simple_logic_flags 7cdb6baf Merge pull request #1177 from topos-protocol/alloc 7829dccf Combine EQ and ISZERO flags dc7e0aa7 Merge pull request #1181 from topos-protocol/combine_logic_flags 654f7cac Comment e10eaad0 Combine all logic flags together 437f57a8 Fix logic CTL 5100e032 Revert changes in cyclic_subgroup_unknown_order 8541a04b Apply Nicholas comment 56ebda49 Address review 12a687d3 Reduce reallocations ee9ce4c5 Combine AND and OR flags in CpuStark 6f98fd76 Merge pull request #1147 from metacraft-labs/gate_make_public_parameter 5f4b15af Connect SHL/SHR operations to the 
Arithmetic table (#1166) df07ae09 Write trie roots to memory before kernel bootstrapping (#1172) c9eed2bb Connect public values in aggregation circuit (#1169) 397ee266 Merge pull request #1171 from topos-protocol/exception-flag 017e6217 Set exception flag to 1. e6407089 Error instead of panicking for missing preprocessed circuits (#1159) b2626fdc Merge pull request #1162 from topos-protocol/cleanup_attributes 9eeb69f0 Merge pull request #1105 from topos-protocol/poseidon_warning bf1ed783 Merge pull request #1161 from topos-protocol/fix_recursive_ctl c9bd32d5 Fix trait import. (#1163) 9f8c1522 Remove unused attributes 4a762e33 Merge pull request #1160 from topos-protocol/keccak_general 5b9e8d85 Merge branch 'main' into poseidon_warning bd3834c4 Silence Poseidon warnings for ARM targets 5316f890 Clippy 8365608b Convert to u32 instead of u64 c93f9d5f Fix endianness in benefiary limbs bca3e09b Reuse set_public_value_targets f01098a7 Constrain keccak general 9e0719e6 Better document constraints on addcy carries (#1139) d8e314bc Merge pull request #1155 from 0xmozak/matthias/generalise-transpose 8c6e8d63 Merge pull request #1158 from mir-protocol/jacqui/gas-check-spec c52ed29e Gas handling brain dump eebf7eb0 Merge pull request #1157 from mir-protocol/update-versions b414b8e9 fmt f574effe make imports conditional on config 84321955 update versions in cross-crate references 3a556029 update versions for crates.io updates 7537193d Generalise transpose 5b8740a7 Merge pull request #1026 from topos-protocol/memory-ctl-verifier-bus 3b21b87d Merge pull request #1151 from mir-protocol/jacqui/dead-memtable-cols 7a882d0a Clippy 6253a68e Change public values into public inputs 59b73c84 Apply comments 1590c1d0 Fix indices in CTL functions f97deab8 Remove non-passing debug assert 06037f81 Fix the memory CTL and implement the verifier memory bus b3f00d4a Merge pull request #1146 from topos-protocol/overlap-cpu-syscalls 831fe862 Cut 5 Columns From The Memory Table With This One Weird 
Trick! bfd6b988 Merge pull request #1148 from topos-protocol/lookup_check ee9cd80c Change arg to non-mutable reference 0276446e Add additional lookup unit tests dc70902f Remove always true condition bc246780 Fix name in ID 6ca3f1a9 Make GateRef value public bfa7ab36 Merge pull request #1111 from topos-protocol/lookup_serial 16227f90 Merge syscall and exceptions constraints. 0f52c889 Merge pull request #1145 from mir-protocol/npwardberkeley-patch-1 535fb7d8 Update prover.rs e047676e Merge pull request #1114 from onsen-egg/onsen-egg/lookup-opt a67cfdcb Precompute RE poly evals for challenges 03d90f30 Faster multiplicity counting for lookup tables 167518ed Merge pull request #1143 from succinctlabs/build_issue_on_mac_M2 6a772879 Fix negative quotient issue (#1140) 25678f46 Merge pull request #1144 from mir-protocol/build-in-subdirectories f3e87ec4 CI: build in subdirectories 8a86e195 fix: add itertools/use_std feature flag for [std] 2d8c02bf Merge pull request #1138 from 0xmozak/bing/dep-serde-rc 7ba051f4 Fix failing byte constraint (#1135) 152e3959 Merge pull request #1137 from topos-protocol/fix-kernel-panic e28b484a deps(serde): use rc 1af1afcf Change current context in bignum_modmul cf278eac Merge pull request #1136 from topos-protocol/div_by_zero f116c855 Fix risk of division by zero b27389df Merge pull request #1134 from topos-protocol/avx_tests 04657d24 Fix import 1d6ca589 Add LUT hash to remove CircuitBuilder overhead 4893a860 Merge pull request #1116 from topos-protocol/recursive_ranges 00579850 Merge pull request #1132 from mir-protocol/dependabot/cargo/itertools-0.11.0 8b35fefb Rename cd to common_data for consistency Cf review cbb3da15 Reduce number of lookup accesses b32345cd Update lookup serialization c8020126 Provide methods for ProverOnlyCircuitData serialization 544aff27 Also provide CommonCircuitData in serialization of gates and generators 47781e47 Add CommonCircuitData to gates deserialization method b43d6c1d Add CommonCircuitData to generators 
deserialization method d684ee2d Switch Field type of generators to be F: RichField + Extendable 5d513207 Update itertools requirement from 0.10.3 to 0.11.0 4400757f Merge pull request #1128 from mir-protocol/dependabot/cargo/hex-literal-0.4.1 dc170915 Merge pull request #1131 from mir-protocol/dependabot/cargo/criterion-0.5.1 c202f4bc Merge pull request #1129 from mir-protocol/dependabot/cargo/ahash-0.8.3 0f284ca6 Merge pull request #1130 from mir-protocol/dependabot/cargo/hashbrown-0.14.0 63b8ceba Merge pull request #1124 from 0xmozak/matthias/remove_unused_deps e3f12709 Merge pull request #1123 from 0xmozak/matthias/fix-readme b0c5ddc0 Update criterion requirement from 0.4.0 to 0.5.1 fc70f36c Update hashbrown requirement from 0.12.3 to 0.14.0 0d9208a6 Update ahash requirement from 0.7.6 to 0.8.3 413f589e Update hex-literal requirement from 0.3.4 to 0.4.1 cc45ac9a Merge pull request #1125 from 0xmozak/matthias/fix-resolver-warning 1f561771 Merge pull request #1126 from 0xmozak/matthias/add-dependabot 7437fe2b Fill modulus in cpu row for Fp254 operations. 
(#1122) 9e748a47 Enable github's Dependabot 6c2f76d5 Fix resolver warning 4b0fc861 Remove unused dependency `blake2` from `evm` crate f6b2e742 Fix spaces and wording in README ee5d1aa6 Merge pull request #1033 from 0x0ece/transpose 398f86af Merge pull request #1092 from matthiasgoergens/matthias/move_to_field 2d7a94de formatting 94f880b6 Merge pull request #1104 from topos-protocol/serializer dca50adf Merge pull request #1119 from mir-protocol/jacqui/topos-protocol/stack_len_bounds_aux_error 6b493d6f Remove redundant case (error in kernel mode) 1664ab44 Merge pull request #1112 from topos-protocol/fix-generate-jump 7aa5ed3b Merge pull request #1117 from topos-protocol/fix_set_context b9b227c8 Merge pull request #1118 from mir-protocol/revert-1109-new-clippy-fixes 14c40115 Revert "clippy fixes" f08afec6 Merge pull request #1109 from mir-protocol/new-clippy-fixes 5bff02a1 Fix generate_set_context 0a59aa6e Remove need for matching start ranges 325cd2f7 Compute stack_len_bounds_aux correctly in generate_error 224064bf Fix jump operation generation c982826e Add feature "rc" to serde crate import 3870524a Merge pull request #1113 from topos-protocol/ci 6bd575d1 Fix nightly version in CI dbb23587 Merge pull request #964 from topos-protocol/lookup 96fbecd9 ignoring where appropriate (for izip), fixing elsewhere b0568a79 remove useless vec 0fec1124 update itertools 08a6e66d fix 5b08ac58 fix 3c776a8d clippy fixes 4df4d865 No default implementation 91c55d15 Add wrapper types for Lookup and LookupTable 43512371 Review c0fc349c Fix lookup serialization and update with latest serialization changes 7e80b42a Serialize Lookup gates and generators 35abffd5 Implement lookups with logarithmic derivatives in Plonk 6122dccb Move operations to Field 56a127eb Make Buffer available in no-std d960bfe2 Make serializer work with slices instead of Vec 3de92d9e Merge pull request #1102 from mir-protocol/modexp-memory-context-change 605ea47f reset Cargo.toml 897e2e99 fix 54cf74ac addressed 
comments 975e9a49 fmt 8eeca9be undo dummy change 2fa43121 dummy change to get tests to rerun :P 9bb6da04 fmt afd4bd04 cleanup bc53ddc5 fix 264192aa modexp uses current_general 39d2237d Merge pull request #1101 from mir-protocol/blake_fix ee452fc0 Merge pull request #1099 from mir-protocol/blake_fix_fix ef8ea64d Minor 7559bb2f Minor 83ee5fd6 Minor c8ff80ca Fix blake2 fix 7ca56768 fix 246eb8d8 blake fix 23bc390a Merge pull request #1095 from mir-protocol/jacqui/push0-opcode 3eb41edb William comments 564864ea Remove parts of the copy-on-write logic (#1096) cedeff52 PUSH0 9cc35360 Merge pull request #1082 from mir-protocol/jacqui/simplify-stack-bounds 8ded9e84 Minor: William comment ec07255f Fix halt loop (#1094) 01efa013 Fix account touch in calls (#1093) ba705703 Use current context for pairing memory (#1091) 9838a367 Check call depth in create (#1089) e51c4d0d Set returndata size to 0 in some create errors (#1088) d37c5455 Increment call depth in precompiles (#1087) 56e7ad00 Fix LOG* gas (#1086) 68b15ea5 Fix CALLDATALOAD for large offsets (#1085) f852984e Implement PREVRANDAO as if it was DIFFICULTY (#1084) 6920992e Simplify stack bounds constraints 01175419 Merge pull request #1071 from mir-protocol/jacqui/bad-opcode-witness-generation ae290dbf William PR comments 0f7e1c0b Call stack depth (#1081) 0f874317 Minor fix to REVERT (#1080) 90bb4741 RIPEMD doesn't get untouched (#1079) fd48e5d1 Contract creation fixes (#1078) 77f0d8b5 Don't revert state in CREATE in case of OOF or nonce overflow (#1077) 63a6e706 Fill BLOCKHASH and PREVRANDAO syscalls with dummy code (#1076) 0e23606e Revert #1074 (#1075) 5a13b62d Don't overwrite existing account (#1074) 2cf31f5f Prevent shift ops from panicking (#1073) d3387172 Commit missing file c773476c Minor docs 55b29cac Remove bootloader.asm (#1072) 3ecf5309 Minor bugfixes 448bc719 Lints 1d804e46 Fix stack after precompiles (#1061) 7ab0bba5 Merge branch 'main' into jacqui/bad-opcode-witness-generation b7220428 Error handling 973624f1 
Minor fixes to RETURN and RETURNDATACOPY (#1060) 720faa67 Fix create OOG because of code deposit cost (#1062) fbf6591b Warm precompiles earlier (#1065) f605d912 Propagate static flag (#1066) 73079796 Fix pairing invalid input (#1067) 49bbe4e0 Fix arithmetic stark padding (#1069) 0d819cf8 Implement EVM `BYTE` operation (#1059) 8153dc78 Remove `-C prefer-dynamic=y` from CI build. a492d3e1 Fix revert gas bug 7dfdacf2 Fix return and revert gas (#1058) 42f33017 Fix ecrecover edge case (#1057) c0abefda Fix DUP in call gas e6a7b8c5 Add contract creation flag (#1056) 30b97b29 Fix DelegateCall bug 9727eaf1 Fix extcodehash when account is empty (#1055) 08a061bc Implement LOG* gas and remove panic (#1054) 354664c8 Fix ecmul (#1053) 6e303601 Support for type-2 transactions (#1052) 9b0092ab Support for type-1 transactions (#1051) 15dec6fa Encode `to` as B160. (#1011) beefc91d Pop checkpoint in the right place 5a7c176c Fix issues related to CREATE2 collisions (#1050) e720090e Merge pull request #1041 from mir-protocol/storage_addr_h160_to_h256 d57b62ff Perform jumpdest analysis whenever entering a new context (#1049) 971bfba6 EIP-2681: Limit account nonce to 2^64-1 (#1048) 8faea881 Don't add an event for account creation for pre-existing account (#1047) 29fac4ca Check balance in create (#1046) 1616c0ba Fix extcodecopy 3a9e5cc0 More fixes to contract creation (#1045) 49979df9 Fixed failing test a294c7e2 Some fixes to contract creation (#1044) 84c15606 Minor fixes to returndata and create (#1043) 10e6c768 `TrieInputs` now uses `H256` for storage account addresses ce6ac9f8 Merge pull request #1038 from mir-protocol/tests-memory-context-fix c36ed15e Merge pull request #941 from mir-protocol/bls-fp2 6292d8d7 redundant d3986e6b merge successful 59ae7103 merge 244d5e9b Add refund journal event and checkpoint after access address event (#1040) bde7fb50 Various fixes to checkpoint logic (#1039) e5b0fce6 revert testing changes 3a77c5a0 fix 14f92f7b Cargo.toml change for testing b116929f 
Delete touched recipient in EOA -> EOA (#1037) 6ebee38e fix d05db497 Don't touch contract address in DELEGATECALL or CALLCODE (#1036) bfd6834d Journal of state changes + state reversion (#1028) 74ba3032 MPT deletion (#1025) 202985b2 Fix CALL gas (#1030) 944d4a24 SSTORE refund (#1018) f1cc284d Optimize transpose c134b597 Cross-table lookup for arithmetic stark (#905) 779456c2 Merge pull request #1029 from mir-protocol/precompile-memory-context-change 6f4f00c6 Merge pull request #1027 from mir-protocol/memory-refactor 2c5f6fd6 Fix compile time problems and generic hash implementation (#1024) 76fb3160 Merge branch 'memory-refactor' into precompile-memory-context-change 0d9e3216 fix (mstore_unpacking returns offset) 97aedd11 Merge branch 'memory-refactor' into precompile-memory-context-change 6e7fa6da fix 675d6440 Merge branch 'memory-refactor' into precompile-memory-context-change 57bcb451 use mstore_unpacking and mload_packing 6669f73a use mstore_unpacking and mload_packing af12368a addressed comments & cleanup 98a75774 cleanup 5dc043aa Merge pull request #1012 from honeywest/transpose a4a4fbb3 fmt 057b650f fix a076da75 fix 24159886 precompile memory context change d9694d95 fix 446c3b71 fix b566dbd7 refactor memory/core.asm to make code more reusable 46c7903b Merge pull request #1023 from topos-network/clippy 92c2378c Fix clippy f11921c9 Fix doubly_encode_rlp_scalar in the 0 case. (#1022) 653a6b15 Remove `generic_const_exprs` dependency from field crate. (#1020) 4380395e Merge pull request #1017 from mir-protocol/expmod-fix 91067e58 expmod edge case fix b159c9e7 Merge pull request #1013 from topos-network/overflow-check f0df03f6 Merge pull request #1009 from mir-protocol/expmod_precompile 099e7946 fixes 17a7c57d Change add_or_fault macro 1f39c555 Address overflow-related TODOs in ASM code, using a macro add_or_fault. 
This is related to https://github.com/mir-protocol/plonky2/pull/930/files/a4ea0965d79561c345e2f77836c07949c7e0bc69 40515dc6 Merge pull request #1014 from toposware/bootstrap_constraint f3de2afc remove test file cae5a2cf fix 050c2e65 fix: calculate gas properly ab8ebdfb Merge pull request #1016 from mir-protocol/remove-proof-challenges-serialization 8d738729 Merge pull request #1015 from mir-protocol/clippy-fix ade5b433 fix 30e58ad2 remove ProofChallenges serialization 8358b85d remove unneeded mut 08e6c352 addressed comments 037c2f5e addressed comments 841c5829 Fix todo in kernel bootstrapping debe65f9 addressed comments 1d8f71f8 optimize transpose_in_place_square_small code ae21ef8f Merge pull request #997 from mir-protocol/pairing-test 84f17699 comments f9aad433 neutral input 397d5953 fix 031fe6ed Merge branch 'main' into expmod_precompile 9e4056e2 cleanup badbf010 store and unpack at end 998cd8ab addressed comments a638ebe0 fix 7ede443e Merge pull request #1006 from mir-protocol/blake_precompile dc076df5 addressed comments 96742f29 addressed comments e40b9edb addressed comments 9b18b3ae fix unit 16928fd0 peculiar... 
b37e049a fmt f6a49e88 fair naming 4a42ddb2 on stack 57113905 redundant 2aa83d9a Merge branch 'pairing-test' of github.com:mir-protocol/plonky2 into pairing-test ae4b5091 neutral name 503cb8a9 random inp 4ad8520e SNARKV precompile (#1010) b28e3e0d minor ab721fa3 SSTORE gas (#1007) efd5a81b Merge pull request #980 from mir-protocol/serialize_common_circuit_data 13c653bc mul works 6599c90a abstraction 78a368cf fix 8df0c743 remove build_without_randomizing (no longer needed) b640bf63 serialize ProofChallenges 537debdc return bool 89122a3d it works 479e919c fmt 6dc094a8 test 4c235e5a Merge branch 'main' of github.com:mir-protocol/plonky2 into pairing-test b4e06271 fix d31c60a0 clean up d928a70b clean 1e9db292 fixes 67a3edb2 Precompiles exist (#1008) 0d98e4b8 formatting e642b824 move serialization to separate example dc91554d expmod precompile 3b7ad771 cleanup 46d9cee0 charge gas! 9460acc1 rename blake2b ce033410 fix 6a239c4f fix a41cf018 fixed blake tests 11a03c5e Merge pull request #1005 from mir-protocol/precompile-fixes 621c63de clippy fix c083cc63 fix 29a8367b fmt 2d98dd3c commented out unused functions 5dc44916 Merge branch 'main' into blake_precompile 454e0add fixed blake2_f, and testing 137a9966 Merge pull request #998 from mir-protocol/even-smaller-bignum-modexp-test f225ea49 add comment 93398472 Merge branch 'precompile-fixes' into blake_precompile 1eba893e mload_packing macro ffc5d6d6 Merge branch 'main' into precompile-fixes 43f4d2b8 clean more 14ee46c8 cleanup 0f662ed0 fixes b7e93511 New contract hook (#1002) a4b714e6 EIP-3541: Reject new contract code starting with the 0xEF byte (#1003) 472face2 EIP-3860: Limit and meter initcode (#999) f3f3641f rename 975a35c3 fmt f718f857 cleanup 8d50806b fix b9f1c1c5 cleanup 858c59a2 cleanup 45c0d894 cleanup 905c5eb5 deterministic build function 146e6605 don't serialize challenges 66763c7d cleanup 993ed149 seralizing 34a03545 Serialize impls, and use in Fibonacci example ea82d680 Merge pull request #981 from 
toposware/serialization 86acc15f blake precompile progress b288ff5f Merge branch 'precompile-fixes' into blake_precompile d5060ecd precompile optimizations 26204461 it works 9f0c2f47 blake precompile progress dd58b9b5 dont panic 26d99a9b memory compress 0c55aa04 clean 50752246 clean ff0695d7 renumber memory f1bbf66c it works c01b2bf2 minor 5f564b67 initial work on blake precompile 44a623d4 initialize out in asm 4e0be664 Merge branch 'main' of github.com:mir-protocol/plonky2 into pairing-test b35d2524 Merge pull request #1001 from mir-protocol/eth_trie_utils_0_6_0_bump db93bada Bumped `eth_trie_utils` to `0.6.0` 1a0a6300 EIP170 (#1000) 723f197d Cleanup 099c4b97 msg 82bca7fa error b661a709 twisted check 2a9c5cfd Add serialization check in square_root example bf02a3e8 Make generators public 5de5bfb5 Move serialization files into dedicated module 0e465c1c Customize range specification for AllRecursiveCircuits f71139d9 Add serialisation support for gates, generators, and various structs f7f5fb4e Change display for GoldilocksField 6edd5891 Gas and more for `CREATE(2)` (#995) a8e5613b EOA to precompiles logic (#993) df4a6f01 fix for full modexp test 6d84b988 fmt cb23bfca check for special cases and align with yellow paper bbe64674 tests passing 3628021a fmt d6584dcb restructure tate test 3e437a0c oops remove more debug stuff bf5dc256 undo debug commenting 0df18d5e tests de94ac25 missing file 3444e810 even smaller bignum modexp test, and fixes 21a1a98a reorg ca3a7f8a Merge branch 'main' of github.com:mir-protocol/plonky2 into pairing-test 9b54ee43 refactor c0ced26f Merge pull request #992 from mir-protocol/smaller-bignum-modexp-test 0e082432 reorg ada250f3 Merge branch 'main' into smaller-bignum-modexp-test d8fef87a Only print warning or errors from the log. (#996) 2e16ab04 Replace %stack calls with equivalent opcodes. 
(#994) 3a3ff87a fmt 7dda0eff works 1f077628 new api c9b09936 compiles d112c716 fmt a704b152 even less thorough :P a4f60a04 less thorough bignum modexp test 049a258b Merge pull request #991 from mir-protocol/disable_couple_tests 8562abe0 Disable a couple tests 690fd100 Merge pull request #990 from mir-protocol/mpt_failure_labels 67593f16 Labels for failed MPT read/insert e6864e98 Merge pull request #985 from toposware/kernel_serial 5dfac715 Fix generic const expressions warning (#984) 9037ceb0 Merge pull request #986 from mir-protocol/incremental_release eb7468e7 Incremental release builds df5a90cc Provide methods for serializing Kernel 6b2503f7 Merge pull request #970 from toposware/env 3cc39fa4 wip 9d60191d Implement returndatasize/returndatacopy for interpreter 191ca102 comment 0b85c8bb getting there 0b9ef768 nl 2106ae07 Merge branch 'bls-fp2' of github.com:mir-protocol/plonky2 into bls-fp2 a5c6b14e Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 9b9cd735 Update evm/src/extension_tower.rs 6946eaca Implement codesize/codecopy for interpreter 0f3285c3 Implement gasprice on the interpreter e9cc5632 Impl caller/address/origin opcodes for interpreter b721236e Precompiles interface (#983) b896f9b2 Merge pull request #974 from toposware/stack_bound c8637635 Remove dummy_yield_constr 4946c3d5 Merge branch 'main' into stack_bound 5fce67d1 Merge pull request #978 from toposware/stack_constraints 32a6bdf1 Merge pull request #971 from toposware/keccak_sponge_is_final_block 58f4568e Merge pull request #982 from toposware/sys_chainid 92d94dc6 Use Block chain id for sys_chainid ba844a24 Change shl/shr behavior as well as BASIC_TERNARY_OP 475b2ba0 Fix copy_returndata_to_mem (#976) c7e60073 Check if context is static for state-changing opcodes (#973) 142be4e1 Implement rest of *CALL opcodes (#972) b202196b switch f2650418 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 1e57ef96 Remove unnecessary constraint 29726f92 Apply review f424bd36 
Merge pull request #966 from toposware/interpreter 938e3bd5 Set stack_len_bounds_aux properly 18d27d2f Remove is_final_block column in KeccakSpongeStark 011ea8e4 Fix from review 5b1fd5f2 CALL gas (#969) d79d2c49 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 8130a8a6 Merge pull request #950 from toposware/keccak_sponge 0529fa06 Change endianness within generate_keccak_general 3da8efa6 Implement sar in interpreter b943ddb0 Implement signextend in interpreter 4db00441 Implement sgt in interpreter ac2ccc1e Implement slt in interpreter 232832e3 Implement smod in interpreter 18d317b9 Implement sdiv in interpreter. 0e5e28f6 Merge pull request #968 from toposware/block_interpreter c0fae77c Merge pull request #951 from toposware/prove_block 6124e4d6 Fix BlockCircuitData proofs 0146f48a Cleanup 524b39e2 Reactivate CTL for keccak sponge d1379ac1 Fix hash output writing to memory 5f6098ff Add test for keccakf_u8s 99b0d009 Implement KeccakSpongeStark constraints 2fae2fbc Impl gaslimit opcode for interpreter 9e6f284b Impl chain_id opcode for interpreter ae8ee27e Impl coinbase opcode for interpreter e3572f1d Impl basefee opcode for interpreter 86bd055b Impl difficulty opcode for interpreter 60fed608 Impl number opcode for interpreter a17c6231 Impl timestamp opcode for interpreter 042c0042 Merge pull request #965 from mir-protocol/fix_run_constructor 31e134f0 Delete %set_new_ctx_parent_ctx 1a9f0104 Fix call logic (#963) ab692252 Minor fixes to context creation (#961) 7a65b1d4 Merge pull request #967 from toposware/fix_decode 310107f2 Fix decode constraint cfc54f95 Fix run_constructor d1c9277d Merge pull request #962 from mir-protocol/range-check-example d6bb5d5d range check example af3fa142 Implement sys_return and sys_revert (#959) f24c3537 Update README.md c11f4f41 Merge pull request #960 from mir-protocol/readme-updates fb24b200 README updates: examples and external tutorial 5ac12de9 Fix sys_exp (#958) 923722b1 Fix copy opcodes when offset is large (#957) 
d59fa59a Merge pull request #925 from mir-protocol/bignum-modexp 6a4e9ab6 fix 90f7ba9a addressed final comments 9690b60b Merge pull request #956 from mir-protocol/doubly_encode_storage_values e70e4fca Doubly RLP-encode storage values 889911e8 redundancy 93dd25a1 fmt 33dc8eae better names 251d7e34 systematize names 4e48fc43 all Stacks 0e3b86de frob 26da6dc7 rev stack d52c15e8 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 6fa59d20 Fix MSTORE8 (#955) f9217272 Fix signed syscalls stack (#954) 3b607bde Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 a061c3cf Merge pull request #952 from mir-protocol/extra_where_clauses 209c1ff1 Remove extra conditions e2f33dd3 Merge pull request #873 from toposware/hashconfig 9ee47ab7 Move HashConfig into GenericConfig associated types 46d5e62a Copy txn data to calldata (#935) 8e04cbfe Self-destruct list (#948) a926dcad Transaction validity checks (#949) f1a99e69 Add patch section to workspace config file e857c020 Make hash functions generic 2ca00a9a Selfdestruct gas and set (#947) 786a71d6 Merge pull request #946 from mir-protocol/selfBalanceGasCost 31cd0f64 Remove dbg 9ae69a7c Add an integration test for the `selfBalanceGasCost` case 56bf0892 Charge gas for extcodecopy (#942) f71d3642 Merge pull request #945 from mir-protocol/remove_CONSUME_GAS 9480cbed Signed operations as syscalls (#933) 2d87c5d6 Remove `CONSUME_GAS` 834522a6 Merge pull request #939 from mir-protocol/termination_fixes be0cccdf Merge pull request #938 from mir-protocol/rework_create_create2 39fdad8c Feedback a0d04ca3 Fix Wcopy when size=0 (#944) 9f1a5f97 Charge gas for keccak (#943) cdaabfe9 Merge branch 'main' into bignum-modexp b667c074 Merge pull request #940 from mir-protocol/eth_trie_utils_bump 3c7bc883 Removed a type alias 0c87a57f addressed comment e4f2e864 fix 1a348eed check for x < m bce25720 documentation 1e5677c4 comments a6ccd350 cleanup fb73e889 uncommented c18377d1 Merge branch 'main' into bignum-modexp 91fb4fc0 
fix modexp test 823b06ac fp2 works 3b95e013 bls method 5783a374 Merge branch 'main' of github.com:mir-protocol/plonky2 into bls-fp2 7b93b81a Merge pull request #931 from mir-protocol/fp381-opcodes cf5a4edc prover input minor improvements 60ad9e03 Bumped `eth_trie_utils` to `0.5.0` c3a5fd86 merge 1f14ae98 skeleton 911dfedd Rework CREATE, CREATE2 syscalls b4eb8373 A few fixes for terminal instructions ce22d945 Access lists (#937) f9fa38d3 Fix new account insert key 7028b6ba comment 7ff2122e Merge branch 'main' of github.com:mir-protocol/plonky2 into fp381-opcodes 1ce47ceb Merge pull request #906 from mir-protocol/fp318 0650d263 remove .scale 3f4d970f Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 c8d2769c fmt 74afec70 remove imports f4e65feb Fix bugs in `wcopy` and `update_mem_words` (#934) 15bafce5 Implement CREATE2 address generation (#936) 874805cc Merge branch 'fp318' of github.com:mir-protocol/plonky2 into fp381-opcodes 645ef664 comment 63ec13e2 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 3425391a more comments d0b2b81e More MemoryError (#932) 1f3e3de7 clean and generalize 1627a9a0 tests pass e471818c comments 1fbe3050 Merge branch 'main' into bignum-modexp 06936c76 Implement various syscalls (#930) 84a0bcf8 cleanup 373062b2 on stack 0e8f6a2f test skeleton 392c29f4 compiles 9ea0ebd7 skeleton 1437affc fmt b847d16e redundancy 13d2ed90 merge 54b8ce74 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 143225f4 finish d928423c cleanup d59501e6 fixes, testing, and in-progress debugging fc72ce46 fp6 works 4d83c58d frob works 2df1439d Return error instead of panic in memory operation (#928) a79271a8 Minor account code fixes (#929) caaf3b4a merge fields cf1e6a76 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 24705e1e addressed comments ff81a565 Merge pull request #927 from mir-protocol/creation_fixes e1ae5392 Fix test afded168 Contract creation fixes 893b88c3 Implement syscalls for BALANCE and 
SELFBALANCE (#922) fc6487ca Merge pull request #926 from mir-protocol/fix_gas 3c4bc1d8 Fix GAS and implement storage value parsing be309a38 cleanup from comments 42d65839 addressed comments d340ff8c addressed comments 2e0b7992 addressed comments 9803581d fix 1a78f400 restored neq macro be9cbd5a fmt 902bc66a fmt 511f450a resolved conflicts 4aa212ab modexp fix e06f84dd modmul fix 76e70ac4 fixes ad85d61e fix 4cef5aaa modmul and modexp 1e019356 basic bignum b16b8261 Merge pull request #881 from mir-protocol/bignum-basic 6fe8554f Merge pull request #924 from mir-protocol/empty-stack-replacement 652b2bed allow empty stack replacement a5fad9eb addressed comments f6b9d6ee addressed comments 72b5bb0e fmt 2752456e addressed comments 5e98a5f9 adj trait 50388073 rename 25575df5 cleanup ec0f3ce7 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 692575a2 Bump eth_trie_utils version. (#923) 2ab16344 Merge pull request #921 from mir-protocol/dlubarov_misc bdf35374 Misc b80a28db Misc f13d603a Merge pull request #920 from mir-protocol/dlubarov_misc 47fac8e3 Couple fixes & minor refactor 64c76e76 Merge branch 'main' into bignum-basic c491a989 Merge pull request #919 from mir-protocol/mem_expansion f717a40b Charge for memory expansion e8405eff Merge branch 'main' into bignum-basic de246e22 Merge pull request #918 from mir-protocol/fix_read_ext 7ed53142 Fix reads from not-found ext nodes 7853656e Merge pull request #917 from mir-protocol/fix_clobbering a05ed9fc Fix clobbering of RLP data memory c3ba7a89 Merge branch 'main' into bignum-basic 2ac4fcdf Merge pull request #915 from mir-protocol/fix_clone_account 8c692b72 Fix account cloning f514d966 Merge branch 'main' into bignum-basic 994c54ab Merge pull request #912 from mir-protocol/stack_on_panic 5720cf8a updated function name cda31b5e Merge branch 'main' into bignum-basic 9f75132f Merge pull request #889 from mir-protocol/hash-asm-optimization 38f79e49 optimizations with rep 92ee7786 Merge branch 'main' into 
hash-asm-optimization 69b4a21c Merge branch 'main' into bignum-basic da07a7a8 Merge pull request #914 from mir-protocol/return_post_state 373421a1 Fix tests - need to supply empty code 44c77f55 Input addresses c8d591f6 Add a `prove_with_outputs` method 95347621 div instead of shr cecbfa9b fit c59b979c addmul fix a0a23147 Merge branch 'main' into bignum-basic f518a8b4 Merge branch 'main' into hash-asm-optimization b62bc35d fixes 2d7d2ac3 Merge pull request #886 from toposware/poseidon-native f1ad3da8 fix 062eb82a cleanup e0a4bc31 cleanup fda64475 fmt fa3443a5 new testing interface, and test data bb2233cb Override from_noncanonical_u96() for Goldilocks field 10e7329a Add FFT-based specification for Poseidon MDS layer on x86 targets ee9bfb08 fix 4e736b63 fixes 534395ee fmt 73633354 test data e6027142 cleanup 54eb29e7 fix 6f6c808d more efficient divmod 202990ed Merge branch 'main' into hash-asm-optimization 2195bdd4 Merge branch 'main' of github.com:mir-protocol/plonky2 into fp318 459d2929 folder 1c71fb34 Merge branch 'main' into bignum-basic 1576a300 Merge pull request #817 from mir-protocol/non-inv e97e8188 fixed iszero and cleanup 12e6527b fixed messed up merge 2a0df523 Merge branch 'main' into hash-asm-optimization 44a0596f fmt 930ebafd Merge branch 'main' into bignum-basic 35fb1499 Merge pull request #904 from mir-protocol/optimize-blake2b 6f8a5100 interface changes b0ed6ae0 cleanup 4ef981e4 initial test data ad38f957 TODO for possible future mul optimization 06276334 carry -> carry_limb e57358bc ge -> cmp and returns 0, 1, -1 d4a485ec Log stack on panic 7fad9eb8 Merge branch 'main' into optimize-blake2b a8956b94 flip limbs 9ec97744 run_ops dd7948e7 merge 5cf8028e Merge branch 'main' into bignum-basic de6f01f4 small optimizations 424d8d22 more optimizations 29df451d optimizations 97cb5c75 bug fix 8f231bd0 optimization 265d39a5 cleanup 85411ac4 fixes 7351a166 fix 684b668b fix 63301d6b refactor sha2 compression 2236f30a more small optimizations e5f8632b small 
optimizations 213ba8ff optimized initial hash value generation 7c8026e8 cleanup 2020202e optimize hash generation further further df7ea93a optimize hash generation further 3a0d86e2 hash function optimization ef377c0b cleanup 4e8af821 fixes 9ad25b2a optimizations eebdd029 Merge pull request #910 from mir-protocol/optimize-asm d23e4e20 deal with and test zero-len case 4b6a5146 fix 05788a99 compiles d4c7bfd5 addressed comments 725b5a08 cleanup 4a762553 name change c4b511ba addressed comments 2000d308 addressed comments a738afce Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into fp318 24e0b291 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 676a483c fix 476a554a OR -> ADD 87ad5714 addressed comments 04f44ef4 addressed comments 9e7dc7ca addressed comments 6f05a144 Merge branch 'main' into bignum-basic 7b2c4c61 Merge branch 'main' into optimize-blake2b ac068845 Merge pull request #909 from mir-protocol/gas_to_coinbase d5003b7c Gas fees go to coinbase 181e4409 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 625bdb68 skeleton 6dd99e43 Merge pull request #908 from mir-protocol/fix_call_field 84fbbbf4 Couple minor fixes 54f8dcf4 Merge branch 'main' into optimize-blake2b ce25cc84 Merge pull request #907 from toposware/wasm b3e93e91 Fix plonky2 compilation with wasm32-unknown-unknown target a96418b3 unused test 33ccf898 small optimizations fda2e190 restored blake2b_g_function and call_blake2b_g_function macros 4a378bce Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into fp318 18c83e77 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 6d997a65 more tests f2538fff cleanup bac38f82 fix 3a019f99 fix 3662e41d fixes 1100445d cleanup 4f412182 cleanup 0fdd93b8 cleanup f46694e7 more thorough tests 2aff3e10 cleanup 1d7c28ee bug fixes c98bfb0b cleanup af3dc287 cleanup 10893fe0 addmul test: use carry efd5e6ec cleanup 5477c7dd fixes 119eae95 fix 9976a4b0 addmul initial aa605b67 flag functions used only in 
tests fa605d7b basic bignum 0f55956a optimized initial hash value generation 5994f4d9 cleanup 9d8d81b4 optimize hash generation further further c37d1e25 optimize hash generation further 40f90d83 hash function optimization 70475a5a cleanup 5f592e60 fixes 93abd35f optimizations eea8ab62 Merge pull request #903 from mir-protocol/misc_evm_fixes a6ac0519 Misc EVM fixes 2eed209a Merge pull request #902 from mir-protocol/debug_tries_2 e6aa62f3 Some tooling for debugging tests where the updated tries are not correct 21db4a1b Merge pull request #900 from mir-protocol/sys_gas f117d76b sys_gas f19b7553 Merge pull request #899 from mir-protocol/evm_fixes c558eedd Misc EVM fixes ec216d28 Merge pull request #898 from mir-protocol/move-out-ecdsa 77fb333a Move ecdsa to its own repo 2621d582 Merge pull request #897 from mir-protocol/move-out-u32 18733f11 Move u32 to its own repo b08e7a08 Merge pull request #896 from mir-protocol/move-out-insertion bf8780b2 Move insertion to its own repo 64296bcc Merge pull request #895 from mir-protocol/move-out-waksman b95bc90b moved waksman to outside repo 1ee39b51 fmt ab32f03b fixed multiplication bde5c557 correct mul impl 2c73d5d7 bls field arithmetic 95e5fb59 cleaner rand 6ac59f16 arithmetic skeleton 8ace54dc Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 57ea64e3 Merge pull request #894 from mir-protocol/fix_storage_trie_lookup 1e1e75c9 Fix code that looks for an account's storage trie e8c94632 comment 1d94756e add inverse doc 5aafbaad Merge pull request #893 from mir-protocol/move_out_system_zero 801fa641 link bfaa80a3 Move system-zero to its own repo 13a8d670 loop test 2ea3e5e3 minor changes e3e5c678 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 7781dd36 Plonky2 to 0.1.3 db893831 Merge pull request #892 from mir-protocol/bump_plonky2 2133c7f3 Use new plonky2 52e34265 Bump plonky2 to 0.1.2 79084719 Merge pull request #891 from mir-protocol/fix_hash_or_noop e52b75b0 Fix `hash_or_noop` for general 
hash sizes d17f3aa4 Merge pull request #890 from mir-protocol/test_fixes 29f0692e Fix a few issues found by EVM tests 2a9d4b1a minor 5e3e40a0 more general kernel peek b89e668b minor f5b45ee4 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 33185476 Merge pull request #885 from mir-protocol/skip_log 745bec8d Skip log_kernel_instruction if debug logs disabled e8865130 put extract in interpreter 63f1fbfa fmt be351110 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 6f2d99c7 Merge pull request #884 from mir-protocol/evm-kernel-tests ea9846de format da7a8879 make hash functions take a location pointer a6ffb4b3 simplify byte extraction 4e4cfb06 function API / remove redundancy 53ab0ada remove blake storage 77a7af76 remove sha2 storage abc762f7 cleaner arithmetic 731c29c4 abstract c6cf1dc5 remove custom bce86718 simplify ripe md test e2cac0bb Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv f3946f75 Gas constraints (#880) 444da8f7 better comments 80e49caa segment virts d320fbfb update curve add with ops 787cc890 change segment 71243fd7 fix pairing code after big BN PR merge 2158c1d2 merge 9e60ee25 segment ac40bd5f Optimize `ecMul` precompile (scalar multiplication on BN254) (#852) 3332fbb4 Merge pull request #882 from mir-protocol/back_to_nightly ff80f28b Revert "Set CI to use an older version of nightly" 85b33093 Merge pull request #879 from 0x0ece/patch-1 614c4ae6 Make le_sum public 40866e77 Refactor arithmetic operation traits (#876) c6492bc5 merge fix 7b367f5c merge 83c0292b Move SHL and SHR generation to the CPU. 
(#878) b585b6a7 remove macro 31095e1b stack macro a061b88a naming cecad598 stack macro 361d6d72 tests and stacks cb7c638c more comments 69afed92 refactor 57146c83 miller loop test e63cc2aa Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 9f808fc7 align 69228491 Unify generation and verification of ADD/SUB/LT/GT operations (#872) 1c73e238 fp -> fp254 61ac0eff fmt 0f030fae naming for global labels c107c505 comments e1dca870 name 962754be rand impl a950a262 add comments cd5c92b5 merge ca002aea Optimize `ecrecover` ASM (#840) 9990632f Merge pull request #870 from mir-protocol/prep_for_publish 137bc785 Prep for publishing to crates.io 81511380 TODO 6c4ef29f Add range checks to the arithmetic Stark (#866) aed617c1 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv ea01e19c comment 5b124fb1 minor 6e8d4a57 fix 0eef28af bools 3ea8ad90 fmt 6958d46c names and comments 56be7317 comments b46af11f move consts f70243e7 better comments f0a6ec95 clean asm 136cdd05 Remove InterpolationGate trait (#868) 9c8f1166 ocd d98c69f0 better comments 0b81258a stack macros 3bdb2907 Optimized interpolation gate (#861) c9b005d2 new power works 5deb1648 refactor power 60cbdde8 clean 8ca6ba7b clean c13cf972 tate test 75c5938c rewrite w methods ec4cddb7 inv as method 7b524381 en route to ownership 17cfae66 reorg f34b35ed extra comments 94d99cca extra comments 8b670d54 meh 769c615c cleanup 530fb65b cleanup 155e973d slight refactor d2aa937a improved prover input and test api e06a2f2d duh a5c292c7 space 4d783da8 fmt d99cadeb stack macro b2f9d885 remove redundant macros and improve comments 8e62d994 fmt 922d3ebc add module and fix errors 3fcb5591 redundant macro c74a0c25 test inv from memory abab6bf1 test frob from memory 20fb2cb7 read output from memory 5f2baea0 mul test from memory 7f135fc0 reorg b44d9e2d Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 8ae36474 Merge pull request #865 from mir-protocol/increment_nonce eb7d18da fix clippy 54676487 
cleaner description d6167a63 complete description 23698b74 more comments fda4b4c1 more comments 985e8160 transmute + comments 6e215386 comments f2e40541 Increment sender nonce + buy gas 0daaa3bf org bc9c431e remove comments 9977ae03 new inverse fe91e119 frob format 37ad3407 frob format ecde3d13 frob tests 9cd1f8a1 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv be19cb81 struct impl style arithmetic 31c5db91 rename module ccd4a38a remove make_stack b753836a Merge pull request #864 from mir-protocol/block_circuit b6f6c210 Block circuit 3a6d693f Merge pull request #863 from mir-protocol/smart_contract_test a2f4a58d log df2ba7a3 Basic smart contract test 07e02f2d Merge pull request #862 from mir-protocol/prover_inputs_error_handling a158effe Use error instead of panicking in FromStr 3fbc8bff move comment ea8cfc95 name 2a2880b7 name 800ceb60 zero name e6bcad6c Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into non-inv 446a0d3f name 81861095 Update evm/src/cpu/kernel/asm/curve/bn254/field_arithmetic/inverse.asm 4f38c3a7 name 70d7fb13 cleaner inv 32f24819 Update evm/src/cpu/kernel/asm/curve/bn254/curve_arithmetic/curve_add.asm 49db35d3 Merge branch 'non-inv' of github.com:mir-protocol/plonky2 into non-inv 42f98a09 Update evm/src/bn254.rs 82ce8153 \n 93a363c1 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 068f7485 Update evm/src/witness/util.rs 698ab6e7 Update evm/src/bn254.rs be7a489c Fix stack overflow 8ba8bb62 Merge pull request #860 from mir-protocol/agg_circuit_2 87be6097 Feedback ae212cfb Merge pull request #859 from mir-protocol/remove_older_evm_recursion_logic 14e6e7e9 Merge pull request #858 from mir-protocol/remove_ctl_defaults e4a5c2c9 Merge pull request #857 from mir-protocol/non_tight_degree_bound f4ac2d4f Fix vk 5df78441 Add aggregation circuit 76b3eb30 more fbb72e16 warning e12c6ad5 Remove some older EVM recursion logic 6655e776 Remove CTL defaults 0ca30840 Merge pull request #855 from 
mir-protocol/fixed_stark_recursion 5719c0b7 feedback 1ecdb96a Power of two length 2e59cecc import 40aecc8e Allow non-tight degree bound 18ce7ea5 Disable slow test on CI 595e751a Shrink STARK proofs to a constant degree 5cd86b66 names and format 2b91a1a6 simplify miller loop de494dcf remove prints 77798f88 remove loop endpoint de8637ce name 053a0206 Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv 32cda213 Merge pull request #854 from Sladuca/bool-or-gadget 403e2392 use doc comment 350b9029 add or gadget cca75c77 remove redundant definition c0744d76 TATE TEST PASSES 1f176734 better vec to fp12 9beca707 clean 84fab8d6 clean d4d80f35 rearrange 826702a7 clean f1d5c6bf tuck const e35644e9 miller test passes ef824110 miller in rust f2787a06 more clean 31ee8987 clippy b1f31caf more cleaning 89093b4d clean up 7af11f43 clean up prover code d5cec0e6 clean up code org 3c566e98 tangent and cords work bde569a2 it runs bf7da1c2 POP 41476ce4 fix cee6c653 hex a99b7d51 setup miller 6a93a6be rename e88e28a1 POWER WORKS e9e5528c space 5aab8ac0 first part works bc3adc16 debug pow 05e83526 test 7cd0dbae setup pow 32758829 refactor 950771a6 clean up inverse 95383db4 inverse edits c4e512ef Merge branch 'main' of github.com:mir-protocol/plonky2 into non-inv c2462971 inverse works 7788a29f skeleton inv d90a0559 Merge pull request #853 from mir-protocol/ctl_fixes b8e97aaa Fix logic and Keccak CTLs a503b058 fix 15ee75f2 all but inv d6c7… * Bump deps * Make smt_trie a local dependency * fmt * Add temporary bindings to old dependencies * Implement recursive table chain for Poseidon * Bring latest fixes * More missing fixes * More * PR feedback * Verify proof * [WIP] Add poseidon general * Copy paste code from create-poseidon-tables * [WIP] Hash mismatch * [WIP] Fix poseidon general * [WIP] Constraints fixed * Fix CTLs * Constraint is_simple with CTLs * Add input/output CTLs for poseidon and fix capacity * Fix circuit constraints and test * Fix Clippy * Remove commented code 
* Address reviews * Minor * Clean dead code * Cargo.toml * Clippy * Apply suggestions from code review Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com> * (Partially) address reviews * Missing changes * Change plonky2 version in smt_trie/Cargo.toml * Missing nits * Minor --------- Co-authored-by: wborgeaud Co-authored-by: Robin Salen Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com> --- Cargo.toml | 2 +- evm_arithmetization/Cargo.toml | 2 +- evm_arithmetization/src/all_stark.rs | 36 +- evm_arithmetization/src/cpu/columns/ops.rs | 1 + evm_arithmetization/src/cpu/cpu_stark.rs | 51 +- evm_arithmetization/src/cpu/decode.rs | 2 +- .../src/cpu/kernel/asm/account_code.asm | 139 +---- .../src/cpu/kernel/interpreter.rs | 9 +- evm_arithmetization/src/cpu/kernel/opcodes.rs | 1 + evm_arithmetization/src/cpu/stack.rs | 50 +- evm_arithmetization/src/generation/mod.rs | 7 +- evm_arithmetization/src/generation/mpt.rs | 2 + evm_arithmetization/src/generation/state.rs | 2 +- evm_arithmetization/src/poseidon/columns.rs | 38 ++ .../src/poseidon/poseidon_stark.rs | 545 ++++++++++++++++-- evm_arithmetization/src/verifier.rs | 8 + evm_arithmetization/src/witness/gas.rs | 1 + evm_arithmetization/src/witness/operation.rs | 57 +- evm_arithmetization/src/witness/traces.rs | 7 +- evm_arithmetization/src/witness/transition.rs | 8 +- evm_arithmetization/src/witness/util.rs | 3 + smt_trie/Cargo.toml | 2 +- smt_trie/src/code.rs | 52 +- trace_decoder/Cargo.toml | 1 - 24 files changed, 804 insertions(+), 222 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 6119293e3..a5f1cb63a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ members = [ "mpt_trie", "proof_gen", "smt_trie", - "trace_decoder" + "trace_decoder", ] resolver = "2" diff --git a/evm_arithmetization/Cargo.toml b/evm_arithmetization/Cargo.toml index 6156b470c..e4370cb86 100644 --- a/evm_arithmetization/Cargo.toml +++ b/evm_arithmetization/Cargo.toml @@ -59,7 +59,7 @@ asmtools = 
["hex"] parallel = [ "plonky2/parallel", "plonky2_maybe_rayon/parallel", - "starky/parallel" + "starky/parallel", ] [[bin]] diff --git a/evm_arithmetization/src/all_stark.rs b/evm_arithmetization/src/all_stark.rs index 43e471db1..71d9f1069 100644 --- a/evm_arithmetization/src/all_stark.rs +++ b/evm_arithmetization/src/all_stark.rs @@ -23,8 +23,9 @@ use crate::logic; use crate::logic::LogicStark; use crate::memory::memory_stark; use crate::memory::memory_stark::MemoryStark; -use crate::poseidon::poseidon_stark; +use crate::poseidon::columns::POSEIDON_SPONGE_RATE; use crate::poseidon::poseidon_stark::PoseidonStark; +use crate::poseidon::poseidon_stark::{self, FELT_MAX_BYTES}; /// Structure containing all STARKs and the cross-table lookups. #[derive(Clone)] @@ -127,7 +128,9 @@ pub(crate) fn all_cross_table_lookups() -> Vec> { ctl_keccak_outputs(), ctl_logic(), ctl_memory(), - ctl_poseidon(), + ctl_poseidon_simple(), + ctl_poseidon_general_input(), + ctl_poseidon_general_output(), ] } @@ -296,6 +299,14 @@ fn ctl_memory() -> CrossTableLookup { Some(byte_packing_stark::ctl_looking_memory_filter(i)), ) }); + + let poseidon_general_reads = (0..FELT_MAX_BYTES * POSEIDON_SPONGE_RATE).map(|i| { + TableWithColumns::new( + *Table::Poseidon, + poseidon_stark::ctl_looking_memory(i), + Some(poseidon_stark::ctl_looking_memory_filter()), + ) + }); let all_lookers = vec![ cpu_memory_code_read, cpu_push_write_ops, @@ -306,6 +317,7 @@ fn ctl_memory() -> CrossTableLookup { .chain(cpu_memory_gp_ops) .chain(keccak_sponge_reads) .chain(byte_packing_ops) + .chain(poseidon_general_reads) .collect(); let memory_looked = TableWithColumns::new( *Table::Memory, @@ -315,9 +327,23 @@ fn ctl_memory() -> CrossTableLookup { CrossTableLookup::new(all_lookers, memory_looked) } -fn ctl_poseidon() -> CrossTableLookup { +fn ctl_poseidon_simple() -> CrossTableLookup { + CrossTableLookup::new( + vec![cpu_stark::ctl_poseidon_simple_op()], + poseidon_stark::ctl_looked_simple_op(), + ) +} + +fn 
ctl_poseidon_general_input() -> CrossTableLookup { + CrossTableLookup::new( + vec![cpu_stark::ctl_poseidon_general_input()], + poseidon_stark::ctl_looked_general_input(), + ) +} + +fn ctl_poseidon_general_output() -> CrossTableLookup { CrossTableLookup::new( - vec![cpu_stark::ctl_poseidon()], - poseidon_stark::ctl_looked(), + vec![cpu_stark::ctl_poseidon_general_output()], + poseidon_stark::ctl_looked_general_output(), ) } diff --git a/evm_arithmetization/src/cpu/columns/ops.rs b/evm_arithmetization/src/cpu/columns/ops.rs index 266354fd8..e040e9053 100644 --- a/evm_arithmetization/src/cpu/columns/ops.rs +++ b/evm_arithmetization/src/cpu/columns/ops.rs @@ -24,6 +24,7 @@ pub(crate) struct OpsColumnsView { pub shift: T, /// Combines JUMPDEST and KECCAK_GENERAL flags. pub jumpdest_keccak_general: T, + /// Combines POSEIDON and POSEIDON_GENERAL flags. pub poseidon: T, /// Combines JUMP and JUMPI flags. pub jumps: T, diff --git a/evm_arithmetization/src/cpu/cpu_stark.rs b/evm_arithmetization/src/cpu/cpu_stark.rs index 9c6c43249..143d2e7df 100644 --- a/evm_arithmetization/src/cpu/cpu_stark.rs +++ b/evm_arithmetization/src/cpu/cpu_stark.rs @@ -430,7 +430,7 @@ pub(crate) fn ctl_filter_set_context() -> Filter { } /// Returns the `TableWithColumns` for the CPU rows calling POSEIDON. 
-pub(crate) fn ctl_poseidon() -> TableWithColumns { +pub(crate) fn ctl_poseidon_simple_op() -> TableWithColumns { let mut columns = Vec::new(); for channel in 0..3 { for i in 0..VALUE_LIMBS / 2 { @@ -444,13 +444,58 @@ pub(crate) fn ctl_poseidon() -> TableWithColumns { } } columns.extend(Column::singles_next_row(COL_MAP.mem_channels[0].value)); + TableWithColumns::new(*Table::Cpu, columns, ctl_poseidon_simple_filter()) +} + +pub(crate) fn ctl_poseidon_general_input() -> TableWithColumns { + // When executing POSEIDON_GENERAL, the GP memory channels are used as follows: + // GP channel 0: stack[-1] = addr (context, segment, virt) + // GP channel 1: stack[-2] = len + let (context, segment, virt) = get_addr(&COL_MAP, 0); + let context = Column::single(context); + let segment: Column = Column::single(segment); + let virt = Column::single(virt); + let len = Column::single(COL_MAP.mem_channels[1].value[0]); + + let num_channels = F::from_canonical_usize(NUM_CHANNELS); + let timestamp = Column::linear_combination([(COL_MAP.clock, num_channels)]); + TableWithColumns::new( *Table::Cpu, - columns, - Some(Filter::new_simple(Column::single(COL_MAP.op.poseidon))), + vec![context, segment, virt, len, timestamp], + ctl_poseidon_general_filter(), ) } +pub(crate) fn ctl_poseidon_simple_filter() -> Option> { + Some(Filter::new( + vec![( + Column::single(COL_MAP.op.poseidon), + Column::linear_combination_with_constant([(COL_MAP.opcode_bits[0], -F::ONE)], F::ONE), + )], + vec![], + )) +} + +pub(crate) fn ctl_poseidon_general_filter() -> Option> { + Some(Filter::new( + vec![( + Column::single(COL_MAP.op.poseidon), + Column::single(COL_MAP.opcode_bits[0]), + )], + vec![], + )) +} + +/// Returns the `TableWithColumns` for the CPU rows calling POSEIDON_GENERAL. 
+pub(crate) fn ctl_poseidon_general_output() -> TableWithColumns { + let mut columns = Vec::new(); + columns.extend(Column::singles_next_row(COL_MAP.mem_channels[0].value)); + let num_channels = F::from_canonical_usize(NUM_CHANNELS); + columns.push(Column::linear_combination([(COL_MAP.clock, num_channels)])); + TableWithColumns::new(*Table::Cpu, columns, ctl_poseidon_general_filter()) +} + /// Disable the specified memory channels. /// Since channel 0 contains the top of the stack and is handled specially, /// channels to disable are 1, 2 or both. All cases can be expressed as a vec. diff --git a/evm_arithmetization/src/cpu/decode.rs b/evm_arithmetization/src/cpu/decode.rs index 8bbb5730b..22fbaefc1 100644 --- a/evm_arithmetization/src/cpu/decode.rs +++ b/evm_arithmetization/src/cpu/decode.rs @@ -32,7 +32,7 @@ const OPCODES: [(u8, usize, bool, usize); 6] = [ // manually here, and partly through the Arithmetic table CTL. FP254 operation flags are // handled partly manually here, and partly through the Arithmetic table CTL. (0x14, 1, false, COL_MAP.op.eq_iszero), - (0x22, 0, true, COL_MAP.op.poseidon), + (0x22, 1, true, COL_MAP.op.poseidon), // AND, OR and XOR flags are handled partly manually here, and partly through the Logic table // CTL. NOT and POP are handled manually here. // SHL and SHR flags are handled partly manually here, and partly through the Logic table CTL. diff --git a/evm_arithmetization/src/cpu/kernel/asm/account_code.asm b/evm_arithmetization/src/cpu/kernel/asm/account_code.asm index 0bdabe2e9..6dac4c32a 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/account_code.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/account_code.asm @@ -118,144 +118,11 @@ load_code_padded_ctd: MSTORE_GENERAL // stack: retdest, code_size JUMP - -// TODO: This could certainly be optimized, or implemented directly in the Poseidon Stark. 
global poseidon_hash_code: // stack: padded_code_size, codehash, ctx, retdest - %stack (padded_code_size, codehash, ctx) -> (0, 0, padded_code_size, ctx, codehash) -poseidon_hash_code_loop: - // stack: i, capacity, padded_code_size, ctx, codehash, retdest - DUP3 DUP2 EQ %jumpi(poseidon_hash_code_after) - %stack (i, capacity, code_size, ctx) -> (i, ctx, i, capacity, code_size, ctx) - ADD MLOAD_GENERAL - %stack (b, i, capacity, code_size, ctx) -> (1, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(8) ADD - %stack (b, i, capacity, code_size, ctx) -> (2, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(16) ADD - %stack (b, i, capacity, code_size, ctx) -> (3, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(24) ADD - %stack (b, i, capacity, code_size, ctx) -> (4, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(32) ADD - %stack (b, i, capacity, code_size, ctx) -> (5, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(40) ADD - %stack (b, i, capacity, code_size, ctx) -> (6, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(48) ADD - - %stack (b, i, capacity, code_size, ctx) -> (7, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(64) ADD - %stack (b, i, capacity, code_size, ctx) -> (8, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(72) ADD - %stack (b, i, capacity, code_size, ctx) -> (9, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(80) ADD - %stack (b, i, capacity, code_size, ctx) -> (10, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(88) ADD - %stack (b, i, capacity, code_size, ctx) -> (11, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(96) ADD - %stack (b, i, capacity, code_size, ctx) -> (12, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(104) ADD - 
%stack (b, i, capacity, code_size, ctx) -> (13, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(112) ADD - - %stack (b, i, capacity, code_size, ctx) -> (14, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(128) ADD - %stack (b, i, capacity, code_size, ctx) -> (15, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(136) ADD - %stack (b, i, capacity, code_size, ctx) -> (16, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(144) ADD - %stack (b, i, capacity, code_size, ctx) -> (17, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(152) ADD - %stack (b, i, capacity, code_size, ctx) -> (18, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(160) ADD - %stack (b, i, capacity, code_size, ctx) -> (19, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(168) ADD - %stack (b, i, capacity, code_size, ctx) -> (20, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(176) ADD - - %stack (b, i, capacity, code_size, ctx) -> (21, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(192) ADD - %stack (b, i, capacity, code_size, ctx) -> (22, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(200) ADD - %stack (b, i, capacity, code_size, ctx) -> (23, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(208) ADD - %stack (b, i, capacity, code_size, ctx) -> (24, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(216) ADD - %stack (b, i, capacity, code_size, ctx) -> (25, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(224) ADD - %stack (b, i, capacity, code_size, ctx) -> (26, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(232) ADD - %stack (b, i, capacity, code_size, ctx) -> (27, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL 
%shl_const(240) ADD - %stack (B0, i, capacity, code_size, ctx) -> (i, capacity, code_size, ctx, B0) - - %stack (i, capacity, code_size, ctx) -> (28, i, ctx, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL - %stack (b, i, capacity, code_size, ctx) -> (29, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(8) ADD - %stack (b, i, capacity, code_size, ctx) -> (30, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(16) ADD - %stack (b, i, capacity, code_size, ctx) -> (31, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(24) ADD - %stack (b, i, capacity, code_size, ctx) -> (32, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(32) ADD - %stack (b, i, capacity, code_size, ctx) -> (33, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(40) ADD - %stack (b, i, capacity, code_size, ctx) -> (34, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(48) ADD - - %stack (b, i, capacity, code_size, ctx) -> (35, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(64) ADD - %stack (b, i, capacity, code_size, ctx) -> (36, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(72) ADD - %stack (b, i, capacity, code_size, ctx) -> (37, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(80) ADD - %stack (b, i, capacity, code_size, ctx) -> (38, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(88) ADD - %stack (b, i, capacity, code_size, ctx) -> (39, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(96) ADD - %stack (b, i, capacity, code_size, ctx) -> (40, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(104) ADD - %stack (b, i, capacity, code_size, ctx) -> (41, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(112) ADD - - %stack (b, i, capacity, code_size, ctx) -> (42, i, 
ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(128) ADD - %stack (b, i, capacity, code_size, ctx) -> (43, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(136) ADD - %stack (b, i, capacity, code_size, ctx) -> (44, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(144) ADD - %stack (b, i, capacity, code_size, ctx) -> (45, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(152) ADD - %stack (b, i, capacity, code_size, ctx) -> (46, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(160) ADD - %stack (b, i, capacity, code_size, ctx) -> (47, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(168) ADD - %stack (b, i, capacity, code_size, ctx) -> (48, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(176) ADD - - %stack (b, i, capacity, code_size, ctx) -> (49, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(192) ADD - %stack (b, i, capacity, code_size, ctx) -> (50, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(200) ADD - %stack (b, i, capacity, code_size, ctx) -> (51, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(208) ADD - %stack (b, i, capacity, code_size, ctx) -> (52, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(216) ADD - %stack (b, i, capacity, code_size, ctx) -> (53, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(224) ADD - %stack (b, i, capacity, code_size, ctx) -> (54, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(232) ADD - %stack (b, i, capacity, code_size, ctx) -> (55, i, ctx, b, i, capacity, code_size, ctx) - ADD ADD MLOAD_GENERAL %shl_const(240) ADD - %stack (B1, i, capacity, code_size, ctx, B0) -> (B0, B1, capacity, i, code_size, ctx) - POSEIDON - %stack (capacity, i, padded_code_size, ctx) -> (i, capacity, 
padded_code_size, ctx) - // stack: i, capacity, padded_code_size, ctx, codehash, retdest - %add_const(56) - %jump(poseidon_hash_code_loop) - -global poseidon_hash_code_after: - // stack: i, capacity, padded_code_size, ctx, codehash, retdest - %stack (i, capacity, padded_code_size, ctx, codehash) -> (capacity, codehash, padded_code_size, ctx) + // %stack (padded_code_size, codehash, ctx) -> (0, 0, padded_code_size, ctx, codehash) + %stack (padded_code_size, codehash, ctx) -> (ctx, padded_code_size, codehash, padded_code_size, ctx) + POSEIDON_GENERAL %assert_eq // stack: padded_code_size, ctx, retdest %decrement diff --git a/evm_arithmetization/src/cpu/kernel/interpreter.rs b/evm_arithmetization/src/cpu/kernel/interpreter.rs index 9c2802b4c..15bb45b9a 100644 --- a/evm_arithmetization/src/cpu/kernel/interpreter.rs +++ b/evm_arithmetization/src/cpu/kernel/interpreter.rs @@ -4,14 +4,17 @@ use core::cmp::Ordering; use core::ops::Range; use std::collections::{BTreeSet, HashMap}; -use anyhow::anyhow; -use ethereum_types::{BigEndianHash, U256}; +use anyhow::{anyhow, bail}; +use ethereum_types::{BigEndianHash, H160, H256, U256, U512}; +use itertools::Itertools; +use keccak_hash::keccak; use mpt_trie::partial_trie::PartialTrie; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::hash::poseidon::Poseidon; use serde::Serialize; +use smt_trie::code::poseidon_hash_padded_byte_vec; use smt_trie::smt::{hash_serialize, hash_serialize_u256}; use smt_trie::utils::hashout2u; @@ -46,7 +49,7 @@ use crate::{arithmetic, keccak, logic}; /// Halt interpreter execution whenever a jump to this offset is done. const DEFAULT_HALT_OFFSET: usize = 0xdeadbeef; -pub(crate) struct Interpreter { +pub(crate) struct Interpreter { /// The interpreter holds a `GenerationState` to keep track of the memory /// and registers. 
pub(crate) generation_state: GenerationState, diff --git a/evm_arithmetization/src/cpu/kernel/opcodes.rs b/evm_arithmetization/src/cpu/kernel/opcodes.rs index 47cc97a3c..800d5ae07 100644 --- a/evm_arithmetization/src/cpu/kernel/opcodes.rs +++ b/evm_arithmetization/src/cpu/kernel/opcodes.rs @@ -40,6 +40,7 @@ pub fn get_opcode(mnemonic: &str) -> u8 { "KECCAK256" => 0x20, "KECCAK_GENERAL" => 0x21, "POSEIDON" => 0x22, + "POSEIDON_GENERAL" => 0x23, "ADDRESS" => 0x30, "BALANCE" => 0x31, "ORIGIN" => 0x32, diff --git a/evm_arithmetization/src/cpu/stack.rs b/evm_arithmetization/src/cpu/stack.rs index 0c460ca11..a14e0ff55 100644 --- a/evm_arithmetization/src/cpu/stack.rs +++ b/evm_arithmetization/src/cpu/stack.rs @@ -102,6 +102,20 @@ pub(crate) const JUMPDEST_OP: StackBehavior = StackBehavior { disable_other_channels: true, }; +/// Stack behavior for POSEIDON. +pub(crate) const POSEIDON_OP: StackBehavior = StackBehavior { + num_pops: 3, + pushes: true, + disable_other_channels: true, +}; + +/// Stack behavior for POSEIDON_GENERAL. +pub(crate) const POSEIDON_GENERAL_OP: StackBehavior = StackBehavior { + num_pops: 2, + pushes: true, + disable_other_channels: true, +}; + // AUDITORS: If the value below is `None`, then the operation must be manually // checked to ensure that every general-purpose memory channel is either // disabled or has its read flag and address properly constrained. The same @@ -121,11 +135,7 @@ pub(crate) const STACK_BEHAVIORS: OpsColumnsView> = OpsCol disable_other_channels: false, }), jumpdest_keccak_general: None, - poseidon: Some(StackBehavior { - num_pops: 3, - pushes: true, - disable_other_channels: true, - }), + poseidon: None, push_prover_input: Some(StackBehavior { num_pops: 0, pushes: true, @@ -336,6 +346,20 @@ pub(crate) fn eval_packed( yield_constr, ); + // Constrain stack for POSEIDON. 
+ let poseidon_filter = lv.op.poseidon * (P::ONES - lv.opcode_bits[0]); + eval_packed_one(lv, nv, poseidon_filter, POSEIDON_OP, yield_constr); + + // Constrain stack for POSEIDON_GENERAL. + let poseidon_general_filter = lv.op.poseidon * lv.opcode_bits[0]; + eval_packed_one( + lv, + nv, + poseidon_general_filter, + POSEIDON_GENERAL_OP, + yield_constr, + ); + // Stack constraints for POP. // The only constraints POP has are stack constraints. // Since POP and NOT are combined into one flag and they have @@ -656,6 +680,22 @@ pub(crate) fn eval_ext_circuit, const D: usize>( yield_constr, ); + // Constrain stack for POSEIDON. + let mut poseidon_filter = builder.sub_extension(one, lv.opcode_bits[0]); + poseidon_filter = builder.mul_extension(lv.op.poseidon, poseidon_filter); + eval_ext_circuit_one(builder, lv, nv, poseidon_filter, POSEIDON_OP, yield_constr); + + // Constrain stack for POSEIDON_GENERAL. + let poseidon_general_filter = builder.mul_extension(lv.op.poseidon, lv.opcode_bits[0]); + eval_ext_circuit_one( + builder, + lv, + nv, + poseidon_general_filter, + POSEIDON_GENERAL_OP, + yield_constr, + ); + // Stack constraints for POP. // The only constraints POP has are stack constraints. 
// Since POP and NOT are combined into one flag and they have diff --git a/evm_arithmetization/src/generation/mod.rs b/evm_arithmetization/src/generation/mod.rs index d7907827a..d5b900628 100644 --- a/evm_arithmetization/src/generation/mod.rs +++ b/evm_arithmetization/src/generation/mod.rs @@ -11,7 +11,6 @@ use plonky2::hash::hash_types::RichField; use plonky2::timed; use plonky2::util::timing::TimingTree; use serde::{Deserialize, Serialize}; -use smt_trie::smt::hash_serialize_u256; use starky::config::StarkConfig; use GlobalMetadata::{ ReceiptTrieRootDigestAfter, ReceiptTrieRootDigestBefore, StateTrieRootDigestAfter, @@ -35,8 +34,12 @@ pub(crate) mod rlp; pub(crate) mod state; mod trie_extractor; +use plonky2::field::types::PrimeField64; +use smt_trie::smt::{hash_serialize, hash_serialize_u256}; + +use self::mpt::{load_all_mpts, TrieRootPtrs}; use self::state::State; -use crate::witness::util::mem_write_log; +use crate::witness::util::{mem_write_log, stack_peek}; /// Inputs needed for trace generation. 
#[derive(Clone, Debug, Deserialize, Serialize, Default)] diff --git a/evm_arithmetization/src/generation/mpt.rs b/evm_arithmetization/src/generation/mpt.rs index 24ad6f19f..cebbe84ac 100644 --- a/evm_arithmetization/src/generation/mpt.rs +++ b/evm_arithmetization/src/generation/mpt.rs @@ -6,6 +6,8 @@ use ethereum_types::{Address, BigEndianHash, H256, U256, U512}; use keccak_hash::keccak; use mpt_trie::nibbles::{Nibbles, NibblesIntern}; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; +use plonky2::field::goldilocks_field::GoldilocksField; +use plonky2::hash::hash_types::RichField; use rlp::{Decodable, DecoderError, Encodable, PayloadInfo, Rlp, RlpStream}; use rlp_derive::{RlpDecodable, RlpEncodable}; use smt_trie::code::{hash_bytecode_u256, hash_contract_bytecode}; diff --git a/evm_arithmetization/src/generation/state.rs b/evm_arithmetization/src/generation/state.rs index 7ea2fd4d6..ed3aee09f 100644 --- a/evm_arithmetization/src/generation/state.rs +++ b/evm_arithmetization/src/generation/state.rs @@ -271,7 +271,7 @@ pub(crate) trait State { } #[derive(Debug)] -pub(crate) struct GenerationState { +pub(crate) struct GenerationState { pub(crate) inputs: GenerationInputs, pub(crate) registers: RegistersState, pub(crate) memory: MemoryState, diff --git a/evm_arithmetization/src/poseidon/columns.rs b/evm_arithmetization/src/poseidon/columns.rs index fcd0621b4..b5b86721c 100644 --- a/evm_arithmetization/src/poseidon/columns.rs +++ b/evm_arithmetization/src/poseidon/columns.rs @@ -3,6 +3,7 @@ use std::mem::{size_of, transmute}; use plonky2::hash::poseidon; +use super::poseidon_stark::FELT_MAX_BYTES; use crate::util::{indices_arr, transmute_no_compile_time_size_checks}; pub(crate) const POSEIDON_SPONGE_WIDTH: usize = poseidon::SPONGE_WIDTH; @@ -14,6 +15,32 @@ pub(crate) const POSEIDON_DIGEST: usize = 4; #[repr(C)] #[derive(Eq, PartialEq, Debug)] pub(crate) struct PoseidonColumnsView { + // The base address at which we will read the input block. 
+ pub context: T, + pub segment: T, + pub virt: T, + + /// The timestamp at which Poseidon is called. + pub timestamp: T, + + /// The length of the original input for `PoseidonGeneralOp`. 0 for + /// `PoseidonSimpleOp`. + pub len: T, + /// The number of elements that have already been absorbed prior + /// to this block. + pub already_absorbed_elements: T, + + /// If this row represents a final block row, the `i`th entry should be 1 if + /// the final chunk of input has length `i` (in other words if `len - + /// already_absorbed == i`), otherwise 0. + /// + /// If this row represents a full input block, this should contain all 0s. + pub is_final_input_len: [T; POSEIDON_SPONGE_RATE], + + /// 1 if this row represents a full input block, i.e. one in which each + /// element is an input element, not a padding element; 0 otherwise. + pub is_full_input_block: T, + /// Registers to hold permutation inputs. pub input: [T; POSEIDON_SPONGE_WIDTH], @@ -44,6 +71,17 @@ pub(crate) struct PoseidonColumnsView { /// Holds the pseudo-inverse of (digest_high_limb_i - 2^32 + 1). pub pinv: [T; POSEIDON_DIGEST], + /// Holds the byte decomposition of the input, except for the less + /// significant byte. + pub input_bytes: [[T; FELT_MAX_BYTES - 1]; POSEIDON_SPONGE_RATE], + + /// Indicates if this is a simple operation where inputs are + /// read from the top of the stack. + pub is_simple_op: T, + + /// Indicates if this is the first row of a general operation. 
+ pub is_first_row_general_op: T, + pub not_padding: T, } diff --git a/evm_arithmetization/src/poseidon/poseidon_stark.rs b/evm_arithmetization/src/poseidon/poseidon_stark.rs index bfa695cfa..79713c0a2 100644 --- a/evm_arithmetization/src/poseidon/poseidon_stark.rs +++ b/evm_arithmetization/src/poseidon/poseidon_stark.rs @@ -6,12 +6,14 @@ use itertools::Itertools; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; use plonky2::field::polynomial::PolynomialValues; -use plonky2::field::types::Field; +use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::hash::poseidon::Poseidon; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::timed; use plonky2::util::timing::TimingTree; +use plonky2_maybe_rayon::rayon::iter::{self, repeat}; +use smt_trie::code::poseidon_hash_padded_byte_vec; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use starky::cross_table_lookup::TableWithColumns; use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; @@ -28,20 +30,141 @@ use super::columns::{ use crate::all_stark::{EvmStarkFrame, Table}; use crate::witness::memory::MemoryAddress; -pub(crate) fn ctl_looked() -> TableWithColumns { +/// Maximum number of bytes that can be packed into a field element without +/// performing a modular reduction. +// TODO: this constant depends on the size of F, which is not bounded. 
+pub const FELT_MAX_BYTES: usize = 7; + +pub(crate) fn ctl_looked_simple_op() -> TableWithColumns { let mut columns = Column::singles(POSEIDON_COL_MAP.input).collect_vec(); columns.extend(Column::singles(POSEIDON_COL_MAP.digest)); TableWithColumns::new( *Table::Poseidon, columns, Some(Filter::new_simple(Column::single( - POSEIDON_COL_MAP.not_padding, + POSEIDON_COL_MAP.is_simple_op, + ))), + ) +} + +pub(crate) fn ctl_looked_general_output() -> TableWithColumns { + let mut columns = Column::singles(POSEIDON_COL_MAP.digest).collect_vec(); + columns.push(Column::single(POSEIDON_COL_MAP.timestamp)); + TableWithColumns::new( + *Table::Poseidon, + columns, + Some(Filter::new( + vec![( + Column::sum(POSEIDON_COL_MAP.is_final_input_len), + Column::linear_combination_with_constant( + [(POSEIDON_COL_MAP.is_simple_op, -F::ONE)], + F::ONE, + ), + )], + vec![], + )), + ) +} + +pub(crate) fn ctl_looked_general_input() -> TableWithColumns { + let context = Column::single(POSEIDON_COL_MAP.context); + let segment: Column = Column::single(POSEIDON_COL_MAP.segment); + let virt = Column::single(POSEIDON_COL_MAP.virt); + let len = Column::single(POSEIDON_COL_MAP.len); + + let timestamp = Column::single(POSEIDON_COL_MAP.timestamp); + + TableWithColumns::new( + *Table::Poseidon, + vec![context, segment, virt, len, timestamp], + Some(Filter::new_simple(Column::single( + POSEIDON_COL_MAP.is_first_row_general_op, ))), ) } +pub fn ctl_looking_memory(i: usize) -> Vec> { + let cols = POSEIDON_COL_MAP; + let mut res = vec![Column::constant(F::ONE)]; // is_read + + res.extend(Column::singles([cols.context, cols.segment])); + + res.push(Column::linear_combination_with_constant( + [ + (cols.virt, F::ONE), + (cols.already_absorbed_elements, F::ONE), + ], + F::from_canonical_usize(i), + )); + + // The first byte of of each field element is + // mem[virt + already_absorbed_elements + i/ FELT_MAX_BYTES] - \sum_j + // input_bytes[i/FELT_MAX_BYTES][j]* 256^j. 
+ // The other bytes are input_bytes[i/FELT_MAX_BYTES][i % FELT_MAX_BYTES - 1] + if i % FELT_MAX_BYTES == 0 { + res.push(Column::linear_combination( + std::iter::once((cols.input[i / FELT_MAX_BYTES], F::ONE)).chain( + (0..FELT_MAX_BYTES - 1).map(|j| { + ( + cols.input_bytes[i / FELT_MAX_BYTES][j], + -F::from_canonical_u64(1 << (8 * (j + 1))), + ) + }), + ), + )); + } else { + res.push(Column::single( + cols.input_bytes[i / FELT_MAX_BYTES][(i % FELT_MAX_BYTES) - 1], + )); + } + res.extend((1..8).map(|_| Column::zero())); + + res.push(Column::single(cols.timestamp)); + + assert_eq!( + res.len(), + crate::memory::memory_stark::ctl_data::().len() + ); + + res +} + +// TODO: Support non-padded inputs? +pub fn ctl_looking_memory_filter() -> Filter { + let cols = POSEIDON_COL_MAP; + Filter::new( + vec![( + Column::single(cols.not_padding), + Column::linear_combination_with_constant([(cols.is_simple_op, -F::ONE)], F::ONE), + )], + vec![], + ) +} + +#[derive(Clone, Debug)] +pub enum PoseidonOp { + PoseidonSimpleOp(PoseidonSimpleOp), + PoseidonGeneralOp(PoseidonGeneralOp), +} + #[derive(Copy, Clone, Debug)] -pub struct PoseidonOp(pub [F; POSEIDON_SPONGE_WIDTH]); +pub struct PoseidonSimpleOp(pub [F; POSEIDON_SPONGE_WIDTH]); + +#[derive(Clone, Debug)] +pub struct PoseidonGeneralOp { + /// The base address at which inputs are read. + pub(crate) base_address: MemoryAddress, + + /// The timestamp at which inputs are read. + pub(crate) timestamp: usize, + + /// The input that was read. We assume that it was + /// previously padded. + pub(crate) input: Vec, + + /// Length of the input before paddding. 
+ pub(crate) len: usize, +} #[derive(Copy, Clone, Default)] pub struct PoseidonStark { @@ -58,11 +181,28 @@ impl, const D: usize> PoseidonStark { operations: Vec>, min_rows: usize, ) -> Vec<[F; NUM_COLUMNS]> { - let num_rows = operations.len().max(min_rows).next_power_of_two(); - let mut rows = Vec::with_capacity(operations.len().max(min_rows)); + let base_len: usize = operations + .iter() + .map(|op| match op { + PoseidonOp::PoseidonSimpleOp(_) => 1, + PoseidonOp::PoseidonGeneralOp(op) => { + debug_assert!(op.input.len() % (FELT_MAX_BYTES * POSEIDON_SPONGE_RATE) == 0); + (op.input.len() + FELT_MAX_BYTES * POSEIDON_SPONGE_RATE - 1) + / (FELT_MAX_BYTES * POSEIDON_SPONGE_RATE) + } + }) + .sum(); + + let num_rows = base_len.max(min_rows).next_power_of_two(); + let mut rows = Vec::with_capacity(base_len.max(min_rows)); for op in operations { - rows.push(self.generate_row_for_op(op)); + match op { + PoseidonOp::PoseidonSimpleOp(op) => rows.push(self.generate_row_for_simple_op(op)), + PoseidonOp::PoseidonGeneralOp(op) => { + rows.extend(self.generate_rows_for_general_op(op)) + } + } } // We generate "actual" rows for padding to avoid having to store @@ -74,17 +214,127 @@ impl, const D: usize> PoseidonStark { tmp_row } .into(); - rows.resize(num_rows, padding_row); + while rows.len() < num_rows { + rows.push(padding_row); + } rows } - fn generate_row_for_op(&self, op: PoseidonOp) -> [F; NUM_COLUMNS] { + fn generate_row_for_simple_op(&self, op: PoseidonSimpleOp) -> [F; NUM_COLUMNS] { let mut row = PoseidonColumnsView::default(); Self::generate_perm(&mut row, op.0); + row.is_final_input_len[POSEIDON_SPONGE_RATE - 1] = F::ONE; row.not_padding = F::ONE; + row.is_simple_op = F::ONE; row.into() } + fn generate_rows_for_general_op(&self, op: PoseidonGeneralOp) -> Vec<[F; NUM_COLUMNS]> { + let mut input_blocks = op.input.chunks_exact(FELT_MAX_BYTES * POSEIDON_SPONGE_RATE); + let mut rows: Vec<[F; NUM_COLUMNS]> = + Vec::with_capacity(op.input.len() / (FELT_MAX_BYTES * 
POSEIDON_SPONGE_RATE)); + let last_non_padding_elt = op.len % (FELT_MAX_BYTES * POSEIDON_SPONGE_RATE); + let total_length = input_blocks.len(); + let mut already_absorbed_elements = 0; + let mut state = [F::ZERO; POSEIDON_SPONGE_WIDTH]; + for (counter, block) in input_blocks.enumerate() { + state[0..POSEIDON_SPONGE_RATE].copy_from_slice( + &block + .chunks_exact(FELT_MAX_BYTES) + .map(|first_bytes| { + let mut bytes = [0u8; POSEIDON_SPONGE_RATE]; + bytes[..7].copy_from_slice(first_bytes); + F::from_canonical_u64(u64::from_le_bytes(bytes)) + }) + .collect::>(), + ); + let mut row = if counter == total_length - 1 { + let tmp_row = + self.generate_trace_final_row_for_perm(state, &op, already_absorbed_elements); + already_absorbed_elements += last_non_padding_elt; + tmp_row + } else { + let tmp_row = + self.generate_trace_row_for_perm(state, &op, already_absorbed_elements); + already_absorbed_elements += FELT_MAX_BYTES * POSEIDON_SPONGE_RATE; + tmp_row + }; + row.not_padding = F::ONE; + for (i, (input_bytes_chunk, block_chunk)) in row + .input_bytes + .iter_mut() + .zip_eq(block.chunks(FELT_MAX_BYTES)) + .enumerate() + { + input_bytes_chunk.copy_from_slice( + &block_chunk[1..] 
+ .iter() + .map(|&byte| F::from_canonical_u8(byte)) + .collect::>(), + ); + } + // Set the capacity to the digest + state[POSEIDON_SPONGE_RATE..POSEIDON_SPONGE_WIDTH].copy_from_slice( + &row.digest + .chunks(2) + .map(|x| x[0] + F::from_canonical_u64(1 << 32) * x[1]) + .collect_vec(), + ); + + rows.push(row.into()); + } + if let Some(first_row) = rows.first_mut() { + first_row[POSEIDON_COL_MAP.is_first_row_general_op] = F::ONE; + } + + rows + } + + fn generate_commons( + row: &mut PoseidonColumnsView, + input: [F; POSEIDON_SPONGE_WIDTH], + op: &PoseidonGeneralOp, + already_absorbed_elements: usize, + ) { + row.context = F::from_canonical_usize(op.base_address.context); + row.segment = F::from_canonical_usize(op.base_address.segment); + row.virt = F::from_canonical_usize(op.base_address.virt); + row.timestamp = F::from_canonical_usize(op.timestamp); + row.len = F::from_canonical_usize(op.len); + row.already_absorbed_elements = F::from_canonical_usize(already_absorbed_elements); + + Self::generate_perm(row, input); + } + // One row per permutation. 
+ fn generate_trace_row_for_perm( + &self, + input: [F; POSEIDON_SPONGE_WIDTH], + op: &PoseidonGeneralOp, + already_absorbed_elements: usize, + ) -> PoseidonColumnsView { + let mut row = PoseidonColumnsView::default(); + row.is_full_input_block = F::ONE; + + Self::generate_commons(&mut row, input, op, already_absorbed_elements); + row + } + + fn generate_trace_final_row_for_perm( + &self, + input: [F; POSEIDON_SPONGE_WIDTH], + op: &PoseidonGeneralOp, + already_absorbed_elements: usize, + ) -> PoseidonColumnsView { + let mut row = PoseidonColumnsView::default(); + // TODO: I think we're assumming op.len is a multiple FELT_MAX_BYTES * + // POSEIDON_SPONGE_RATE + row.is_final_input_len[op.len % (FELT_MAX_BYTES * POSEIDON_SPONGE_RATE)] = F::ONE; + + Self::generate_commons(&mut row, input, op, already_absorbed_elements); + + row + } + fn generate_perm(row: &mut PoseidonColumnsView, input: [F; POSEIDON_SPONGE_WIDTH]) { // Populate the round input for the first round. row.input.copy_from_slice(&input); @@ -204,12 +454,99 @@ impl, const D: usize> Stark for PoseidonStark { let lv: &[P; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap(); let lv: &PoseidonColumnsView

= lv.borrow(); + let nv: &[P; NUM_COLUMNS] = vars.get_next_values().try_into().unwrap(); + let nv: &PoseidonColumnsView

= nv.borrow(); + + // Each flag must be boolean. + let is_full_input_block = lv.is_full_input_block; + yield_constr.constraint(is_full_input_block * (is_full_input_block - P::ONES)); + + let is_final_block: P = lv.is_final_input_len.iter().copied().sum(); + yield_constr.constraint(is_final_block * (is_final_block - P::ONES)); + + for &is_final_len in lv.is_final_input_len.iter() { + yield_constr.constraint(is_final_len * (is_final_len - P::ONES)); + } + + let is_first_row_general_op = lv.is_first_row_general_op; + yield_constr.constraint(is_first_row_general_op * (is_first_row_general_op - P::ONES)); - // Padding flag must be boolean. - let not_padding = lv.not_padding; - yield_constr.constraint(not_padding * (not_padding - P::ONES)); + // Ensure that full-input block and final block flags are not set to 1 at the + // same time. + yield_constr.constraint(is_final_block * is_full_input_block); - // Compute the input layer. + // If this is the first row, the original sponge state should have the input in + // the first `POSEIDON_SPONGE_RATE` elements followed by 0 for the + // capacity elements. The input values are checked with a CTL. + // Also, already_absorbed_elements = 0. + let already_absorbed_elements = lv.already_absorbed_elements; + yield_constr.constraint_first_row(already_absorbed_elements); + + for i in POSEIDON_SPONGE_RATE..POSEIDON_SPONGE_WIDTH { + // If the operation has len > 0 the capacity must be 0 + yield_constr.constraint_first_row(lv.len * lv.input[i]); + } + + // If this is a final row and there is an upcoming operation, then + // we make the previous checks for next row's `already_absorbed_elements` + // and the original sponge state. + yield_constr.constraint_transition(is_final_block * nv.already_absorbed_elements); + + for i in POSEIDON_SPONGE_RATE..POSEIDON_SPONGE_WIDTH { + // If the next block is a general operation (len > 0) and this is a final block, + // the capacity must be 0 for the next row. 
+ yield_constr.constraint_transition(nv.len * is_final_block * nv.input[i]); + } + + // If this is a full-input block, the next row's address, + // time and len must match as well as its timestamp. + yield_constr.constraint_transition(is_full_input_block * (lv.context - nv.context)); + yield_constr.constraint_transition(is_full_input_block * (lv.segment - nv.segment)); + yield_constr.constraint_transition(is_full_input_block * (lv.virt - nv.virt)); + yield_constr.constraint_transition(is_full_input_block * (lv.timestamp - nv.timestamp)); + + // If this is a full-input block, the next row's already_absorbed_elements + // should be ours plus `POSEIDON_SPONGE_RATE`, and the next input's + // capacity is the current digest. + yield_constr.constraint_transition( + is_full_input_block + * (already_absorbed_elements + + P::from(FE::from_canonical_usize( + FELT_MAX_BYTES * POSEIDON_SPONGE_RATE, + )) + - nv.already_absorbed_elements), + ); + + for i in 0..POSEIDON_SPONGE_WIDTH - POSEIDON_SPONGE_RATE { + yield_constr.constraint_transition( + is_full_input_block + * (lv.digest[2 * i] + + lv.digest[2 * i + 1] * P::Scalar::from_canonical_u64(1 << 32) + - nv.input[POSEIDON_SPONGE_RATE + i]), + ); + } + + // A dummy row is always followed by another dummy row, so the prover + // can't put dummy rows "in between" to avoid the above checks. + let is_dummy = P::ONES - is_full_input_block - is_final_block; + let next_is_final_block: P = nv.is_final_input_len.iter().copied().sum(); + yield_constr + .constraint_transition(is_dummy * (nv.is_full_input_block + next_is_final_block)); + + // If len > 0 and this is a final block, is_final_input_len implies `len + // - already_absorbed == i`. 
+ let offset = lv.len - already_absorbed_elements; + for (i, &is_final_len) in lv.is_final_input_len.iter().enumerate() { + let entry_match = offset + - P::from(FE::from_canonical_usize( + FELT_MAX_BYTES * POSEIDON_SPONGE_RATE - i, + )); + yield_constr.constraint(lv.len * is_final_len * entry_match); + } + + // Compute the input layer. We assume that, when necessary, + // input values were previously swapped before being passed + // to Poseidon. let mut state = lv.input; let mut round_ctr = 0; @@ -319,13 +656,129 @@ impl, const D: usize> Stark for PoseidonStark ) { let lv: &[ExtensionTarget; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap(); let lv: &PoseidonColumnsView> = lv.borrow(); + let nv: &[ExtensionTarget; NUM_COLUMNS] = vars.get_next_values().try_into().unwrap(); + let nv: &PoseidonColumnsView> = nv.borrow(); + + //Each flag (full-input block, final block or implied dummy flag) must be + //boolean. + let is_full_input_block = lv.is_full_input_block; + let constr = builder.mul_sub_extension( + is_full_input_block, + is_full_input_block, + is_full_input_block, + ); + yield_constr.constraint(builder, constr); + + let is_final_block = builder.add_many_extension(lv.is_final_input_len); + let constr = builder.mul_sub_extension(is_final_block, is_final_block, is_final_block); + yield_constr.constraint(builder, constr); + + for &is_final_len in lv.is_final_input_len.iter() { + let constr = builder.mul_sub_extension(is_final_len, is_final_len, is_final_len); + yield_constr.constraint(builder, constr); + } + + let one = builder.one_extension(); + let is_first_row_general_op = lv.is_first_row_general_op; + let constr = builder.mul_sub_extension( + is_first_row_general_op, + is_first_row_general_op, + is_first_row_general_op, + ); + yield_constr.constraint(builder, constr); - // Padding flag must be boolean. 
- let not_padding = lv.not_padding; - let constr = builder.mul_sub_extension(not_padding, not_padding, not_padding); + // Ensure that full-input block and final block flags are not set to 1 at the + // same time. + let constr = builder.mul_extension(is_final_block, is_full_input_block); yield_constr.constraint(builder, constr); - // Compute the input layer. + // If this is the first row, the original sponge state should have the input in + // the first `POSEIDON_SPONGE_RATE` elements followed by 0 for the + // capacity elements. Also, already_absorbed_elements = 0. + let already_absorbed_elements = lv.already_absorbed_elements; + yield_constr.constraint_first_row(builder, already_absorbed_elements); + + for i in POSEIDON_SPONGE_RATE..POSEIDON_SPONGE_WIDTH { + let constr = builder.mul_extension(lv.input[i], lv.len); + yield_constr.constraint_first_row(builder, constr); + } + + // If this is a final row and there is an upcoming operation, then + // we make the previous checks for next row's `already_absorbed_elements` + // and the original sponge state. + let constr = builder.mul_extension(is_final_block, nv.already_absorbed_elements); + yield_constr.constraint_transition(builder, constr); + + for i in POSEIDON_SPONGE_RATE..POSEIDON_SPONGE_WIDTH { + let mut constr = builder.mul_extension(is_final_block, nv.input[i]); + constr = builder.mul_extension(constr, nv.len); + yield_constr.constraint_transition(builder, constr); + } + + // If this is a full-input block, the next row's address, + // time and len must match as well as its timestamp. 
+ let mut constr = builder.sub_extension(lv.context, nv.context); + constr = builder.mul_extension(is_full_input_block, constr); + yield_constr.constraint_transition(builder, constr); + let mut constr = builder.sub_extension(lv.segment, nv.segment); + constr = builder.mul_extension(is_full_input_block, constr); + yield_constr.constraint_transition(builder, constr); + let mut constr = builder.sub_extension(lv.virt, nv.virt); + constr = builder.mul_extension(is_full_input_block, constr); + yield_constr.constraint_transition(builder, constr); + let mut constr = builder.sub_extension(lv.timestamp, nv.timestamp); + constr = builder.mul_extension(is_full_input_block, constr); + yield_constr.constraint_transition(builder, constr); + + // If this is a full-input block, the next row's already_absorbed_elements + // should be ours plus `POSEIDON_SPONGE_RATE`, and the next input's + // capacity is the current output's capacity. + let diff = builder.sub_extension(already_absorbed_elements, nv.already_absorbed_elements); + let constr = builder.arithmetic_extension( + F::ONE, + F::from_canonical_usize(FELT_MAX_BYTES * POSEIDON_SPONGE_RATE), + diff, + is_full_input_block, + is_full_input_block, + ); + yield_constr.constraint_transition(builder, constr); + + for i in 0..POSEIDON_SPONGE_WIDTH - POSEIDON_SPONGE_RATE { + let mut constr = builder.mul_const_add_extension( + F::from_canonical_u64(1 << 32), + lv.digest[2 * i + 1], + lv.digest[2 * i], + ); + constr = builder.sub_extension(constr, nv.input[POSEIDON_SPONGE_RATE + i]); + constr = builder.mul_extension(is_full_input_block, constr); + yield_constr.constraint_transition(builder, constr); + } + + // A dummy row is always followed by another dummy row, so the prover can't put + // dummy rows "in between" to avoid the above checks. 
+ let mut is_dummy = builder.add_extension(is_full_input_block, is_final_block); + is_dummy = builder.sub_extension(one, is_dummy); + let next_is_final_block = builder.add_many_extension(nv.is_final_input_len.iter()); + let mut constr = builder.add_extension(nv.is_full_input_block, next_is_final_block); + constr = builder.mul_extension(is_dummy, constr); + yield_constr.constraint_transition(builder, constr); + + // If len > 0 and this is a final block, is_final_input_len implies `len - + // already_absorbed == i` + let offset = builder.sub_extension(lv.len, already_absorbed_elements); + for (i, &is_final_len) in lv.is_final_input_len.iter().enumerate() { + let mut index = builder.constant_extension( + F::from_canonical_usize(FELT_MAX_BYTES * POSEIDON_SPONGE_RATE - i).into(), + ); + let entry_match = builder.sub_extension(offset, index); + let mut constr = builder.mul_extension(is_final_len, entry_match); + constr = builder.mul_extension(constr, lv.len); + yield_constr.constraint(builder, constr); + } + + // Compute the input layer. We assume that, when necessary, + // input values were previously swapped before being passed + // to Poseidon. 
let mut state = lv.input; let mut round_ctr = 0; @@ -479,6 +932,8 @@ mod tests { use anyhow::Result; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; + use itertools::Itertools; + use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::{Field, PrimeField64, Sample}; use plonky2::fri::oracle::PolynomialBatch; @@ -487,18 +942,22 @@ mod tests { use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use plonky2::timed; use plonky2::util::timing::TimingTree; + use smt_trie::code::poseidon_hash_padded_byte_vec; + use smt_trie::smt::F; + use starky::config::StarkConfig; use starky::cross_table_lookup::{CtlData, CtlZData}; - use starky::lookup::{GrandProductChallenge, GrandProductChallengeSet}; + use starky::lookup::{Column, GrandProductChallenge, GrandProductChallengeSet}; use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use crate::memory::segments::Segment; use crate::poseidon::columns::{ PoseidonColumnsView, POSEIDON_DIGEST, POSEIDON_SPONGE_RATE, POSEIDON_SPONGE_WIDTH, }; - use crate::poseidon::poseidon_stark::{PoseidonOp, PoseidonStark}; + use crate::poseidon::poseidon_stark::{ + PoseidonGeneralOp, PoseidonOp, PoseidonStark, FELT_MAX_BYTES, + }; use crate::prover::prove_single_table; use crate::witness::memory::MemoryAddress; - use crate::StarkConfig; #[test] fn test_stark_degree() -> Result<()> { @@ -528,29 +987,47 @@ mod tests { #[test] fn poseidon_correctness_test() -> Result<()> { + let input: Vec = (0..POSEIDON_SPONGE_RATE * FELT_MAX_BYTES) + .map(|_| rand::random()) + .collect(); + let int_inputs = PoseidonOp::PoseidonGeneralOp(PoseidonGeneralOp { + base_address: MemoryAddress::new( + 0, + crate::memory::segments::Segment::AccessedAddresses, + 0, + ), + input: input.clone(), + timestamp: 0, + len: POSEIDON_SPONGE_RATE * FELT_MAX_BYTES, + }); const D: usize = 2; - type C = PoseidonGoldilocksConfig; - type F = >::F; + type F = 
GoldilocksField; type S = PoseidonStark; let stark = S { f: Default::default(), }; - let input = PoseidonOp(F::rand_array()); - let rows = stark.generate_trace_rows(vec![input], 8); + let rows = stark.generate_trace_rows(vec![int_inputs], 8); assert_eq!(rows.len(), 8); - let row: PoseidonColumnsView = rows[0].into(); - let expected = F::poseidon(input.0); - assert_eq!( - std::array::from_fn::<_, 4, _>( - |i| row.digest[2 * i] + row.digest[2 * i + 1] * F::from_canonical_u64(1 << 32) - ), - expected[0..POSEIDON_DIGEST] - ); + let last_row: &PoseidonColumnsView<_> = rows[0].borrow(); + let mut output: Vec<_> = (0..POSEIDON_DIGEST) + .map(|i| { + last_row.digest[2 * i] + F::from_canonical_u64(1 << 32) * last_row.digest[2 * i + 1] + }) + .collect(); + + let hash = poseidon_hash_padded_byte_vec(input); + assert_eq!( - row.output_partial, - expected[POSEIDON_DIGEST..POSEIDON_SPONGE_WIDTH] + output + .iter() + .map(|x| x.to_noncanonical_u64()) + .collect::>(), + hash.elements + .iter() + .map(|x| x.to_noncanonical_u64()) + .collect::>() ); Ok(()) diff --git a/evm_arithmetization/src/verifier.rs b/evm_arithmetization/src/verifier.rs index 8819ea84f..481c2bfff 100644 --- a/evm_arithmetization/src/verifier.rs +++ b/evm_arithmetization/src/verifier.rs @@ -113,6 +113,14 @@ where &[], config, )?; + verify_stark_proof_with_challenges( + poseidon_stark, + &stark_proofs[Table::Poseidon as usize].proof, + &stark_challenges[Table::Poseidon as usize], + Some(&ctl_vars_per_table[Table::Poseidon as usize]), + &[], + config, + )?; let public_values = all_proof.public_values; diff --git a/evm_arithmetization/src/witness/gas.rs b/evm_arithmetization/src/witness/gas.rs index 199b34760..60fbf0846 100644 --- a/evm_arithmetization/src/witness/gas.rs +++ b/evm_arithmetization/src/witness/gas.rs @@ -36,6 +36,7 @@ pub(crate) const fn gas_to_charge(op: Operation) -> u64 { TernaryArithmetic(SubMod) => KERNEL_ONLY_INSTR, KeccakGeneral => KERNEL_ONLY_INSTR, Poseidon => KERNEL_ONLY_INSTR, + 
PoseidonGeneral => KERNEL_ONLY_INSTR, ProverInput => KERNEL_ONLY_INSTR, Pop => G_BASE, Jump => G_MID, diff --git a/evm_arithmetization/src/witness/operation.rs b/evm_arithmetization/src/witness/operation.rs index 9cdfff1ae..ab555e57d 100644 --- a/evm_arithmetization/src/witness/operation.rs +++ b/evm_arithmetization/src/witness/operation.rs @@ -3,6 +3,8 @@ use itertools::Itertools; use keccak_hash::keccak; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; +use smt_trie::code::{poseidon_hash_padded_byte_vec, poseidon_pad_byte_vec}; +use smt_trie::utils::hashout2u; use super::transition::Transition; use super::util::{ @@ -18,7 +20,9 @@ use crate::cpu::simple_logic::eq_iszero::generate_pinv_diff; use crate::cpu::stack::MAX_USER_STACK_SIZE; use crate::extension_tower::BN_BASE; use crate::memory::segments::Segment; -use crate::poseidon::poseidon_stark::PoseidonOp; +use crate::memory::NUM_CHANNELS; +use crate::poseidon::columns::POSEIDON_SPONGE_RATE; +use crate::poseidon::poseidon_stark::{PoseidonGeneralOp, PoseidonOp, PoseidonSimpleOp}; use crate::util::u256_to_usize; use crate::witness::errors::MemoryError::VirtTooLarge; use crate::witness::errors::ProgramError; @@ -41,6 +45,7 @@ pub(crate) enum Operation { TernaryArithmetic(arithmetic::TernaryOperator), KeccakGeneral, Poseidon, + PoseidonGeneral, ProverInput, Pop, Jump, @@ -188,7 +193,7 @@ pub(crate) fn generate_poseidon>( log::debug!("Poseidon hashing {:?} -> {}", arr, hash); push_no_write(generation_state, hash); - state.push_poseidon(PoseidonOp(arr)); + state.push_poseidon(PoseidonOp::PoseidonSimpleOp(PoseidonSimpleOp(arr))); state.push_memory(log_in1); state.push_memory(log_in2); @@ -196,6 +201,54 @@ pub(crate) fn generate_poseidon>( Ok(()) } +pub(crate) fn generate_poseidon_general>( + state: &mut T, + mut row: CpuColumnsView, +) -> Result<(), ProgramError> { + let clock = state.get_clock(); + let generation_state = state.get_mut_generation_state(); + let [(addr, _), (len, log_in1)] = + 
stack_pop_with_log_and_fill::<2, _>(generation_state, &mut row)?; + let len = u256_to_usize(len)?; + + let base_address = MemoryAddress::new_bundle(addr)?; + let mut input = (0..len) + .map(|i| { + let address = MemoryAddress { + virt: base_address.virt.saturating_add(i), + ..base_address + }; + let val = generation_state.memory.get_with_init(address); + generation_state.traces.memory_ops.push(MemoryOp::new( + MemoryChannel::Code, + clock, + address, + MemoryOpKind::Read, + val.0[0].into(), + )); + + val.0[0] as u8 + }) + .collect_vec(); + + let poseidon_op = PoseidonOp::PoseidonGeneralOp(PoseidonGeneralOp { + base_address, + timestamp: clock * NUM_CHANNELS, + input: input.clone(), + len: input.len(), + }); + + let hash = hashout2u(poseidon_hash_padded_byte_vec(input.clone())); + + push_no_write(generation_state, hash); + + state.push_poseidon(poseidon_op); + + state.push_memory(log_in1); + state.push_cpu(row); + Ok(()) +} + pub(crate) fn generate_prover_input>( state: &mut T, mut row: CpuColumnsView, diff --git a/evm_arithmetization/src/witness/traces.rs b/evm_arithmetization/src/witness/traces.rs index d3eecc2d9..81e070cf0 100644 --- a/evm_arithmetization/src/witness/traces.rs +++ b/evm_arithmetization/src/witness/traces.rs @@ -10,8 +10,10 @@ use starky::util::trace_rows_to_poly_values; use crate::all_stark::{AllStark, NUM_TABLES}; use crate::arithmetic::{BinaryOperator, Operation}; use crate::byte_packing::byte_packing_stark::BytePackingOp; -use crate::cpu::columns::CpuColumnsView; +use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS}; +use crate::keccak_sponge::columns::KECCAK_WIDTH_BYTES; use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeOp; +use crate::poseidon::columns::PoseidonColumnsView; use crate::poseidon::poseidon_stark::PoseidonOp; use crate::witness::memory::MemoryOp; use crate::{arithmetic, keccak, keccak_sponge, logic}; @@ -157,7 +159,8 @@ impl Traces { .byte_packing_stark .generate_trace(byte_packing_ops, cap_elements, timing) ); - 
let cpu_rows = cpu.into_iter().map(|x| x.into()).collect(); + let cpu_rows: Vec<[T; NUM_CPU_COLUMNS]> = cpu.into_iter().map(|x| x.into()).collect(); + let cpu_trace = trace_rows_to_poly_values(cpu_rows); let keccak_trace = timed!( timing, diff --git a/evm_arithmetization/src/witness/transition.rs b/evm_arithmetization/src/witness/transition.rs index ee8eaea62..25f6bddab 100644 --- a/evm_arithmetization/src/witness/transition.rs +++ b/evm_arithmetization/src/witness/transition.rs @@ -90,6 +90,7 @@ pub(crate) fn decode(registers: RegistersState, opcode: u8) -> Result Ok(Operation::Syscall(opcode, 2, false)), // KECCAK256 (0x21, true) => Ok(Operation::KeccakGeneral), (0x22, true) => Ok(Operation::Poseidon), + (0x23, true) => Ok(Operation::PoseidonGeneral), (0x30, _) => Ok(Operation::Syscall(opcode, 0, true)), // ADDRESS (0x31, _) => Ok(Operation::Syscall(opcode, 1, false)), // BALANCE (0x32, _) => Ok(Operation::Syscall(opcode, 0, true)), // ORIGIN @@ -182,7 +183,7 @@ pub(crate) fn fill_op_flag(op: Operation, row: &mut CpuColumnsView) Operation::BinaryArithmetic(_) => &mut flags.binary_op, Operation::TernaryArithmetic(_) => &mut flags.ternary_op, Operation::KeccakGeneral | Operation::Jumpdest => &mut flags.jumpdest_keccak_general, - Operation::Poseidon => &mut flags.poseidon, + Operation::Poseidon | Operation::PoseidonGeneral => &mut flags.poseidon, Operation::ProverInput | Operation::Push(1..) 
=> &mut flags.push_prover_input, Operation::Jump | Operation::Jumpi => &mut flags.jumps, Operation::Pc | Operation::Push(0) => &mut flags.pc_push0, @@ -215,7 +216,7 @@ pub(crate) const fn get_op_special_length(op: Operation) -> Option { Operation::BinaryArithmetic(_) => STACK_BEHAVIORS.binary_op, Operation::TernaryArithmetic(_) => STACK_BEHAVIORS.ternary_op, Operation::KeccakGeneral | Operation::Jumpdest => STACK_BEHAVIORS.jumpdest_keccak_general, - Operation::Poseidon => STACK_BEHAVIORS.poseidon, + Operation::Poseidon | Operation::PoseidonGeneral => STACK_BEHAVIORS.poseidon, Operation::Jump => JUMP_OP, Operation::Jumpi => JUMPI_OP, Operation::GetContext | Operation::SetContext => None, @@ -255,7 +256,7 @@ pub(crate) const fn might_overflow_op(op: Operation) -> bool { Operation::BinaryArithmetic(_) => MIGHT_OVERFLOW.binary_op, Operation::TernaryArithmetic(_) => MIGHT_OVERFLOW.ternary_op, Operation::KeccakGeneral | Operation::Jumpdest => MIGHT_OVERFLOW.jumpdest_keccak_general, - Operation::Poseidon => MIGHT_OVERFLOW.poseidon, + Operation::Poseidon | Operation::PoseidonGeneral => MIGHT_OVERFLOW.poseidon, Operation::Jump | Operation::Jumpi => MIGHT_OVERFLOW.jumps, Operation::Pc | Operation::Push(0) => MIGHT_OVERFLOW.pc_push0, Operation::GetContext | Operation::SetContext => MIGHT_OVERFLOW.context_op, @@ -514,6 +515,7 @@ pub(crate) trait Transition: State { Operation::KeccakGeneral => generate_keccak_general(self, row)?, Operation::ProverInput => generate_prover_input(self, row)?, Operation::Poseidon => generate_poseidon(self, row)?, + Operation::PoseidonGeneral => generate_poseidon_general(self, row)?, Operation::Pop => generate_pop(self, row)?, Operation::Jump => self.generate_jump(row)?, Operation::Jumpi => self.generate_jumpi(row)?, diff --git a/evm_arithmetization/src/witness/util.rs b/evm_arithmetization/src/witness/util.rs index d2b2c1f4e..08a56ff05 100644 --- a/evm_arithmetization/src/witness/util.rs +++ b/evm_arithmetization/src/witness/util.rs @@ -1,5 +1,7 @@ 
use ethereum_types::U256; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; +use plonky2::hash::poseidon::Poseidon; use super::memory::DUMMY_MEMOP; use super::transition::Transition; @@ -13,6 +15,7 @@ use crate::keccak_sponge::columns::{KECCAK_RATE_BYTES, KECCAK_WIDTH_BYTES}; use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeOp; use crate::logic; use crate::memory::segments::Segment; +use crate::poseidon::columns::{POSEIDON_SPONGE_RATE, POSEIDON_SPONGE_WIDTH}; use crate::witness::errors::ProgramError; use crate::witness::memory::{MemoryAddress, MemoryChannel, MemoryOp, MemoryOpKind}; diff --git a/smt_trie/Cargo.toml b/smt_trie/Cargo.toml index 7c01f23bb..7e3a2f090 100644 --- a/smt_trie/Cargo.toml +++ b/smt_trie/Cargo.toml @@ -26,7 +26,7 @@ num-traits = "0.2.15" uint = "0.9.5" rlp = { workspace = true } parking_lot = { version = "0.12.1", features = ["serde"] } -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2", rev = "c1728d4e43e9ff434f9297e4f6171ddf28ec8fca" } +plonky2 = "0.2.1" rand = "0.8.5" serde = { workspace = true, features = ["derive", "rc"] } diff --git a/smt_trie/src/code.rs b/smt_trie/src/code.rs index a7973e2c9..dd6b142b9 100644 --- a/smt_trie/src/code.rs +++ b/smt_trie/src/code.rs @@ -2,37 +2,47 @@ /// See `hashContractBytecode()` in https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt-utils.js for reference implementation. 
use ethereum_types::U256; use plonky2::field::types::Field; -use plonky2::hash::poseidon::Poseidon; +use plonky2::hash::poseidon::{self, Poseidon}; use crate::smt::{HashOut, F}; use crate::utils::hashout2u; pub fn hash_contract_bytecode(mut code: Vec) -> HashOut { - code.push(0x01); - while code.len() % 56 != 0 { - code.push(0x00); - } - *code.last_mut().unwrap() |= 0x80; + poseidon_pad_byte_vec(&mut code); + + poseidon_hash_padded_byte_vec(code) +} - let mut capacity = [F::ZERO; 4]; - for i in 0..code.len() / 56 { - let mut block = [0u8; 56]; - block.copy_from_slice(&code[i * 56..(i + 1) * 56]); - let mut arr = [F::ZERO; 12]; - for j in 0..8 { - arr[j] = block[j * 7..(j + 1) * 7] - .iter() - .enumerate() - .fold(F::ZERO, |acc, (k, x)| { - acc + (F::from_canonical_u64((*x as u64) << (k * 8))) - }); - } - arr[8..12].copy_from_slice(&capacity); - capacity = F::poseidon(arr)[0..4].try_into().unwrap(); +pub fn poseidon_hash_padded_byte_vec(bytes: Vec) -> HashOut { + let mut capacity = [F::ZERO; poseidon::SPONGE_CAPACITY]; + let mut arr = [F::ZERO; poseidon::SPONGE_WIDTH]; + for blocks in bytes.chunks_exact(poseidon::SPONGE_RATE * 7) { + arr[..poseidon::SPONGE_RATE].copy_from_slice( + &blocks + .chunks_exact(7) + .map(|block| { + let mut bytes = [0u8; poseidon::SPONGE_RATE]; + bytes[..7].copy_from_slice(block); + F::from_canonical_u64(u64::from_le_bytes(bytes)) + }) + .collect::>(), + ); + arr[poseidon::SPONGE_RATE..poseidon::SPONGE_WIDTH].copy_from_slice(&capacity); + capacity = F::poseidon(arr)[0..poseidon::SPONGE_CAPACITY] + .try_into() + .unwrap(); } HashOut { elements: capacity } } +pub fn poseidon_pad_byte_vec(bytes: &mut Vec) { + bytes.push(0x01); + while bytes.len() % 56 != 0 { + bytes.push(0x00); + } + *bytes.last_mut().unwrap() |= 0x80; +} + pub fn hash_bytecode_u256(code: Vec) -> U256 { hashout2u(hash_contract_bytecode(code)) } diff --git a/trace_decoder/Cargo.toml b/trace_decoder/Cargo.toml index 9d640c3b2..4cef95995 100644 --- a/trace_decoder/Cargo.toml +++ 
b/trace_decoder/Cargo.toml @@ -33,4 +33,3 @@ evm_arithmetization = { git = "https://github.com/0xPolygonZero/zk_evm", branch [dev-dependencies] pretty_env_logger = "0.5.0" - From 5dd75fab77cb926b61e5bbec83528e74aee04c15 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Mon, 8 Apr 2024 19:09:17 +0900 Subject: [PATCH 07/19] `smt_trie` library deps cleanup (#152) * Add smt_trie to CI workflow * Reuse workspace level dependencies --- .github/workflows/ci.yml | 25 +++++++++++++++++++++++++ Cargo.toml | 3 +++ mpt_trie/Cargo.toml | 6 +++--- smt_trie/Cargo.toml | 18 +++++++++--------- 4 files changed, 40 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 07f039d25..8771cbb9d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,6 +43,31 @@ jobs: CARGO_INCREMENTAL: 1 RUST_BACKTRACE: 1 + test_smt_trie: + name: Test smt_trie + runs-on: ubuntu-latest + timeout-minutes: 30 + if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Install stable toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Set up rust cache + uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + + - name: Test in smt_trie subdirectory + run: cargo test --manifest-path smt_trie/Cargo.toml + env: + RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 + RUST_LOG: 1 + CARGO_INCREMENTAL: 1 + RUST_BACKTRACE: 1 + test_trace_decoder: name: Test trace_decoder runs-on: ubuntu-latest diff --git a/Cargo.toml b/Cargo.toml index a5f1cb63a..405f45b84 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,12 +18,15 @@ hex-literal = "0.4.1" keccak-hash = "0.10.0" log = "0.4.20" num = "0.4.1" +num-traits = "0.2.15" +parking_lot = "0.12.1" rand = "0.8.5" rlp = "0.5.2" rlp-derive = "0.1.0" serde = "1.0.166" serde_json = "1.0.96" thiserror = "1.0.49" +uint = "0.9.5" # plonky2-related 
dependencies plonky2 = "0.2.1" diff --git a/mpt_trie/Cargo.toml b/mpt_trie/Cargo.toml index b4ef7ded2..5005d003c 100644 --- a/mpt_trie/Cargo.toml +++ b/mpt_trie/Cargo.toml @@ -19,12 +19,12 @@ enum-as-inner = { workspace = true } ethereum-types = { workspace = true } hex = { workspace = true } keccak-hash = { workspace = true } -parking_lot = { version = "0.12.1", features = ["serde"] } +parking_lot = { workspace = true, features = ["serde"] } thiserror = { workspace = true } log = { workspace = true } num = { workspace = true, optional = true } -num-traits = "0.2.15" -uint = "0.9.5" +num-traits = { workspace = true } +uint = { workspace = true } rlp = { workspace = true } serde = { workspace = true, features = ["derive", "rc"] } impl-rlp = "0.3.0" diff --git a/smt_trie/Cargo.toml b/smt_trie/Cargo.toml index 7e3a2f090..fb1335350 100644 --- a/smt_trie/Cargo.toml +++ b/smt_trie/Cargo.toml @@ -20,20 +20,20 @@ ethereum-types = { workspace = true } hex = { workspace = true } hex-literal = { workspace = true } keccak-hash = { workspace = true } -thiserror = "1.0.40" log = { workspace = true } -num-traits = "0.2.15" -uint = "0.9.5" +num-traits = { workspace = true } +parking_lot = { workspace = true, features = ["serde"] } +plonky2 = { workspace = true } +rand = { workspace = true } rlp = { workspace = true } -parking_lot = { version = "0.12.1", features = ["serde"] } -plonky2 = "0.2.1" -rand = "0.8.5" serde = { workspace = true, features = ["derive", "rc"] } +thiserror = { workspace = true } +uint = { workspace = true } [dev-dependencies] -eth_trie = "0.1.0" -pretty_env_logger = "0.4.0" +eth_trie = "0.4.0" +pretty_env_logger = "0.5.0" rlp-derive = { workspace = true } serde = { workspace = true, features = ["derive"] } -serde_json = "1.0.96" +serde_json = { workspace = true } From 3b733c1274389a80c6635a874e0a087e5d8a498d Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Tue, 9 Apr 2024 08:23:29 +0900 Subject: [PATCH 08/19] Fix 
broken type2 tests (#151) * Fix add11 tests * Fix log_opcode tests * Fix balance tests * Uncomment receipt tests * Use main as initial offset for clarity --- .../src/cpu/kernel/tests/account_code.rs | 4 +- .../src/cpu/kernel/tests/add11.rs | 669 ++++----- .../src/cpu/kernel/tests/balance.rs | 284 ++-- .../src/cpu/kernel/tests/receipt.rs | 1268 ++++++++--------- evm_arithmetization/tests/log_opcode.rs | 1220 ++++++++-------- 5 files changed, 1697 insertions(+), 1748 deletions(-) diff --git a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs index a06138b94..2562ea90b 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs @@ -96,7 +96,7 @@ fn prepare_interpreter( let k = nibbles_64(U256::from_big_endian( keccak(address.to_fixed_bytes()).as_bytes(), )); - // Next, execute mpt_insert_state_trie. + // Next, execute smt_insert_state. let trie_data = interpreter.get_trie_data_mut(); if trie_data.is_empty() { // In the assembly we skip over 0, knowing trie_data[0] = 0 by default. @@ -136,7 +136,7 @@ fn prepare_interpreter( ); } - // Now, execute mpt_hash_state_trie. + // Now, execute smt_hash_state. 
interpreter.generation_state.registers.program_counter = smt_hash_state; interpreter .push(0xDEADBEEFu32.into()) diff --git a/evm_arithmetization/src/cpu/kernel/tests/add11.rs b/evm_arithmetization/src/cpu/kernel/tests/add11.rs index 8ae88a645..44f5ae0bd 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/add11.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/add11.rs @@ -1,326 +1,343 @@ -// use std::collections::HashMap; -// use std::str::FromStr; - -// use mpt_trie::nibbles::Nibbles; -// use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie}; -// use ethereum_types::{Address, BigEndianHash, H256}; -// use hex_literal::hex; -// use keccak_hash::keccak; -// use plonky2::field::goldilocks_field::GoldilocksField as F; - -// use crate::cpu::kernel::aggregator::KERNEL; -// use crate::cpu::kernel::constants::context_metadata::ContextMetadata; -// use crate::cpu::kernel::interpreter::Interpreter; -// use crate::generation::mpt::{AccountRlp, LegacyReceiptRlp}; -// use crate::generation::TrieInputs; -// use crate::proof::{BlockHashes, BlockMetadata, TrieRoots}; -// use crate::GenerationInputs; - -// #[test] -// fn test_add11_yml() { -// let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); -// let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); -// let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - -// let beneficiary_state_key = keccak(beneficiary); -// let sender_state_key = keccak(sender); -// let to_hashed = keccak(to); - -// let beneficiary_nibbles = -// Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); -// let sender_nibbles = -// Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); -// let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - -// let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x60, 0x00, 0x55, 0x00]; -// let code_hash = keccak(code); - -// let mut contract_code = HashMap::new(); -// contract_code.insert(keccak(vec![]), vec![]); -// contract_code.insert(code_hash, 
code.to_vec()); - -// let beneficiary_account_before = AccountRlp { -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let sender_account_before = AccountRlp { -// balance: 0x0de0b6b3a7640000u64.into(), -// ..AccountRlp::default() -// }; -// let to_account_before = AccountRlp { -// balance: 0x0de0b6b3a7640000u64.into(), -// code_hash, -// ..AccountRlp::default() -// }; - -// let mut state_trie_before = HashedPartialTrie::from(Node::Empty); -// state_trie_before.insert( -// beneficiary_nibbles, -// rlp::encode(&beneficiary_account_before).to_vec(), -// ); -// state_trie_before.insert(sender_nibbles, -// rlp::encode(&sender_account_before).to_vec()); state_trie_before. -// insert(to_nibbles, rlp::encode(&to_account_before).to_vec()); - -// let tries_before = TrieInputs { -// state_trie: state_trie_before, -// transactions_trie: Node::Empty.into(), -// receipts_trie: Node::Empty.into(), -// storage_tries: vec![(to_hashed, Node::Empty.into())], -// }; - -// let txn = -// hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16" -// ); - -// let gas_used = 0xa868u64.into(); - -// let expected_state_trie_after = { -// let beneficiary_account_after = AccountRlp { -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let sender_account_after = AccountRlp { -// balance: 0xde0b6b3a75be550u64.into(), -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let to_account_after = AccountRlp { -// balance: 0xde0b6b3a76586a0u64.into(), -// code_hash, -// // Storage map: { 0 => 2 } -// storage_root: HashedPartialTrie::from(Node::Leaf { -// nibbles: Nibbles::from_h256_be(keccak([0u8; 32])), -// value: vec![2], -// }) -// .hash(), -// ..AccountRlp::default() -// }; - -// let mut expected_state_trie_after = -// HashedPartialTrie::from(Node::Empty); expected_state_trie_after. 
-// insert( beneficiary_nibbles, -// rlp::encode(&beneficiary_account_after).to_vec(), -// ); -// expected_state_trie_after -// .insert(sender_nibbles, -// rlp::encode(&sender_account_after).to_vec()); -// expected_state_trie_after.insert(to_nibbles, -// rlp::encode(&to_account_after).to_vec()); expected_state_trie_after -// }; -// let receipt_0 = LegacyReceiptRlp { -// status: true, -// cum_gas_used: gas_used, -// bloom: vec![0; 256].into(), -// logs: vec![], -// }; -// let mut receipts_trie = HashedPartialTrie::from(Node::Empty); -// receipts_trie.insert( -// Nibbles::from_str("0x80").unwrap(), -// rlp::encode(&receipt_0).to_vec(), -// ); -// let transactions_trie: HashedPartialTrie = Node::Leaf { -// nibbles: Nibbles::from_str("0x80").unwrap(), -// value: txn.to_vec(), -// } -// .into(); - -// let trie_roots_after = TrieRoots { -// state_root: expected_state_trie_after.hash(), -// transactions_root: transactions_trie.hash(), -// receipts_root: receipts_trie.hash(), -// }; - -// let block_metadata = BlockMetadata { -// block_beneficiary: Address::from(beneficiary), -// block_timestamp: 0x03e8.into(), -// block_number: 1.into(), -// block_difficulty: 0x020000.into(), -// block_random: H256::from_uint(&0x020000.into()), -// block_gaslimit: 0xff112233u32.into(), -// block_chain_id: 1.into(), -// block_base_fee: 0xa.into(), -// block_gas_used: gas_used, -// block_bloom: [0.into(); 8], -// }; - -// let tries_inputs = GenerationInputs { -// signed_txn: Some(txn.to_vec()), -// withdrawals: vec![], -// tries: tries_before, -// trie_roots_after, -// contract_code: contract_code.clone(), -// block_metadata, -// checkpoint_state_trie_root: -// HashedPartialTrie::from(Node::Empty).hash(), txn_number_before: -// 0.into(), gas_used_before: 0.into(), -// gas_used_after: gas_used, -// block_hashes: BlockHashes { -// prev_hashes: vec![H256::default(); 256], -// cur_hash: H256::default(), -// }, -// }; - -// let initial_stack = vec![]; -// let mut interpreter: Interpreter = -// 
Interpreter::new_with_generation_inputs_and_kernel(0, initial_stack, -// tries_inputs); - -// let route_txn_label = KERNEL.global_labels["main"]; -// // Switch context and initialize memory with the data we need for the -// tests. interpreter.generation_state.registers.program_counter = -// route_txn_label; interpreter.set_context_metadata_field(0, -// ContextMetadata::GasLimit, 1_000_000.into()); interpreter. -// set_is_kernel(true); interpreter.run().expect("Proving add11 failed."); -// } - -// #[test] -// fn test_add11_yml_with_exception() { -// // In this test, we make sure that the user code throws a stack underflow -// exception. let beneficiary = -// hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); let sender = -// hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let to = -// hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - -// let beneficiary_state_key = keccak(beneficiary); -// let sender_state_key = keccak(sender); -// let to_hashed = keccak(to); - -// let beneficiary_nibbles = -// Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); -// let sender_nibbles = -// Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); -// let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - -// let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x8e, 0x00]; -// let code_hash = keccak(code); - -// let mut contract_code = HashMap::new(); -// contract_code.insert(keccak(vec![]), vec![]); -// contract_code.insert(code_hash, code.to_vec()); - -// let beneficiary_account_before = AccountRlp { -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let sender_account_before = AccountRlp { -// balance: 0x0de0b6b3a7640000u64.into(), -// ..AccountRlp::default() -// }; -// let to_account_before = AccountRlp { -// balance: 0x0de0b6b3a7640000u64.into(), -// code_hash, -// ..AccountRlp::default() -// }; - -// let mut state_trie_before = HashedPartialTrie::from(Node::Empty); -// state_trie_before.insert( -// beneficiary_nibbles, -// 
rlp::encode(&beneficiary_account_before).to_vec(), -// ); -// state_trie_before.insert(sender_nibbles, -// rlp::encode(&sender_account_before).to_vec()); state_trie_before. -// insert(to_nibbles, rlp::encode(&to_account_before).to_vec()); - -// let tries_before = TrieInputs { -// state_trie: state_trie_before, -// transactions_trie: Node::Empty.into(), -// receipts_trie: Node::Empty.into(), -// storage_tries: vec![(to_hashed, Node::Empty.into())], -// }; - -// let txn = -// hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16" -// ); let txn_gas_limit = 400_000; -// let gas_price = 10; - -// // Here, since the transaction fails, it consumes its gas limit, and does -// nothing else. let expected_state_trie_after = { -// let beneficiary_account_after = beneficiary_account_before; -// // This is the only account that changes: the nonce and the balance -// are updated. let sender_account_after = AccountRlp { -// balance: sender_account_before.balance - txn_gas_limit * -// gas_price, nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let to_account_after = to_account_before; - -// let mut expected_state_trie_after = -// HashedPartialTrie::from(Node::Empty); expected_state_trie_after. 
-// insert( beneficiary_nibbles, -// rlp::encode(&beneficiary_account_after).to_vec(), -// ); -// expected_state_trie_after -// .insert(sender_nibbles, -// rlp::encode(&sender_account_after).to_vec()); -// expected_state_trie_after.insert(to_nibbles, -// rlp::encode(&to_account_after).to_vec()); expected_state_trie_after -// }; - -// let receipt_0 = LegacyReceiptRlp { -// status: false, -// cum_gas_used: txn_gas_limit.into(), -// bloom: vec![0; 256].into(), -// logs: vec![], -// }; -// let mut receipts_trie = HashedPartialTrie::from(Node::Empty); -// receipts_trie.insert( -// Nibbles::from_str("0x80").unwrap(), -// rlp::encode(&receipt_0).to_vec(), -// ); -// let transactions_trie: HashedPartialTrie = Node::Leaf { -// nibbles: Nibbles::from_str("0x80").unwrap(), -// value: txn.to_vec(), -// } -// .into(); - -// let trie_roots_after = TrieRoots { -// state_root: expected_state_trie_after.hash(), -// transactions_root: transactions_trie.hash(), -// receipts_root: receipts_trie.hash(), -// }; - -// let block_metadata = BlockMetadata { -// block_beneficiary: Address::from(beneficiary), -// block_timestamp: 0x03e8.into(), -// block_number: 1.into(), -// block_difficulty: 0x020000.into(), -// block_random: H256::from_uint(&0x020000.into()), -// block_gaslimit: 0xff112233u32.into(), -// block_chain_id: 1.into(), -// block_base_fee: 0xa.into(), -// block_gas_used: txn_gas_limit.into(), -// block_bloom: [0.into(); 8], -// }; - -// let tries_inputs = GenerationInputs { -// signed_txn: Some(txn.to_vec()), -// withdrawals: vec![], -// tries: tries_before, -// trie_roots_after, -// contract_code: contract_code.clone(), -// block_metadata, -// checkpoint_state_trie_root: -// HashedPartialTrie::from(Node::Empty).hash(), txn_number_before: -// 0.into(), gas_used_before: 0.into(), -// gas_used_after: txn_gas_limit.into(), -// block_hashes: BlockHashes { -// prev_hashes: vec![H256::default(); 256], -// cur_hash: H256::default(), -// }, -// }; - -// let initial_stack = vec![]; -// 
let mut interpreter: Interpreter = -// Interpreter::new_with_generation_inputs_and_kernel(0, initial_stack, -// tries_inputs); - -// let route_txn_label = KERNEL.global_labels["main"]; -// // Switch context and initialize memory with the data we need for the -// tests. interpreter.generation_state.registers.program_counter = -// route_txn_label; interpreter.set_context_metadata_field(0, -// ContextMetadata::GasLimit, 1_000_000.into()); interpreter. -// set_is_kernel(true); interpreter -// .run() -// .expect("Proving add11 with exception failed."); -// } +use std::collections::HashMap; +use std::str::FromStr; + +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; +use hex_literal::hex; +use keccak_hash::keccak; +use mpt_trie::nibbles::Nibbles; +use mpt_trie::partial_trie::{HashedPartialTrie, Node, PartialTrie}; +use plonky2::field::goldilocks_field::GoldilocksField as F; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::MemoryDb; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; + +use super::account_code::set_account; +use crate::cpu::kernel::aggregator::KERNEL; +use crate::cpu::kernel::constants::context_metadata::ContextMetadata; +use crate::cpu::kernel::interpreter::Interpreter; +use crate::generation::mpt::{AccountRlp, LegacyReceiptRlp}; +use crate::generation::TrieInputs; +use crate::proof::{BlockHashes, BlockMetadata, TrieRoots}; +use crate::GenerationInputs; + +#[test] +fn test_add11_yml() { + let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); + let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); + let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); + + let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x60, 0x00, 0x55, 0x00]; + let code_hash = hash_bytecode_u256(code.to_vec()); + + let mut contract_code = HashMap::new(); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); + contract_code.insert(code_hash, code.to_vec()); + + let beneficiary_account_before = AccountRlp { + nonce: 1.into(), + 
..AccountRlp::default() + }; + let sender_account_before = AccountRlp { + balance: 0x0de0b6b3a7640000u64.into(), + ..AccountRlp::default() + }; + let to_account_before = AccountRlp { + balance: 0x0de0b6b3a7640000u64.into(), + code_hash, + ..AccountRlp::default() + }; + + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); + + let tries_before = TrieInputs { + state_smt: state_smt_before.serialize(), + transactions_trie: Node::Empty.into(), + receipts_trie: Node::Empty.into(), + }; + + let txn = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16"); + + let block_metadata = BlockMetadata { + block_beneficiary: Address::from(beneficiary), + block_timestamp: 0x03e8.into(), + block_number: 1.into(), + block_difficulty: 0x020000.into(), + block_random: H256::from_uint(&0x020000.into()), + block_gaslimit: 0xff112233u32.into(), + block_chain_id: 1.into(), + block_base_fee: 0xa.into(), + block_gas_used: 0xa868u64.into(), + block_bloom: [0.into(); 8], + }; + + let expected_state_smt_after = { + let mut smt = Smt::::default(); + let beneficiary_account_after = AccountRlp { + nonce: 1.into(), + ..AccountRlp::default() + }; + let sender_account_after = AccountRlp { + balance: 0xde0b6b3a75be550u64.into(), + nonce: 1.into(), + ..AccountRlp::default() + }; + let to_account_after = AccountRlp { + balance: 0xde0b6b3a76586a0u64.into(), + code_hash, + ..AccountRlp::default() + }; + + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + 
H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(to), + &to_account_after, + &HashMap::from([(U256::zero(), 2.into())]), // Storage map: { 0 => 2 } + ); + + smt + }; + + let receipt_0 = LegacyReceiptRlp { + status: true, + cum_gas_used: 0xa868u64.into(), + bloom: vec![0; 256].into(), + logs: vec![], + }; + let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + receipts_trie + .insert( + Nibbles::from_str("0x80").unwrap(), + rlp::encode(&receipt_0).to_vec(), + ) + .unwrap(); + let transactions_trie: HashedPartialTrie = Node::Leaf { + nibbles: Nibbles::from_str("0x80").unwrap(), + value: txn.to_vec(), + } + .into(); + + let trie_roots_after = TrieRoots { + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), + transactions_root: transactions_trie.hash(), + receipts_root: receipts_trie.hash(), + }; + let inputs = GenerationInputs { + signed_txn: Some(txn.to_vec()), + withdrawals: vec![], + tries: tries_before, + trie_roots_after, + contract_code, + block_metadata, + checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + txn_number_before: 0.into(), + gas_used_before: 0.into(), + gas_used_after: 0xa868u64.into(), + block_hashes: BlockHashes { + prev_hashes: vec![H256::default(); 256], + cur_hash: H256::default(), + }, + }; + + let initial_offset = KERNEL.global_labels["main"]; + let initial_stack = vec![]; + let mut interpreter: Interpreter = + Interpreter::new_with_generation_inputs(initial_offset, initial_stack, inputs); + + let route_txn_label = KERNEL.global_labels["main"]; + // Switch context and initialize memory with the data we need for the tests. 
+ interpreter.generation_state.registers.program_counter = route_txn_label; + interpreter.set_context_metadata_field(0, ContextMetadata::GasLimit, 1_000_000.into()); + interpreter.set_is_kernel(true); + interpreter.run().expect("Proving add11 failed."); +} + +#[test] +fn test_add11_yml_with_exception() { + // In this test, we make sure that the user code throws a stack underflow + // exception. + + let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); + let sender = hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); + let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); + + let code = [0x60, 0x01, 0x60, 0x01, 0x01, 0x8e, 0x00]; + let code_hash = hash_bytecode_u256(code.to_vec()); + + let mut contract_code = HashMap::new(); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); + contract_code.insert(code_hash, code.to_vec()); + + let beneficiary_account_before = AccountRlp { + nonce: 1.into(), + ..AccountRlp::default() + }; + let sender_account_before = AccountRlp { + balance: 0x0de0b6b3a7640000u64.into(), + ..AccountRlp::default() + }; + let to_account_before = AccountRlp { + balance: 0x0de0b6b3a7640000u64.into(), + code_hash, + ..AccountRlp::default() + }; + + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); + + let tries_before = TrieInputs { + state_smt: state_smt_before.serialize(), + transactions_trie: Node::Empty.into(), + receipts_trie: Node::Empty.into(), + }; + + let txn = +hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16" +); + let txn_gas_limit = 400_000; + let 
gas_price = 10; + + // Here, since the transaction fails, it consumes its gas limit, and does + // nothing else. + let expected_state_smt_after = { + let mut smt = Smt::::default(); + let beneficiary_account_after = beneficiary_account_before; + let to_account_after = to_account_before; + // This is the only account that changes: the nonce and the balance are updated. + let sender_account_after = AccountRlp { + balance: sender_account_before.balance - txn_gas_limit * gas_price, + nonce: 1.into(), + ..AccountRlp::default() + }; + + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to), &to_account_after, &HashMap::new()); + + smt + }; + + let receipt_0 = LegacyReceiptRlp { + status: false, + cum_gas_used: txn_gas_limit.into(), + bloom: vec![0; 256].into(), + logs: vec![], + }; + let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + receipts_trie.insert( + Nibbles::from_str("0x80").unwrap(), + rlp::encode(&receipt_0).to_vec(), + ); + let transactions_trie: HashedPartialTrie = Node::Leaf { + nibbles: Nibbles::from_str("0x80").unwrap(), + value: txn.to_vec(), + } + .into(); + + let trie_roots_after = TrieRoots { + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), + transactions_root: transactions_trie.hash(), + receipts_root: receipts_trie.hash(), + }; + + let block_metadata = BlockMetadata { + block_beneficiary: Address::from(beneficiary), + block_timestamp: 0x03e8.into(), + block_number: 1.into(), + block_difficulty: 0x020000.into(), + block_random: H256::from_uint(&0x020000.into()), + block_gaslimit: 0xff112233u32.into(), + block_chain_id: 1.into(), + block_base_fee: 0xa.into(), + block_gas_used: txn_gas_limit.into(), + block_bloom: [0.into(); 8], + }; + + let tries_inputs = GenerationInputs { + signed_txn: Some(txn.to_vec()), + withdrawals: vec![], + tries: 
tries_before, + trie_roots_after, + contract_code: contract_code.clone(), + block_metadata, + checkpoint_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(), + txn_number_before: 0.into(), + gas_used_before: 0.into(), + gas_used_after: txn_gas_limit.into(), + block_hashes: BlockHashes { + prev_hashes: vec![H256::default(); 256], + cur_hash: H256::default(), + }, + }; + + let initial_offset = KERNEL.global_labels["main"]; + let initial_stack = vec![]; + let mut interpreter: Interpreter = + Interpreter::new_with_generation_inputs(initial_offset, initial_stack, tries_inputs); + + let route_txn_label = KERNEL.global_labels["main"]; + // Switch context and initialize memory with the data we need for the tests. + interpreter.generation_state.registers.program_counter = route_txn_label; + interpreter.set_context_metadata_field(0, ContextMetadata::GasLimit, 1_000_000.into()); + interpreter.set_is_kernel(true); + interpreter + .run() + .expect("Proving add11 with exception failed."); +} diff --git a/evm_arithmetization/src/cpu/kernel/tests/balance.rs b/evm_arithmetization/src/cpu/kernel/tests/balance.rs index 6fffabe10..8c6dc8fd8 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/balance.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/balance.rs @@ -1,134 +1,150 @@ -// use anyhow::Result; -// use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; -// use ethereum_types::{Address, BigEndianHash, H256, U256}; -// use keccak_hash::keccak; -// use plonky2::field::goldilocks_field::GoldilocksField as F; -// use plonky2::field::types::Field; -// use rand::{thread_rng, Rng}; - -// use crate::cpu::kernel::aggregator::KERNEL; -// use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; -// use crate::cpu::kernel::interpreter::Interpreter; -// use crate::cpu::kernel::tests::account_code::initialize_mpts; -// use crate::cpu::kernel::tests::mpt::nibbles_64; -// use crate::generation::mpt::AccountRlp; -// use crate::Node; - -// // Test account with a 
given code hash. -// fn test_account(balance: U256) -> AccountRlp { -// AccountRlp { -// nonce: U256::from(1111), -// balance, -// storage_root: HashedPartialTrie::from(Node::Empty).hash(), -// code_hash: H256::from_uint(&U256::from(8888)), -// } -// } - -// // Stolen from `tests/mpt/insert.rs` -// // Prepare the interpreter by inserting the account in the state trie. -// fn prepare_interpreter( -// interpreter: &mut Interpreter, -// address: Address, -// account: &AccountRlp, -// ) -> Result<()> { -// let mpt_insert_state_trie = -// KERNEL.global_labels["mpt_insert_state_trie"]; let mpt_hash_state_trie = -// KERNEL.global_labels["mpt_hash_state_trie"]; let mut state_trie: -// HashedPartialTrie = Default::default(); let trie_inputs = -// Default::default(); - -// initialize_mpts(interpreter, &trie_inputs); -// assert_eq!(interpreter.stack(), vec![]); - -// let k = nibbles_64(U256::from_big_endian( -// keccak(address.to_fixed_bytes()).as_bytes(), -// )); -// // Next, execute mpt_insert_state_trie. -// interpreter.generation_state.registers.program_counter = -// mpt_insert_state_trie; let trie_data = interpreter.get_trie_data_mut(); -// if trie_data.is_empty() { -// // In the assembly we skip over 0, knowing trie_data[0] = 0 by -// default. // Since we don't explicitly set it to 0, we need to do so -// here. trie_data.push(0.into()); -// } -// let value_ptr = trie_data.len(); -// trie_data.push(account.nonce); -// trie_data.push(account.balance); -// // In memory, storage_root gets interpreted as a pointer to a storage -// trie, // so we have to ensure the pointer is valid. It's easiest to set -// it to 0, // which works as an empty node, since trie_data[0] = 0 = -// MPT_TYPE_EMPTY. 
trie_data.push(H256::zero().into_uint()); -// trie_data.push(account.code_hash.into_uint()); -// let trie_data_len = trie_data.len().into(); -// interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, -// trie_data_len); interpreter -// .push(0xDEADBEEFu32.into()) -// .expect("The stack should not overflow"); -// interpreter -// .push(value_ptr.into()) -// .expect("The stack should not overflow"); // value_ptr -// interpreter -// .push(k.try_into_u256().unwrap()) -// .expect("The stack should not overflow"); // key - -// interpreter.run()?; -// assert_eq!( -// interpreter.stack().len(), -// 0, -// "Expected empty stack after insert, found {:?}", -// interpreter.stack() -// ); - -// // Now, execute mpt_hash_state_trie. -// interpreter.generation_state.registers.program_counter = -// mpt_hash_state_trie; interpreter -// .push(0xDEADBEEFu32.into()) -// .expect("The stack should not overflow"); -// interpreter -// .push(1.into()) // Initial trie data segment size, unused. -// .expect("The stack should not overflow"); -// interpreter.run()?; - -// assert_eq!( -// interpreter.stack().len(), -// 2, -// "Expected 2 items on stack after hashing, found {:?}", -// interpreter.stack() -// ); -// let hash = H256::from_uint(&interpreter.stack()[1]); - -// state_trie.insert(k, rlp::encode(account).to_vec()); -// let expected_state_trie_hash = state_trie.hash(); -// assert_eq!(hash, expected_state_trie_hash); - -// Ok(()) -// } - -// #[test] -// fn test_balance() -> Result<()> { -// let mut rng = thread_rng(); -// let balance = U256(rng.gen()); -// let account = test_account(balance); - -// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, -// vec![]); let address: Address = rng.gen(); -// // Prepare the interpreter by inserting the account in the state trie. 
-// prepare_interpreter(&mut interpreter, address, &account)?; - -// // Test `balance` -// interpreter.generation_state.registers.program_counter = -// KERNEL.global_labels["balance"]; interpreter.pop().expect("The stack -// should not be empty"); interpreter.pop().expect("The stack should not be -// empty"); assert!(interpreter.stack().is_empty()); -// interpreter -// .push(0xDEADBEEFu32.into()) -// .expect("The stack should not overflow"); -// interpreter -// .push(U256::from_big_endian(address.as_bytes())) -// .expect("The stack should not overflow"); -// interpreter.run()?; - -// assert_eq!(interpreter.stack(), vec![balance]); - -// Ok(()) -// } +use std::collections::HashMap; + +use anyhow::Result; +use ethereum_types::{Address, BigEndianHash, H256, U256}; +use keccak_hash::keccak; +use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; +use plonky2::field::goldilocks_field::GoldilocksField as F; +use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use rand::{thread_rng, Rng}; +use smt_trie::db::MemoryDb; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce}; +use smt_trie::smt::Smt; +use smt_trie::utils::{hashout2u, key2u}; + +use crate::cpu::kernel::aggregator::KERNEL; +use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; +use crate::cpu::kernel::interpreter::Interpreter; +use crate::cpu::kernel::tests::account_code::{initialize_mpts, set_account}; +use crate::cpu::kernel::tests::mpt::nibbles_64; +use crate::generation::mpt::AccountRlp; +use crate::Node; + +// Test account with a given code hash. +fn test_account(balance: U256) -> AccountRlp { + AccountRlp { + nonce: U256::from(1111), + balance, + code_hash: U256::from(8888), + code_length: 42.into(), // arbitrary + } +} + +// Stolen from `tests/mpt/insert.rs` +// Prepare the interpreter by inserting the account in the state trie. 
+fn prepare_interpreter( + interpreter: &mut Interpreter, + address: Address, + account: &AccountRlp, +) -> Result<()> { + let smt_insert_state = KERNEL.global_labels["smt_insert_state"]; + let smt_hash_state = KERNEL.global_labels["smt_hash_state"]; + let mut state_smt = Smt::::default(); + let trie_inputs = Default::default(); + + initialize_mpts(interpreter, &trie_inputs); + assert_eq!(interpreter.stack(), vec![]); + + // Next, execute smt_insert_state. + interpreter.generation_state.registers.program_counter = smt_insert_state; + let trie_data = interpreter.get_trie_data_mut(); + if trie_data.is_empty() { + // In the assembly we skip over 0, knowing trie_data[0] = 0 by default. + // Since we don't explicitly set it to 0, we need to do so here. + trie_data.push(Some(0.into())); + trie_data.push(Some(0.into())); + } + let trie_data_len = trie_data.len().into(); + interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); + for (key, value) in [ + (key_balance(address), account.balance), + (key_nonce(address), account.nonce), + (key_code(address), account.code_hash), + (key_code_length(address), account.code_length), + ] { + if value.is_zero() { + continue; + } + interpreter.generation_state.registers.program_counter = smt_insert_state; + interpreter + .push(0xDEADBEEFu32.into()) + .expect("The stack should not overflow"); + interpreter + .push(value) + .expect("The stack should not overflow"); // value_ptr + let keyu = key2u(key); + interpreter + .push(keyu) + .expect("The stack should not overflow"); // key + + interpreter.run()?; + assert_eq!( + interpreter.stack().len(), + 0, + "Expected empty stack after insert, found {:?}", + interpreter.stack() + ); + } + + interpreter.run()?; + assert_eq!( + interpreter.stack().len(), + 0, + "Expected empty stack after insert, found {:?}", + interpreter.stack() + ); + + // Now, execute smt_hash_state. 
+ interpreter.generation_state.registers.program_counter = smt_hash_state; + interpreter + .push(0xDEADBEEFu32.into()) + .expect("The stack should not overflow"); + interpreter + .push(2.into()) // Initial trie data segment size, unused. + .expect("The stack should not overflow"); + interpreter.run()?; + + assert_eq!( + interpreter.stack().len(), + 2, + "Expected 2 items on stack after hashing, found {:?}", + interpreter.stack() + ); + let hash = interpreter.stack()[1]; + + set_account(&mut state_smt, address, account, &HashMap::new()); + let expected_state_trie_hash = hashout2u(state_smt.root); + assert_eq!(hash, expected_state_trie_hash); + + Ok(()) +} + +#[test] +fn test_balance() -> Result<()> { + let mut rng = thread_rng(); + let balance = U256(rng.gen()); + let account = test_account(balance); + + let mut interpreter: Interpreter = Interpreter::new(0, vec![]); + let address: Address = rng.gen(); + // Prepare the interpreter by inserting the account in the state trie. + prepare_interpreter(&mut interpreter, address, &account)?; + + // Test `balance` + interpreter.generation_state.registers.program_counter = KERNEL.global_labels["balance"]; + interpreter.pop().expect("The stack should not be empty"); + interpreter.pop().expect("The stack should not be empty"); + assert!(interpreter.stack().is_empty()); + interpreter + .push(0xDEADBEEFu32.into()) + .expect("The stack should not overflow"); + interpreter + .push(U256::from_big_endian(address.as_bytes())) + .expect("The stack should not overflow"); + interpreter.run()?; + + assert_eq!(interpreter.stack(), vec![balance]); + + Ok(()) +} diff --git a/evm_arithmetization/src/cpu/kernel/tests/receipt.rs b/evm_arithmetization/src/cpu/kernel/tests/receipt.rs index fa7471fa4..de6e7f1d4 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/receipt.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/receipt.rs @@ -1,650 +1,618 @@ -// use anyhow::Result; -// use ethereum_types::{Address, U256}; -// use hex_literal::hex; -// 
use keccak_hash::keccak; -// use plonky2::field::goldilocks_field::GoldilocksField as F; -// use rand::{thread_rng, Rng}; - -// use crate::cpu::kernel::aggregator::KERNEL; -// use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; -// use crate::cpu::kernel::constants::txn_fields::NormalizedTxnField; -// use crate::cpu::kernel::interpreter::Interpreter; -// use crate::cpu::kernel::tests::account_code::initialize_mpts; -// use crate::generation::mpt::{LegacyReceiptRlp, LogRlp}; -// use crate::memory::segments::Segment; - -// #[test] -// fn test_process_receipt() -> Result<()> { -// /* Tests process_receipt, which: -// - computes the cumulative gas -// - computes the bloom filter -// - inserts the receipt data in MPT_TRIE_DATA -// - inserts a node in receipt_trie -// - resets the bloom filter to 0 for the next transaction. */ -// let process_receipt = KERNEL.global_labels["process_receipt"]; -// let success = U256::from(1); -// let leftover_gas = U256::from(4000); -// let prev_cum_gas = U256::from(1000); -// let retdest = 0xDEADBEEFu32.into(); - -// // Log. -// let address: Address = thread_rng().gen(); -// let num_topics = 1; - -// let mut topic = vec![0_u8; 32]; -// topic[31] = 4; - -// // Compute the expected Bloom filter. -// let test_logs_list = vec![(address.to_fixed_bytes().to_vec(), -// vec![topic])]; let expected_bloom = -// logs_bloom_bytes_fn(test_logs_list).to_vec(); - -// // Set memory. 
-// let num_nibbles = 2.into(); -// let initial_stack: Vec = vec![ -// retdest, -// num_nibbles, -// 0.into(), -// prev_cum_gas, -// leftover_gas, -// success, -// ]; -// let mut interpreter: Interpreter = -// Interpreter::new_with_kernel(process_receipt, initial_stack); -// interpreter.set_memory_segment( -// Segment::LogsData, -// vec![ -// 56.into(), // payload len -// U256::from_big_endian(&address.to_fixed_bytes()), // address -// num_topics.into(), // num_topics -// 4.into(), // topic -// 0.into(), // data_len -// ], -// ); -// interpreter.set_txn_field(NormalizedTxnField::GasLimit, -// U256::from(5000)); interpreter.set_memory_segment(Segment::TxnBloom, -// vec![0.into(); 256]); interpreter.set_memory_segment(Segment::Logs, -// vec![0.into()]); interpreter. -// set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 58.into()); -// interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, -// U256::from(1)); interpreter. -// set_global_metadata_field(GlobalMetadata::ReceiptTrieRoot, 500.into()); -// interpreter.run()?; - -// let segment_read = interpreter.get_memory_segment(Segment::TrieData); - -// // The expected TrieData has the form [payload_len, status, cum_gas_used, -// bloom_filter, logs_payload_len, num_logs, [logs]] let mut -// expected_trie_data: Vec = vec![323.into(), success, 2000.into()]; -// expected_trie_data.extend( -// expected_bloom -// .into_iter() -// .map(|elt| elt.into()) -// .collect::>(), -// ); -// expected_trie_data.push(58.into()); // logs_payload_len -// expected_trie_data.push(1.into()); // num_logs -// expected_trie_data.extend(vec![ -// 56.into(), // payload len -// U256::from_big_endian(&address.to_fixed_bytes()), // address -// num_topics.into(), // num_topics -// 4.into(), // topic -// 0.into(), // data_len -// ]); - -// assert_eq!( -// expected_trie_data, -// segment_read[0..expected_trie_data.len()] -// ); - -// Ok(()) -// } - -// /// Values taken from the block 1000000 of Goerli: 
https://goerli.etherscan.io/txs?block=1000000 -// #[test] -// fn test_receipt_encoding() -> Result<()> { -// // Initialize interpreter. -// let success = U256::from(1); - -// let retdest = 0xDEADBEEFu32.into(); -// let num_topics = 3; - -// let encode_receipt = KERNEL.global_labels["encode_receipt"]; - -// // Logs and receipt in encodable form. -// let log_1 = LogRlp { -// address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), -// topics: vec![ -// -// hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d"). -// into(), -// hex!("0000000000000000000000000000000000000000000000000000000000000004"). -// into(), -// hex!("00000000000000000000000000000000000000000000000000000000004920ea"). -// into(), ], -// data: -// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") -// .to_vec() -// .into(), -// }; - -// let receipt_1 = LegacyReceiptRlp { -// status: true, -// cum_gas_used: 0x02dcb6u64.into(), -// bloom: -// hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000" -// ).to_vec().into(), logs: vec![log_1], -// }; -// // Get the expected RLP encoding. -// let expected_rlp = rlp::encode(&rlp::encode(&receipt_1)); - -// let initial_stack: Vec = vec![retdest, 0.into(), 0.into(), -// 0.into()]; let mut interpreter: Interpreter = -// Interpreter::new_with_kernel(encode_receipt, initial_stack); - -// // Write data to memory. 
-// let expected_bloom_bytes = vec![ -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, -// 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, -// 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// ]; -// let expected_bloom: Vec = expected_bloom_bytes -// .into_iter() -// .map(|elt| elt.into()) -// .collect(); - -// let addr = U256::from([ -// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, -// 0x9e, 0x12, 0xca, 0xf3, 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, -// 0x77, 0xc5, 0x9d, 0x8d, ]); - -// let topic1 = U256::from([ -// 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, -// 0x49, 0x51, 0x27, 0x31, 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, -// 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, 0x67, 0x4d, -// ]); - -// let topic2 = 4.into(); -// let topic3 = 0x4920ea.into(); - -// let mut logs = vec![ -// 155.into(), // unused -// addr, -// num_topics.into(), // num_topics -// topic1, // topic1 -// topic2, // topic2 -// topic3, // topic3 -// 32.into(), // data length -// ]; -// let cur_data = -// 
hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") -// .iter() -// .copied() -// .map(U256::from); -// logs.extend(cur_data); - -// let mut receipt = vec![423.into(), success, receipt_1.cum_gas_used]; -// receipt.extend(expected_bloom.clone()); -// receipt.push(157.into()); // logs_payload_len -// receipt.push(1.into()); // num_logs -// receipt.extend(logs.clone()); -// interpreter.set_memory_segment(Segment::LogsData, logs); - -// interpreter.set_memory_segment(Segment::TxnBloom, expected_bloom); - -// interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); -// interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 1.into()); -// interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, -// 157.into()); interpreter.set_memory_segment(Segment::TrieData, receipt); - -// interpreter.run()?; -// let rlp_pos = interpreter.pop().expect("The stack should not be empty"); - -// let rlp_read: Vec = interpreter.get_rlp_memory(); - -// assert_eq!(rlp_pos.as_usize(), expected_rlp.len()); -// for i in 0..rlp_read.len() { -// assert_eq!(rlp_read[i], expected_rlp[i]); -// } - -// Ok(()) -// } - -// /// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 -// #[test] -// fn test_receipt_bloom_filter() -> Result<()> { -// let logs_bloom = KERNEL.global_labels["logs_bloom"]; - -// let num_topics = 3; - -// // Expected bloom -// let first_bloom_bytes = vec![ -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, -// 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 0x08, 00, 0x08, 00, 00, 
00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, ]; - -// let retdest = 0xDEADBEEFu32.into(); - -// let addr = U256::from([ -// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, -// 0x9e, 0x12, 0xca, 0xf3, 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, -// 0x77, 0xc5, 0x9d, 0x8d, ]); - -// let topic1 = U256::from([ -// 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, -// 0x49, 0x51, 0x27, 0x31, 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, -// 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, 0x67, 0x4d, -// ]); - -// let topic02 = 0x2a.into(); -// let topic03 = 0xbd9fe6.into(); - -// // Set logs memory and initialize TxnBloom and BlockBloom segments. -// let initial_stack: Vec = vec![retdest]; - -// let mut interpreter: Interpreter = -// Interpreter::new_with_kernel(logs_bloom, initial_stack); let mut logs = -// vec![ 0.into(), // unused -// addr, -// num_topics.into(), // num_topics -// topic1, // topic1 -// topic02, // topic2 -// topic03, // topic3 -// 32.into(), // data_len -// ]; -// let cur_data = -// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") -// .iter() -// .copied() -// .map(U256::from); -// logs.extend(cur_data); -// // The Bloom filter initialization is required for this test to ensure we -// have the correct length for the filters. Otherwise, some trailing zeroes -// could be missing. interpreter.set_memory_segment(Segment::TxnBloom, -// vec![0.into(); 256]); // Initialize transaction Bloom filter. 
-// interpreter.set_memory_segment(Segment::LogsData, logs); -// interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); -// interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, -// U256::from(1)); interpreter.run()?; - -// // Second transaction. -// let loaded_bloom_u256 = -// interpreter.get_memory_segment(Segment::TxnBloom); let loaded_bloom: -// Vec = loaded_bloom_u256 .into_iter() -// .map(|elt| elt.0[0] as u8) -// .collect(); - -// assert_eq!(first_bloom_bytes, loaded_bloom); -// let topic12 = 0x4.into(); -// let topic13 = 0x4920ea.into(); -// let mut logs2 = vec![ -// 0.into(), // unused -// addr, -// num_topics.into(), // num_topics -// topic1, // topic1 -// topic12, // topic2 -// topic13, // topic3 -// 32.into(), // data_len -// ]; -// let cur_data = -// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") -// .iter() -// .copied() -// .map(U256::from); -// logs2.extend(cur_data); - -// interpreter -// .push(retdest) -// .expect("The stack should not overflow"); -// interpreter.generation_state.registers.program_counter = logs_bloom; -// interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); -// // Initialize transaction Bloom filter. interpreter. -// set_memory_segment(Segment::LogsData, logs2); interpreter. -// set_memory_segment(Segment::Logs, vec![0.into()]); interpreter. 
-// set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); -// interpreter.run()?; - -// let second_bloom_bytes = vec![ -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, -// 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, -// 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// ]; - -// let second_loaded_bloom_u256 = -// interpreter.get_memory_segment(Segment::TxnBloom); -// let second_loaded_bloom: Vec = second_loaded_bloom_u256 -// .into_iter() -// .map(|elt| elt.0[0] as u8) -// .collect(); - -// assert_eq!(second_bloom_bytes, second_loaded_bloom); - -// Ok(()) -// } - -// #[test] -// fn test_mpt_insert_receipt() -> Result<()> { -// // This test simulates a receipt processing to test -// `mpt_insert_receipt_trie`. // For this, we need to set the data correctly -// in memory. // In TrieData, we need to insert a receipt of the form: -// // `[payload_len, status, cum_gas_used, bloom, logs_payload_len, -// num_logs, [logs]]`. // We also need to set TrieDataSize correctly. 
- -// let retdest = 0xDEADBEEFu32.into(); -// let trie_inputs = Default::default(); -// let mpt_insert = KERNEL.global_labels["mpt_insert_receipt_trie"]; -// let num_topics = 3; // Both transactions have the same number of topics. -// let payload_len = 423; // Total payload length for each receipt. -// let logs_payload_len = 157; // Payload length for all logs. -// let log_payload_len = 155; // Payload length for one log. -// let num_logs = 1; - -// // Receipt_0: -// let status_0 = 1; -// let cum_gas_used_0 = 0x016e5b; -// let logs_bloom_0_bytes = vec![ -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, -// 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, ]; - -// // Logs_0: -// let logs_bloom_0: Vec = logs_bloom_0_bytes -// .into_iter() -// .map(|elt| elt.into()) -// .collect(); - -// let addr = U256::from([ -// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, -// 0x9e, 0x12, 0xca, 0xf3, 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, -// 0x77, 0xc5, 0x9d, 0x8d, ]); - 
-// // The first topic is shared by the two transactions. -// let topic1 = U256::from([ -// 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, -// 0x49, 0x51, 0x27, 0x31, 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, -// 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, 0x67, 0x4d, -// ]); - -// let topic02 = 0x2a.into(); -// let topic03 = 0xbd9fe6.into(); - -// let mut logs_0 = vec![ -// log_payload_len.into(), // payload_len -// addr, -// num_topics.into(), // num_topics -// topic1, // topic1 -// topic02, // topic2 -// topic03, // topic3 -// 32.into(), // data_len -// ]; -// let cur_data = -// hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") -// .iter() -// .copied() -// .map(U256::from); -// logs_0.extend(cur_data); - -// let mut receipt: Vec = vec![423.into(), status_0.into(), -// cum_gas_used_0.into()]; receipt.extend(logs_bloom_0); -// receipt.push(logs_payload_len.into()); // logs_payload_len -// receipt.push(num_logs.into()); // num_logs -// receipt.extend(logs_0.clone()); - -// let mut interpreter: Interpreter = Interpreter::new_with_kernel(0, -// vec![]); initialize_mpts(&mut interpreter, &trie_inputs); - -// // If TrieData is empty, we need to push 0 because the first value is -// always 0. let mut cur_trie_data = -// interpreter.get_memory_segment(Segment::TrieData); if cur_trie_data. -// is_empty() { cur_trie_data.push(0.into()); -// } - -// // stack: transaction_nb, value_ptr, retdest -// let num_nibbles = 2; -// let initial_stack: Vec = vec![ -// retdest, -// cur_trie_data.len().into(), -// 0x80.into(), -// num_nibbles.into(), -// ]; -// for i in 0..initial_stack.len() { -// interpreter -// .push(initial_stack[i]) -// .expect("The stack should not overflow"); -// } - -// interpreter.generation_state.registers.program_counter = mpt_insert; - -// // Set memory. 
-// cur_trie_data.extend(receipt); -// interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); -// interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, -// cur_trie_data.len().into()); // First insertion. -// interpreter.run()?; - -// // receipt_1: -// let status_1 = 1; -// let cum_gas_used_1 = 0x02dcb6; -// let logs_bloom_1_bytes = vec![ -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, -// 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, -// 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, -// ]; - -// // Logs_1: -// let logs_bloom_1: Vec = logs_bloom_1_bytes -// .into_iter() -// .map(|elt| elt.into()) -// .collect(); - -// let topic12 = 4.into(); -// let topic13 = 0x4920ea.into(); - -// let mut logs_1 = vec![ -// log_payload_len.into(), // payload length -// addr, -// num_topics.into(), // nb topics -// topic1, // topic1 -// topic12, // topic2 -// topic13, // topic3 -// 32.into(), // data length -// ]; -// let cur_data = -// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") 
-// .iter() -// .copied() -// .map(U256::from); -// logs_1.extend(cur_data); - -// let mut receipt_1: Vec = vec![payload_len.into(), status_1.into(), -// cum_gas_used_1.into()]; receipt_1.extend(logs_bloom_1); -// receipt_1.push(logs_payload_len.into()); // logs payload len -// receipt_1.push(num_logs.into()); // nb logs -// receipt_1.extend(logs_1.clone()); - -// // Get updated TrieData segment. -// cur_trie_data = interpreter.get_memory_segment(Segment::TrieData); -// let num_nibbles = 2; -// let initial_stack2: Vec = vec![ -// retdest, -// cur_trie_data.len().into(), -// 0x01.into(), -// num_nibbles.into(), -// ]; -// for i in 0..initial_stack2.len() { -// interpreter -// .push(initial_stack2[i]) -// .expect("The stack should not overflow"); -// } -// cur_trie_data.extend(receipt_1); - -// // Set memory. -// interpreter.generation_state.registers.program_counter = mpt_insert; -// interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); -// interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, -// cur_trie_data.len().into()); interpreter.run()?; - -// // Finally, check that the hashes correspond. -// let mpt_hash_receipt = KERNEL.global_labels["mpt_hash_receipt_trie"]; -// interpreter.generation_state.registers.program_counter = -// mpt_hash_receipt; interpreter -// .push(retdest) -// .expect("The stack should not overflow"); -// interpreter -// .push(1.into()) // Initial length of the trie data segment, unused.; -// // Initial length of the trie data segment, unused. .expect("The -// stack should not overflow"); interpreter.run()?; -// assert_eq!( -// interpreter.stack()[1], -// U256::from(hex!( -// -// "da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98" )) -// ); -// Ok(()) -// } - -// #[test] -// fn test_bloom_two_logs() -> Result<()> { -// // Tests the Bloom filter computation with two logs in one transaction. 
- -// // address -// let to = [ -// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x09, 0x5e, 0x7b, 0xae, 0xa6, -// 0xa6, 0xc7, 0xc4, 0xc2, 0xdf, 0xeb, 0x97, 0x7e, 0xfa, 0xc3, 0x26, -// 0xaf, 0x55, 0x2d, 0x87, ]; - -// let retdest = 0xDEADBEEFu32.into(); -// let logs_bloom = KERNEL.global_labels["logs_bloom"]; - -// let initial_stack: Vec = vec![retdest]; - -// // Set memory. -// let logs = vec![ -// 0.into(), // unused -// to.into(), // address -// 0.into(), // num_topics -// 0.into(), // data_len, -// 0.into(), // unused: rlp -// to.into(), -// 2.into(), // num_topics -// 0x62.into(), -// 0x63.into(), -// 5.into(), -// [ -// 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -// 0, 0, 0, 0, 0, 0xa1, 0xb2, 0xc3, 0xd4, 0xe5, -// ] -// .into(), -// ]; -// let mut interpreter: Interpreter = -// Interpreter::new_with_kernel(logs_bloom, initial_stack); interpreter. -// set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize -// transaction Bloom filter. interpreter. -// set_memory_segment(Segment::LogsData, logs); interpreter. -// set_memory_segment(Segment::Logs, vec![0.into(), 4.into()]); interpreter. 
-// set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(2)); -// interpreter.run()?; - -// let loaded_bloom_bytes: Vec = interpreter -// .get_memory_segment(Segment::TxnBloom) -// .into_iter() -// .map(|elt| elt.0[0] as u8) -// .collect(); - -// let expected = -// hex!("00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000004000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000400000000000040000000000000000000000000002000000000000000000000000000" -// ).to_vec(); - -// assert_eq!(expected, loaded_bloom_bytes); -// Ok(()) -// } - -// fn logs_bloom_bytes_fn(logs_list: Vec<(Vec, Vec>)>) -> [u8; 256] -// { // The first element of logs_list. -// let mut bloom = [0_u8; 256]; - -// for log in logs_list { -// let cur_addr = log.0; -// let topics = log.1; - -// add_to_bloom(&mut bloom, &cur_addr); -// for topic in topics { -// add_to_bloom(&mut bloom, &topic); -// } -// } -// bloom -// } - -// fn add_to_bloom(bloom: &mut [u8; 256], bloom_entry: &[u8]) { -// let bloom_hash = keccak(bloom_entry).to_fixed_bytes(); - -// for idx in 0..3 { -// let bit_pair = u16::from_be_bytes(bloom_hash[2 * idx..2 * (idx + -// 1)].try_into().unwrap()); let bit_to_set = 0x07FF - (bit_pair & -// 0x07FF); let byte_index = bit_to_set / 8; -// let bit_value = 1 << (7 - bit_to_set % 8); -// bloom[byte_index as usize] |= bit_value; -// } -// } +use anyhow::Result; +use ethereum_types::{Address, U256}; +use hex_literal::hex; +use keccak_hash::keccak; +use plonky2::field::goldilocks_field::GoldilocksField as F; +use rand::{thread_rng, Rng}; + +use crate::cpu::kernel::aggregator::KERNEL; +use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; +use 
crate::cpu::kernel::constants::txn_fields::NormalizedTxnField; +use crate::cpu::kernel::interpreter::Interpreter; +use crate::cpu::kernel::tests::account_code::initialize_mpts; +use crate::generation::mpt::{LegacyReceiptRlp, LogRlp}; +use crate::memory::segments::{Segment, SEGMENT_SCALING_FACTOR}; + +#[test] +fn test_process_receipt() -> Result<()> { + /* Tests process_receipt, which: + - computes the cumulative gas + - computes the bloom filter + - inserts the receipt data in MPT_TRIE_DATA + - inserts a node in receipt_trie + - resets the bloom filter to 0 for the next transaction. */ + let process_receipt = KERNEL.global_labels["process_receipt"]; + let success = U256::from(1); + let leftover_gas = U256::from(4000); + let prev_cum_gas = U256::from(1000); + let retdest = 0xDEADBEEFu32.into(); + + // Log. + let address: Address = thread_rng().gen(); + let num_topics = 1; + + let mut topic = vec![0_u8; 32]; + topic[31] = 4; + + // Compute the expected Bloom filter. + let test_logs_list = vec![(address.to_fixed_bytes().to_vec(), vec![topic])]; + let expected_bloom = logs_bloom_bytes_fn(test_logs_list).to_vec(); + + // Set memory. 
+ let num_nibbles = 2.into(); + let initial_stack: Vec = vec![ + retdest, + num_nibbles, + 0.into(), + prev_cum_gas, + leftover_gas, + success, + ]; + let mut interpreter: Interpreter = Interpreter::new(process_receipt, initial_stack); + interpreter.set_memory_segment( + Segment::LogsData, + vec![ + 56.into(), // payload len + U256::from_big_endian(&address.to_fixed_bytes()), // address + num_topics.into(), // num_topics + 4.into(), // topic + 0.into(), // data_len + ], + ); + interpreter.set_txn_field(NormalizedTxnField::GasLimit, U256::from(5000)); + interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); + interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); + interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 58.into()); + interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); + interpreter.set_global_metadata_field(GlobalMetadata::ReceiptTrieRoot, 500.into()); + interpreter.run()?; + + let segment_read = interpreter.get_memory_segment(Segment::TrieData); + + // The expected TrieData has the form [payload_len, status, cum_gas_used, + // bloom_filter, logs_payload_len, num_logs, [logs]] + let mut expected_trie_data: Vec = vec![323.into(), success, 2000.into()]; + expected_trie_data.extend( + expected_bloom + .into_iter() + .map(|elt| elt.into()) + .collect::>(), + ); + expected_trie_data.push(58.into()); // logs_payload_len + expected_trie_data.push(1.into()); // num_logs + expected_trie_data.extend(vec![ + 56.into(), // payload len + U256::from_big_endian(&address.to_fixed_bytes()), // address + num_topics.into(), // num_topics + 4.into(), // topic + 0.into(), // data_len + ]); + + assert_eq!( + expected_trie_data, + segment_read[0..expected_trie_data.len()] + ); + + Ok(()) +} + +/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 +#[test] +fn test_receipt_encoding() -> Result<()> { + // Initialize interpreter. 
+ let success = U256::from(1); + + let retdest = 0xDEADBEEFu32.into(); + let num_topics = 3; + + let encode_receipt = KERNEL.global_labels["encode_receipt"]; + + // Logs and receipt in encodable form. + let log_1 = LogRlp { + address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), + topics: vec![ + hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(), + hex!("0000000000000000000000000000000000000000000000000000000000000004").into(), + hex!("00000000000000000000000000000000000000000000000000000000004920ea").into(), + ], + data: hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .to_vec() + .into(), + }; + + let receipt_1 = LegacyReceiptRlp { + status: true, + cum_gas_used: 0x02dcb6u64.into(), + bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000").to_vec().into(), + logs: vec![log_1], + }; + // Get the expected RLP encoding. + let expected_rlp = rlp::encode(&rlp::encode(&receipt_1)); + + // Address at which the encoding is written. + let rlp_addr = U256::from(Segment::RlpRaw as usize); + let initial_stack: Vec = vec![retdest, 0.into(), 0.into(), rlp_addr]; + let mut interpreter: Interpreter = Interpreter::new(encode_receipt, initial_stack); + + // Write data to memory. 
+ let expected_bloom_bytes = vec![ + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + ]; + let expected_bloom: Vec = expected_bloom_bytes + .into_iter() + .map(|elt| elt.into()) + .collect(); + + let addr = U256::from([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3, + 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d, + ]); + + let topic1 = U256::from([ + 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31, + 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, + 0x67, 0x4d, + ]); + + let topic2 = 4.into(); + let topic3 = 0x4920ea.into(); + + let mut logs = vec![ + 155.into(), // unused + addr, + num_topics.into(), // num_topics + topic1, // topic1 + topic2, // topic2 + topic3, // topic3 + 32.into(), // data length + ]; + let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .iter() + .copied() 
+ .map(U256::from); + logs.extend(cur_data); + + let mut receipt = vec![423.into(), success, receipt_1.cum_gas_used]; + receipt.extend(expected_bloom.clone()); + receipt.push(157.into()); // logs_payload_len + receipt.push(1.into()); // num_logs + receipt.extend(logs.clone()); + interpreter.set_memory_segment(Segment::LogsData, logs); + + interpreter.set_memory_segment(Segment::TxnBloom, expected_bloom); + + interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); + interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 1.into()); + interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 157.into()); + interpreter.set_memory_segment(Segment::TrieData, receipt); + + interpreter.run()?; + let rlp_pos = interpreter.pop().expect("The stack should not be empty"); + + let rlp_read: &[u8] = &interpreter.get_rlp_memory(); + + assert_eq!((rlp_pos - rlp_addr).as_usize(), expected_rlp.len()); + for i in 0..rlp_read.len() { + assert_eq!(rlp_read[i], expected_rlp[i]); + } + + Ok(()) +} + +/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 +#[test] +fn test_receipt_bloom_filter() -> Result<()> { + let logs_bloom = KERNEL.global_labels["logs_bloom"]; + + let num_topics = 3; + + // Expected bloom + let first_bloom_bytes = vec![ + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 
00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x10, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + ]; + + let retdest = 0xDEADBEEFu32.into(); + + let addr = U256::from([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3, + 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d, + ]); + + let topic1 = U256::from([ + 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31, + 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, + 0x67, 0x4d, + ]); + + let topic02 = 0x2a.into(); + let topic03 = 0xbd9fe6.into(); + + // Set logs memory and initialize TxnBloom and BlockBloom segments. + let initial_stack: Vec = vec![retdest]; + + let mut interpreter: Interpreter = Interpreter::new(logs_bloom, initial_stack); + let mut logs = vec![ + 0.into(), // unused + addr, + num_topics.into(), // num_topics + topic1, // topic1 + topic02, // topic2 + topic03, // topic3 + 32.into(), // data_len + ]; + let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .iter() + .copied() + .map(U256::from); + logs.extend(cur_data); + // The Bloom filter initialization is required for this test to ensure we have + // the correct length for the filters. Otherwise, some trailing zeroes could be + // missing. + interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter. + interpreter.set_memory_segment(Segment::LogsData, logs); + interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); + interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); + interpreter.run()?; + + // Second transaction. 
+ let loaded_bloom_u256 = interpreter.get_memory_segment(Segment::TxnBloom); + let loaded_bloom: Vec = loaded_bloom_u256 + .into_iter() + .map(|elt| elt.0[0] as u8) + .collect(); + + assert_eq!(first_bloom_bytes, loaded_bloom); + let topic12 = 0x4.into(); + let topic13 = 0x4920ea.into(); + let mut logs2 = vec![ + 0.into(), // unused + addr, + num_topics.into(), // num_topics + topic1, // topic1 + topic12, // topic2 + topic13, // topic3 + 32.into(), // data_len + ]; + let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .iter() + .copied() + .map(U256::from); + logs2.extend(cur_data); + + interpreter + .push(retdest) + .expect("The stack should not overflow"); + interpreter.generation_state.registers.program_counter = logs_bloom; + interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter. + interpreter.set_memory_segment(Segment::LogsData, logs2); + interpreter.set_memory_segment(Segment::Logs, vec![0.into()]); + interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1)); + interpreter.run()?; + + let second_bloom_bytes = vec![ + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + ]; + + let second_loaded_bloom_u256 = interpreter.get_memory_segment(Segment::TxnBloom); + let second_loaded_bloom: Vec = second_loaded_bloom_u256 + .into_iter() + .map(|elt| elt.0[0] as u8) + .collect(); + + assert_eq!(second_bloom_bytes, second_loaded_bloom); + + Ok(()) +} + +#[test] +fn test_mpt_insert_receipt() -> Result<()> { + // This test simulates a receipt processing to test `mpt_insert_receipt_trie`. + // For this, we need to set the data correctly in memory. + // In TrieData, we need to insert a receipt of the form: + // `[payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, + // [logs]]`. We also need to set TrieDataSize correctly. + + let retdest = 0xDEADBEEFu32.into(); + let trie_inputs = Default::default(); + let mpt_insert = KERNEL.global_labels["mpt_insert_receipt_trie"]; + let num_topics = 3; // Both transactions have the same number of topics. + let payload_len = 423; // Total payload length for each receipt. + let logs_payload_len = 157; // Payload length for all logs. + let log_payload_len = 155; // Payload length for one log. 
+ let num_logs = 1; + + // Receipt_0: + let status_0 = 1; + let cum_gas_used_0 = 0x016e5b; + let logs_bloom_0_bytes = vec![ + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x10, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + ]; + + // Logs_0: + let logs_bloom_0: Vec = logs_bloom_0_bytes + .into_iter() + .map(|elt| elt.into()) + .collect(); + + let addr = U256::from([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3, + 0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d, + ]); + + // The first topic is shared by the two transactions. 
+ let topic1 = U256::from([ + 0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31, + 0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66, + 0x67, 0x4d, + ]); + + let topic02 = 0x2a.into(); + let topic03 = 0xbd9fe6.into(); + + let mut logs_0 = vec![ + log_payload_len.into(), // payload_len + addr, + num_topics.into(), // num_topics + topic1, // topic1 + topic02, // topic2 + topic03, // topic3 + 32.into(), // data_len + ]; + let cur_data = hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") + .iter() + .copied() + .map(U256::from); + logs_0.extend(cur_data); + + let mut receipt: Vec = vec![423.into(), status_0.into(), cum_gas_used_0.into()]; + receipt.extend(logs_bloom_0); + receipt.push(logs_payload_len.into()); // logs_payload_len + receipt.push(num_logs.into()); // num_logs + receipt.extend(logs_0.clone()); + + let mut interpreter: Interpreter = Interpreter::new(0, vec![]); + initialize_mpts(&mut interpreter, &trie_inputs); + + // If TrieData is empty, we need to push 0 because the first value is always 0. + let mut cur_trie_data = interpreter.get_memory_segment(Segment::TrieData); + if cur_trie_data.is_empty() { + cur_trie_data.push(0.into()); + } + + // stack: transaction_nb, value_ptr, retdest + let num_nibbles = 2; + let initial_stack: Vec = vec![ + retdest, + cur_trie_data.len().into(), + 0x80.into(), + num_nibbles.into(), + ]; + for i in 0..initial_stack.len() { + interpreter + .push(initial_stack[i]) + .expect("The stack should not overflow"); + } + + interpreter.generation_state.registers.program_counter = mpt_insert; + + // Set memory. + cur_trie_data.extend(receipt); + interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); + interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, cur_trie_data.len().into()); + // First insertion. 
+ interpreter.run()?; + + // receipt_1: + let status_1 = 1; + let cum_gas_used_1 = 0x02dcb6; + let logs_bloom_1_bytes = vec![ + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, + 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, + ]; + + // Logs_1: + let logs_bloom_1: Vec = logs_bloom_1_bytes + .into_iter() + .map(|elt| elt.into()) + .collect(); + + let topic12 = 4.into(); + let topic13 = 0x4920ea.into(); + + let mut logs_1 = vec![ + log_payload_len.into(), // payload length + addr, + num_topics.into(), // nb topics + topic1, // topic1 + topic12, // topic2 + topic13, // topic3 + 32.into(), // data length + ]; + let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .iter() + .copied() + .map(U256::from); + logs_1.extend(cur_data); + + let mut receipt_1: Vec = vec![payload_len.into(), status_1.into(), cum_gas_used_1.into()]; + receipt_1.extend(logs_bloom_1); + receipt_1.push(logs_payload_len.into()); // logs payload len + receipt_1.push(num_logs.into()); // nb logs + 
receipt_1.extend(logs_1.clone()); + + // Get updated TrieData segment. + cur_trie_data = interpreter.get_memory_segment(Segment::TrieData); + let num_nibbles = 2; + let initial_stack2: Vec = vec![ + retdest, + cur_trie_data.len().into(), + 0x01.into(), + num_nibbles.into(), + ]; + for i in 0..initial_stack2.len() { + interpreter + .push(initial_stack2[i]) + .expect("The stack should not overflow"); + } + cur_trie_data.extend(receipt_1); + + // Set memory. + interpreter.generation_state.registers.program_counter = mpt_insert; + interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone()); + let trie_data_len = cur_trie_data.len().into(); + interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, trie_data_len); + interpreter.run()?; + + // Finally, check that the hashes correspond. + let mpt_hash_receipt = KERNEL.global_labels["mpt_hash_receipt_trie"]; + interpreter.generation_state.registers.program_counter = mpt_hash_receipt; + interpreter + .push(retdest) + .expect("The stack should not overflow"); + interpreter + .push(1.into()) // Initial length of the trie data segment, unused.; // Initial length of the trie data + // segment, unused. + .expect("The stack should not overflow"); + interpreter.run()?; + assert_eq!( + interpreter.stack()[1], + U256::from(hex!( + "da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98" + )) + ); + Ok(()) +} + +#[test] +fn test_bloom_two_logs() -> Result<()> { + // Tests the Bloom filter computation with two logs in one transaction. + + // address + let to = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x09, 0x5e, 0x7b, 0xae, 0xa6, 0xa6, 0xc7, 0xc4, 0xc2, + 0xdf, 0xeb, 0x97, 0x7e, 0xfa, 0xc3, 0x26, 0xaf, 0x55, 0x2d, 0x87, + ]; + + let retdest = 0xDEADBEEFu32.into(); + let logs_bloom = KERNEL.global_labels["logs_bloom"]; + + let initial_stack: Vec = vec![retdest]; + + // Set memory. 
+ let logs = vec![ + 0.into(), // unused + to.into(), // address + 0.into(), // num_topics + 0.into(), // data_len, + 0.into(), // unused: rlp + to.into(), + 2.into(), // num_topics + 0x62.into(), + 0x63.into(), + 5.into(), + [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xa1, + 0xb2, 0xc3, 0xd4, 0xe5, + ] + .into(), + ]; + let mut interpreter: Interpreter = Interpreter::new(logs_bloom, initial_stack); + interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter. + interpreter.set_memory_segment(Segment::LogsData, logs); + interpreter.set_memory_segment(Segment::Logs, vec![0.into(), 4.into()]); + interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(2)); + interpreter.run()?; + + let loaded_bloom_bytes: Vec = interpreter + .get_memory_segment(Segment::TxnBloom) + .into_iter() + .map(|elt| elt.0[0] as u8) + .collect(); + + let expected = hex!("00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000004000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000400000000000040000000000000000000000000002000000000000000000000000000").to_vec(); + + assert_eq!(expected, loaded_bloom_bytes); + Ok(()) +} + +fn logs_bloom_bytes_fn(logs_list: Vec<(Vec, Vec>)>) -> [u8; 256] { + // The first element of logs_list. 
+ let mut bloom = [0_u8; 256]; + + for log in logs_list { + let cur_addr = log.0; + let topics = log.1; + + add_to_bloom(&mut bloom, &cur_addr); + for topic in topics { + add_to_bloom(&mut bloom, &topic); + } + } + bloom +} + +fn add_to_bloom(bloom: &mut [u8; 256], bloom_entry: &[u8]) { + let bloom_hash = keccak(bloom_entry).to_fixed_bytes(); + + for idx in 0..3 { + let bit_pair = u16::from_be_bytes(bloom_hash[2 * idx..2 * (idx + 1)].try_into().unwrap()); + let bit_to_set = 0x07FF - (bit_pair & 0x07FF); + let byte_index = bit_to_set / 8; + let bit_value = 1 << (7 - bit_to_set % 8); + bloom[byte_index as usize] |= bit_value; + } +} diff --git a/evm_arithmetization/tests/log_opcode.rs b/evm_arithmetization/tests/log_opcode.rs index 2a57745fc..2d97f12d9 100644 --- a/evm_arithmetization/tests/log_opcode.rs +++ b/evm_arithmetization/tests/log_opcode.rs @@ -5,14 +5,16 @@ use std::time::Duration; use bytes::Bytes; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; +use evm_arithmetization::generation::mpt::transaction_testing::{ + AddressOption, LegacyTransactionRlp, +}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp}; use evm_arithmetization::generation::{GenerationInputs, TrieInputs}; use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; use evm_arithmetization::prover::prove; use evm_arithmetization::verifier::verify_proof; -use evm_arithmetization::{AllStark, Node, StarkConfig}; +use evm_arithmetization::{AllRecursiveCircuits, AllStark, Node, StarkConfig}; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; @@ -22,6 +24,7 @@ use smt_trie::code::hash_bytecode_u256; use smt_trie::db::{Db, MemoryDb}; use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; use 
smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = GoldilocksField; const D: usize = 2; @@ -41,14 +44,6 @@ fn test_log_opcodes() -> anyhow::Result<()> { // Private key: DCDFF53B4F013DBCDC717F89FE3BF4D8B10512AAE282B48E01D7530470382701 let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); - let beneficiary_state_key = keccak(beneficiary); - let sender_state_key = keccak(sender); - let to_hashed = keccak(to); - - let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); - // For the first code transaction code, we consider two LOG opcodes. The first // deals with 0 topics and empty data. The second deals with two topics, and // data of length 5, stored in memory. @@ -161,25 +156,6 @@ fn test_log_opcodes() -> anyhow::Result<()> { contract_code.insert(hash_bytecode_u256(vec![]), vec![]); contract_code.insert(code_hash, code.to_vec()); - // Update the state and receipt tries after the transaction, so that we have the - // correct expected tries: Update accounts - let beneficiary_account_after = AccountRlp { - nonce: 1.into(), - ..AccountRlp::default() - }; - - let sender_balance_after = sender_balance_before - gas_used * txn_gas_price; - let sender_account_after = AccountRlp { - balance: sender_balance_after.into(), - nonce: 1.into(), - ..AccountRlp::default() - }; - let to_account_after = AccountRlp { - balance: 9000000000u64.into(), - code_hash, - ..AccountRlp::default() - }; - // Update the receipt trie. let first_log = LogRlp { address: to.into(), @@ -208,14 +184,39 @@ fn test_log_opcodes() -> anyhow::Result<()> { receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec())?; // Update the state trie. 
- let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after.insert( - beneficiary_nibbles, - rlp::encode(&beneficiary_account_after).to_vec(), - )?; - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?; + let expected_state_smt_after = { + let mut smt = Smt::::default(); + let beneficiary_account_after = AccountRlp { + nonce: 1.into(), + ..AccountRlp::default() + }; + let sender_account_after = AccountRlp { + balance: (sender_balance_before - gas_used * txn_gas_price).into(), + nonce: 1.into(), + ..AccountRlp::default() + }; + let to_account_after = AccountRlp { + balance: 9000000000u64.into(), + code_hash, + ..AccountRlp::default() + }; + + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to), &to_account_after, &HashMap::new()); + + smt + }; let transactions_trie: HashedPartialTrie = Node::Leaf { nibbles: Nibbles::from_str("0x80").unwrap(), @@ -224,7 +225,7 @@ fn test_log_opcodes() -> anyhow::Result<()> { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -251,612 +252,559 @@ fn test_log_opcodes() -> anyhow::Result<()> { let proof = prove::(&all_stark, &config, inputs, &mut timing, None)?; timing.filter(Duration::from_millis(100)).print(); - // Assert that the proof leads to the correct state and receipt roots. + verify_proof(&all_stark, proof, &config) +} + +// Tests proving two transactions, one of which with logs, and aggregating them. +#[test] +#[ignore] // Too slow to run on CI. 
+fn test_log_with_aggreg() -> anyhow::Result<()> { + init_logger(); + + let code = [ + 0x64, 0xA1, 0xB2, 0xC3, 0xD4, 0xE5, 0x60, 0x0, 0x52, // MSTORE(0x0, 0xA1B2C3D4E5) + 0x60, 0x0, 0x60, 0x0, 0xA0, // LOG0(0x0, 0x0) + 0x60, 99, 0x60, 98, 0x60, 5, 0x60, 27, 0xA2, // LOG2(27, 5, 98, 99) + 0x00, + ]; + + let code_gas = 3 + 3 + 3 // PUSHs and MSTORE + + 3 + 3 + 375 // PUSHs and LOG0 + + 3 + 3 + 3 + 3 + 375 + 375*2 + 8*5 // PUSHs and LOG2 + + 3 // Memory expansion + ; + let gas_used = 21_000 + code_gas; + let code_hash = hash_bytecode_u256(code.to_vec()); + + // First transaction. + let all_stark = AllStark::::default(); + let config = StarkConfig::standard_fast_config(); + + let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); + let sender_first = hex!("af1276cbb260bb13deddb4209ae99ae6e497f446"); + let to_first = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); + let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552e89"); + + let beneficiary_account_before = AccountRlp { + nonce: 1.into(), + ..AccountRlp::default() + }; + let sender_balance_before = 1000000000000000000u64.into(); + let sender_account_before = AccountRlp { + balance: sender_balance_before, + ..AccountRlp::default() + }; + let to_account_before = AccountRlp { + ..AccountRlp::default() + }; + let to_account_second_before = AccountRlp { + code_hash, + ..AccountRlp::default() + }; + + // In the first transaction, the sender account sends `txn_value` to + // `to_account`. 
+ let gas_price = 10; + let txn_value = 0xau64; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(beneficiary), + &beneficiary_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(sender_first), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to_first), + &to_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_second_before, + &HashMap::new(), + ); + + let checkpoint_state_trie_root = H256::from_uint(&hashout2u(state_smt_before.root)); + + let tries_before = TrieInputs { + state_smt: state_smt_before.serialize(), + transactions_trie: Node::Empty.into(), + receipts_trie: Node::Empty.into(), + }; + + let txn = hex!("f85f800a82520894095e7baea6a6c7c4c2dfeb977efac326af552d870a8026a0122f370ed4023a6c253350c6bfb87d7d7eb2cd86447befee99e0a26b70baec20a07100ab1b3977f2b4571202b9f4b68850858caf5469222794600b5ce1cfb348ad"); + + let block_1_metadata = BlockMetadata { + block_beneficiary: Address::from(beneficiary), + block_timestamp: 0x03e8.into(), + block_number: 1.into(), + block_difficulty: 0x020000.into(), + block_gaslimit: 0x445566u32.into(), + block_chain_id: 1.into(), + block_base_fee: 0xa.into(), + block_gas_used: (22570 + 21000).into(), + block_bloom: [ + 0.into(), + 0.into(), + U256::from_dec_str( + "55213970774324510299479508399853534522527075462195808724319849722937344", + ) + .unwrap(), + U256::from_dec_str("1361129467683753853853498429727072845824").unwrap(), + 33554432.into(), + U256::from_dec_str("9223372036854775808").unwrap(), + U256::from_dec_str( + "3618502788666131106986593281521497120414687020801267626233049500247285563392", + ) + .unwrap(), + U256::from_dec_str("2722259584404615024560450425766186844160").unwrap(), + ], + block_random: Default::default(), + }; + + let beneficiary_account_after = AccountRlp { + nonce: 1.into(), + ..AccountRlp::default() + }; + + let 
sender_balance_after = sender_balance_before - gas_price * 21000 - txn_value; + let sender_account_after = AccountRlp { + balance: sender_balance_after, + nonce: 1.into(), + ..AccountRlp::default() + }; + let to_account_after = AccountRlp { + balance: txn_value.into(), + ..AccountRlp::default() + }; + + let mut contract_code = HashMap::new(); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); + contract_code.insert(code_hash, code.to_vec()); + + let expected_state_smt_after = { + let mut smt = Smt::::default(); + + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(sender_first), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to_first), &to_account_after, &HashMap::new()); + set_account( + &mut smt, + H160(to), + &to_account_second_before, + &HashMap::new(), + ); + + smt + }; + + // Compute new receipt trie. + let mut receipts_trie = HashedPartialTrie::from(Node::Empty); + let receipt_0 = LegacyReceiptRlp { + status: true, + cum_gas_used: 21000u64.into(), + bloom: [0x00; 256].to_vec().into(), + logs: vec![], + }; + receipts_trie.insert( + Nibbles::from_str("0x80").unwrap(), + rlp::encode(&receipt_0).to_vec(), + )?; + + let mut transactions_trie: HashedPartialTrie = Node::Leaf { + nibbles: Nibbles::from_str("0x80").unwrap(), + value: txn.to_vec(), + } + .into(); + + let tries_after = TrieRoots { + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), + transactions_root: transactions_trie.hash(), + receipts_root: receipts_trie.clone().hash(), + }; + + let block_1_hash = + H256::from_str("0x0101010101010101010101010101010101010101010101010101010101010101")?; + let mut block_hashes = vec![H256::default(); 256]; + + let inputs_first = GenerationInputs { + signed_txn: Some(txn.to_vec()), + withdrawals: vec![], + tries: tries_before, + trie_roots_after: tries_after, + contract_code, + checkpoint_state_trie_root, + block_metadata: 
block_1_metadata.clone(), + txn_number_before: 0.into(), + gas_used_before: 0.into(), + gas_used_after: 21000u64.into(), + block_hashes: BlockHashes { + prev_hashes: block_hashes.clone(), + cur_hash: block_1_hash, + }, + }; + + // Preprocess all circuits. + let all_circuits = AllRecursiveCircuits::::new( + &all_stark, + &[16..17, 11..15, 13..18, 14..15, 10..11, 12..13, 17..20, 6..7], + &config, + ); + + let mut timing = TimingTree::new("prove root first", log::Level::Info); + let (root_proof_first, public_values_first) = + all_circuits.prove_root(&all_stark, &config, inputs_first, &mut timing, None)?; + + timing.filter(Duration::from_millis(100)).print(); + all_circuits.verify_root(root_proof_first.clone())?; + + // The gas used and transaction number are fed to the next transaction, so the + // two proofs can be correctly aggregated. + let gas_used_second = public_values_first.extra_block_data.gas_used_after; + + // Prove second transaction. In this second transaction, the code with logs is + // executed. + + let state_smt_before = expected_state_smt_after; + + let tries_before = TrieInputs { + state_smt: state_smt_before.serialize(), + transactions_trie: transactions_trie.clone(), + receipts_trie: receipts_trie.clone(), + }; + + // Prove a transaction which carries out two LOG opcodes. + let txn_gas_price = 10; + let txn_2 = hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b"); + + let mut contract_code = HashMap::new(); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); + contract_code.insert(code_hash, code.to_vec()); + + // Update the state and receipt tries after the transaction, so that we have the + // correct expected tries: Update accounts. 
+ let beneficiary_account_after = AccountRlp { + nonce: 1.into(), + ..AccountRlp::default() + }; + + let sender_balance_after = sender_balance_after - gas_used * txn_gas_price; + let sender_account_after = AccountRlp { + balance: sender_balance_after, + nonce: 2.into(), + ..AccountRlp::default() + }; + let balance_after = to_account_after.balance; + let to_account_after = AccountRlp { + balance: balance_after, + ..AccountRlp::default() + }; + let to_account_second_after = AccountRlp { + balance: to_account_second_before.balance, + code_hash, + ..AccountRlp::default() + }; + + // Update the receipt trie. + let first_log = LogRlp { + address: to.into(), + topics: vec![], + data: Bytes::new(), + }; + + let second_log = LogRlp { + address: to.into(), + topics: vec![ + hex!("0000000000000000000000000000000000000000000000000000000000000062").into(), /* dec: 98 */ + hex!("0000000000000000000000000000000000000000000000000000000000000063").into(), /* dec: 99 */ + ], + data: hex!("a1b2c3d4e5").to_vec().into(), + }; + + let receipt = LegacyReceiptRlp { + status: true, + cum_gas_used: (22570 + 21000).into(), + bloom: hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000001000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000800000000000000008000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000800002000000000000000000000000000").to_vec().into(), + logs: vec![first_log, second_log], + }; + + let receipt_nibbles = Nibbles::from_str("0x01").unwrap(); // RLP(1) = 0x1 + receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec())?; + + // Update the state trie. 
+ let expected_state_smt_after = { + let mut smt = Smt::::default(); + set_account( + &mut smt, + H160(beneficiary), + &beneficiary_account_after, + &HashMap::new(), + ); + set_account( + &mut smt, + H160(sender_first), + &sender_account_after, + &HashMap::new(), + ); + set_account(&mut smt, H160(to_first), &to_account_after, &HashMap::new()); + set_account( + &mut smt, + H160(to), + &to_account_second_after, + &HashMap::new(), + ); + + smt + }; + + transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), txn_2.to_vec())?; + + let block_1_state_root = H256::from_uint(&hashout2u(expected_state_smt_after.root)); + + let trie_roots_after = TrieRoots { + state_root: block_1_state_root, + transactions_root: transactions_trie.hash(), + receipts_root: receipts_trie.hash(), + }; + + let inputs = GenerationInputs { + signed_txn: Some(txn_2.to_vec()), + withdrawals: vec![], + tries: tries_before, + trie_roots_after: trie_roots_after.clone(), + contract_code, + checkpoint_state_trie_root, + block_metadata: block_1_metadata, + txn_number_before: 1.into(), + gas_used_before: gas_used_second, + gas_used_after: receipt.cum_gas_used, + block_hashes: BlockHashes { + prev_hashes: block_hashes.clone(), + cur_hash: block_1_hash, + }, + }; + + let mut timing = TimingTree::new("prove root second", log::Level::Info); + let (root_proof_second, public_values_second) = + all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, None.clone())?; + timing.filter(Duration::from_millis(100)).print(); + + all_circuits.verify_root(root_proof_second.clone())?; + + let (agg_proof, updated_agg_public_values) = all_circuits.prove_aggregation( + false, + &root_proof_first, + public_values_first, + false, + &root_proof_second, + public_values_second, + )?; + all_circuits.verify_aggregation(&agg_proof)?; + let (first_block_proof, _block_public_values) = + all_circuits.prove_block(None, &agg_proof, updated_agg_public_values)?; + all_circuits.verify_block(&first_block_proof)?; + + // Prove the 
next, empty block. + + let block_2_hash = + H256::from_str("0x0123456789101112131415161718192021222324252627282930313233343536")?; + block_hashes[255] = block_1_hash; + + let block_2_metadata = BlockMetadata { + block_beneficiary: Address::from(beneficiary), + block_timestamp: 0x03e8.into(), + block_number: 2.into(), + block_difficulty: 0x020000.into(), + block_gaslimit: 0x445566u32.into(), + block_chain_id: 1.into(), + block_base_fee: 0xa.into(), + ..Default::default() + }; + + let mut contract_code = HashMap::new(); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); + + let inputs = GenerationInputs { + signed_txn: None, + withdrawals: vec![], + tries: TrieInputs { + state_smt: expected_state_smt_after.serialize(), + transactions_trie: Node::Empty.into(), + receipts_trie: Node::Empty.into(), + }, + trie_roots_after: TrieRoots { + state_root: trie_roots_after.state_root, + transactions_root: HashedPartialTrie::from(Node::Empty).hash(), + receipts_root: HashedPartialTrie::from(Node::Empty).hash(), + }, + contract_code, + checkpoint_state_trie_root: block_1_state_root, // We use block 1 as new checkpoint. + block_metadata: block_2_metadata, + txn_number_before: 0.into(), + gas_used_before: 0.into(), + gas_used_after: 0.into(), + block_hashes: BlockHashes { + prev_hashes: block_hashes, + cur_hash: block_2_hash, + }, + }; + + let (root_proof, public_values) = + all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, None)?; + all_circuits.verify_root(root_proof.clone())?; + + // We can just duplicate the initial proof as the state didn't change. + let (agg_proof, updated_agg_public_values) = all_circuits.prove_aggregation( + false, + &root_proof, + public_values.clone(), + false, + &root_proof, + public_values, + )?; + all_circuits.verify_aggregation(&agg_proof)?; + + let (second_block_proof, _block_public_values) = all_circuits.prove_block( + None, // We don't specify a previous proof, considering block 1 as the new checkpoint. 
+ &agg_proof, + updated_agg_public_values, + )?; + all_circuits.verify_block(&second_block_proof) +} + +/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 +#[test] +fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> { + // This test checks that inserting into the transaction and receipt + // `HashedPartialTrie`s works as expected. + let mut example_txn_trie = HashedPartialTrie::from(Node::Empty); + + // We consider two transactions, with one log each. + let transaction_0 = LegacyTransactionRlp { + nonce: 157823u64.into(), + gas_price: 1000000000u64.into(), + gas: 250000u64.into(), + to: AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), + value: 0u64.into(), + data: hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000bd9fe6f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") + .to_vec() + .into(), + v: 0x1c.into(), + r: hex!("d0eeac4841caf7a894dd79e6e633efc2380553cdf8b786d1aa0b8a8dee0266f4").into(), + s: hex!("740710eed9696c663510b7fb71a553112551121595a54ec6d2ec0afcec72a973").into(), + }; + + // Insert the first transaction into the transaction trie. 
+ example_txn_trie.insert( + Nibbles::from_str("0x80").unwrap(), // RLP(0) = 0x80 + rlp::encode(&transaction_0).to_vec(), + )?; + + let transaction_1 = LegacyTransactionRlp { + nonce: 157824u64.into(), + gas_price: 1000000000u64.into(), + gas: 250000u64.into(), + to: AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), + value: 0u64.into(), + data: hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000004920eaa814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .to_vec() + .into(), + v: 0x1b.into(), + r: hex!("a3ff39967683fc684dc7b857d6f62723e78804a14b091a058ad95cc1b8a0281f").into(), + s: hex!("51b156e05f21f499fa1ae47ebf536b15a237208f1d4a62e33956b6b03cf47742").into(), + }; + + // Insert the second transaction into the transaction trie. + example_txn_trie.insert( + Nibbles::from_str("0x01").unwrap(), + rlp::encode(&transaction_1).to_vec(), + )?; + + // Receipts: + let mut example_receipt_trie = HashedPartialTrie::from(Node::Empty); + + let log_0 = LogRlp { + address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), + topics: vec![ + hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(), + hex!("000000000000000000000000000000000000000000000000000000000000002a").into(), + hex!("0000000000000000000000000000000000000000000000000000000000bd9fe6").into(), + ], + data: hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") + .to_vec() + .into(), + }; + + let receipt_0 = LegacyReceiptRlp { + status: true, + cum_gas_used: 0x016e5bu64.into(), + bloom: 
hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000020000000000008000000000000000000000000").to_vec().into(), + logs: vec![log_0], + }; + + // Insert the first receipt into the receipt trie. + example_receipt_trie.insert( + Nibbles::from_str("0x80").unwrap(), // RLP(0) is 0x80 + rlp::encode(&receipt_0).to_vec(), + )?; + + let log_1 = LogRlp { + address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), + topics: vec![ + hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(), + hex!("0000000000000000000000000000000000000000000000000000000000000004").into(), + hex!("00000000000000000000000000000000000000000000000000000000004920ea").into(), + ], + data: hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") + .to_vec() + .into(), + }; + + let receipt_1 = LegacyReceiptRlp { + status: true, + cum_gas_used: 0x02dcb6u64.into(), + bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000").to_vec().into(), + logs: vec![log_1], + }; + + // Insert the second receipt into the receipt trie. 
+ example_receipt_trie.insert( + Nibbles::from_str("0x01").unwrap(), + rlp::encode(&receipt_1).to_vec(), + )?; + + // Check that the trie hashes are correct. assert_eq!( - proof.public_values.trie_roots_after.state_root, - expected_state_trie_after.hash() + example_txn_trie.hash(), + hex!("3ab7120d12e1fc07303508542602beb7eecfe8f262b83fd71eefe7d6205242ce").into() ); assert_eq!( - proof.public_values.trie_roots_after.receipts_root, - receipts_trie.hash() + example_receipt_trie.hash(), + hex!("da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98").into() ); - verify_proof(&all_stark, proof, &config) + Ok(()) } -// TODO: fix -// Tests proving two transactions, one of which with logs, and aggregating them. -// #[test] -// #[ignore] // Too slow to run on CI. -// fn test_log_with_aggreg() -> anyhow::Result<()> { -// init_logger(); -// -// let code = [ -// 0x64, 0xA1, 0xB2, 0xC3, 0xD4, 0xE5, 0x60, 0x0, 0x52, // MSTORE(0x0, -// 0xA1B2C3D4E5) 0x60, 0x0, 0x60, 0x0, 0xA0, // LOG0(0x0, 0x0) -// 0x60, 99, 0x60, 98, 0x60, 5, 0x60, 27, 0xA2, // LOG2(27, 5, 98, 99) -// 0x00, -// ]; -// -// let code_gas = 3 + 3 + 3 // PUSHs and MSTORE -// + 3 + 3 + 375 // PUSHs and LOG0 -// + 3 + 3 + 3 + 3 + 375 + 375*2 + 8*5 // PUSHs and LOG2 -// + 3 // Memory expansion -// ; -// -// let gas_used = 21_000 + code_gas; -// -// let code_hash = hashout2u(hash_contract_bytecode(code.to_vec())); -// -// // First transaction. 
-// let all_stark = AllStark::::default(); -// let config = StarkConfig::standard_fast_config(); -// -// let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); -// let sender_first = hex!("af1276cbb260bb13deddb4209ae99ae6e497f446"); -// let to_first = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); -// let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552e89"); -// -// let beneficiary_state_key = keccak(beneficiary); -// let sender_state_key = keccak(sender_first); -// let to_hashed = keccak(to_first); -// let to_hashed_2 = keccak(to); -// -// let beneficiary_nibbles = -// Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap(); -// let sender_nibbles = -// Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); -// let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap(); -// let to_second_nibbles = -// Nibbles::from_bytes_be(to_hashed_2.as_bytes()).unwrap(); -// -// let beneficiary_account_before = AccountRlp { -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let sender_balance_before = 1000000000000000000u64.into(); -// let sender_account_before = AccountRlp { -// balance: sender_balance_before, -// ..AccountRlp::default() -// }; -// let to_account_before = AccountRlp { -// ..AccountRlp::default() -// }; -// let to_account_second_before = AccountRlp { -// code_hash, -// ..AccountRlp::default() -// }; -// -// // In the first transaction, the sender account sends `txn_value` to -// `to_account`. 
let gas_price = 10; -// let txn_value = 0xau64; -// let mut state_smt_before = Smt::::default(); -// set_account( -// &mut state_smt_before, -// H160(beneficiary), -// &beneficiary_account_before, -// &HashMap::new(), -// ); -// set_account( -// &mut state_smt_before, -// H160(sender_first), -// &sender_account_before, -// &HashMap::new(), -// ); -// set_account( -// &mut state_smt_before, -// H160(to_first), -// &to_account_before, -// &HashMap::new(), -// ); -// set_account( -// &mut state_smt_before, -// H160(to), -// &to_account_second_before, -// &HashMap::new(), -// ); -// let checkpoint_state_trie_root = -// H256::from_uint(&hashout2u(state_smt_before.root)); -// -// let tries_before = TrieInputs { -// state_smt: state_smt_before.serialize(), -// transactions_trie: Node::Empty.into(), -// receipts_trie: Node::Empty.into(), -// }; -// -// let txn = -// hex!("f85f800a82520894095e7baea6a6c7c4c2dfeb977efac326af552d870a8026a0122f370ed4023a6c253350c6bfb87d7d7eb2cd86447befee99e0a26b70baec20a07100ab1b3977f2b4571202b9f4b68850858caf5469222794600b5ce1cfb348ad" -// ); -// -// let block_1_metadata = BlockMetadata { -// block_beneficiary: Address::from(beneficiary), -// block_timestamp: 0x03e8.into(), -// block_number: 1.into(), -// block_difficulty: 0x020000.into(), -// block_gaslimit: 0x445566u32.into(), -// block_chain_id: 1.into(), -// block_base_fee: 0xa.into(), -// block_gas_used: (22570 + 21000).into(), -// block_bloom: [ -// 0.into(), -// 0.into(), -// U256::from_dec_str( -// -// "55213970774324510299479508399853534522527075462195808724319849722937344", -// ) -// .unwrap(), -// -// U256::from_dec_str("1361129467683753853853498429727072845824").unwrap(), -// 33554432.into(), -// U256::from_dec_str("9223372036854775808").unwrap(), -// U256::from_dec_str( -// -// "3618502788666131106986593281521497120414687020801267626233049500247285563392" -// , ) -// .unwrap(), -// -// U256::from_dec_str("2722259584404615024560450425766186844160").unwrap(), -// ], -// block_random: 
Default::default(), -// }; -// -// let beneficiary_account_after = AccountRlp { -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// -// let sender_balance_after = sender_balance_before - gas_price * 21000 - -// txn_value; let sender_account_after = AccountRlp { -// balance: sender_balance_after, -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// let to_account_after = AccountRlp { -// balance: txn_value.into(), -// ..AccountRlp::default() -// }; -// -// let mut contract_code = HashMap::new(); -// contract_code.insert(hashout2u(hash_contract_bytecode(vec![])), vec![]); -// contract_code.insert(code_hash, code.to_vec()); -// -// let mut expected_state_trie_after = Smt::::default(); -// set_account( -// &mut expected_state_trie_after, -// H160(beneficiary), -// &beneficiary_account_after, -// &HashMap::new(), -// ); -// set_account( -// &mut expected_state_trie_after, -// H160(sender_first), -// &sender_account_after, -// &HashMap::new(), -// ); -// set_account( -// &mut expected_state_trie_after, -// H160(to_first), -// &to_account_after, -// &HashMap::new(), -// ); -// set_account( -// &mut expected_state_trie_after, -// H160(to), -// &to_account_second_before, -// &HashMap::new(), -// ); -// -// // Compute new receipt trie. 
-// let mut receipts_trie = HashedPartialTrie::from(Node::Empty); -// let receipt_0 = LegacyReceiptRlp { -// status: true, -// cum_gas_used: 21000u64.into(), -// bloom: [0x00; 256].to_vec().into(), -// logs: vec![], -// }; -// receipts_trie.insert( -// Nibbles::from_str("0x80").unwrap(), -// rlp::encode(&receipt_0).to_vec(), -// ); -// -// let mut transactions_trie: HashedPartialTrie = Node::Leaf { -// nibbles: Nibbles::from_str("0x80").unwrap(), -// value: txn.to_vec(), -// } -// .into(); -// -// let tries_after = TrieRoots { -// state_root: -// H256::from_uint(&hashout2u(expected_state_smt_after.root)), -// transactions_root: transactions_trie.hash(), -// receipts_root: receipts_trie.clone().hash(), -// }; -// -// let block_1_hash = -// H256::from_str(" -// 0x0101010101010101010101010101010101010101010101010101010101010101")?; -// let mut block_hashes = vec![H256::default(); 256]; -// -// let inputs_first = GenerationInputs { -// signed_txn: Some(txn.to_vec()), -// withdrawals: vec![], -// tries: tries_before, -// trie_roots_after: tries_after, -// contract_code, -// checkpoint_state_trie_root, -// block_metadata: block_1_metadata.clone(), -// txn_number_before: 0.into(), -// gas_used_before: 0.into(), -// gas_used_after: 21000u64.into(), -// block_hashes: BlockHashes { -// prev_hashes: block_hashes.clone(), -// cur_hash: block_1_hash, -// }, -// }; -// -// // Preprocess all circuits. 
-// let all_circuits = AllRecursiveCircuits::::new( -// &all_stark, -// &[16..17, 12..15, 14..18, 14..15, 9..10, 12..13, 17..20], -// &config, -// ); -// -// let mut timing = TimingTree::new("prove root first", log::Level::Info); -// let (root_proof_first, public_values_first) = -// all_circuits.prove_root(&all_stark, &config, inputs_first, &mut -// timing, None)?; -// -// timing.filter(Duration::from_millis(100)).print(); -// all_circuits.verify_root(root_proof_first.clone())?; -// -// // The gas used and transaction number are fed to the next transaction, -// so the two proofs can be correctly aggregated. let gas_used_second = -// public_values_first.extra_block_data.gas_used_after; -// -// // Prove second transaction. In this second transaction, the code with -// logs is executed. -// -// let state_trie_before = expected_state_trie_after; -// -// let tries_before = TrieInputs { -// state_smt: state_trie_before.serialize(), -// transactions_trie: transactions_trie.clone(), -// receipts_trie: receipts_trie.clone(), -// }; -// -// // Prove a transaction which carries out two LOG opcodes. -// let txn_gas_price = 10; -// let txn_2 = -// hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b" -// ); -// -// let mut contract_code = HashMap::new(); -// contract_code.insert(keccak(vec![]), vec![]); -// contract_code.insert(code_hash, code.to_vec()); -// -// // Update the state and receipt tries after the transaction, so that we -// have the correct expected tries: // Update accounts. 
-// let beneficiary_account_after = AccountRlp { -// nonce: 1.into(), -// ..AccountRlp::default() -// }; -// -// let sender_balance_after = sender_balance_after - gas_used * -// txn_gas_price; let sender_account_after = AccountRlp { -// balance: sender_balance_after, -// nonce: 2.into(), -// ..AccountRlp::default() -// }; -// let balance_after = to_account_after.balance; -// let to_account_after = AccountRlp { -// balance: balance_after, -// ..AccountRlp::default() -// }; -// let to_account_second_after = AccountRlp { -// balance: to_account_second_before.balance, -// code_hash, -// ..AccountRlp::default() -// }; -// -// // Update the receipt trie. -// let first_log = LogRlp { -// address: to.into(), -// topics: vec![], -// data: Bytes::new(), -// }; -// -// let second_log = LogRlp { -// address: to.into(), -// topics: vec![ -// -// hex!("0000000000000000000000000000000000000000000000000000000000000062"). -// into(), // dec: 98 -// hex!("0000000000000000000000000000000000000000000000000000000000000063"). -// into(), // dec: 99 ], -// data: hex!("a1b2c3d4e5").to_vec().into(), -// }; -// -// let receipt = LegacyReceiptRlp { -// status: true, -// cum_gas_used: (22570 + 21000).into(), -// bloom: -// hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000001000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000800000000000000008000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000800002000000000000000000000000000" -// ).to_vec().into(), logs: vec![first_log, second_log], -// }; -// -// let receipt_nibbles = Nibbles::from_str("0x01").unwrap(); // RLP(1) = 0x1 -// -// receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec()); -// -// // Update the state trie. 
-// let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); -// expected_state_trie_after.insert( -// beneficiary_nibbles, -// rlp::encode(&beneficiary_account_after).to_vec(), -// ); -// expected_state_trie_after.insert(sender_nibbles, -// rlp::encode(&sender_account_after).to_vec()); expected_state_trie_after. -// insert(to_nibbles, rlp::encode(&to_account_after).to_vec()); -// expected_state_trie_after.insert( -// to_second_nibbles, -// rlp::encode(&to_account_second_after).to_vec(), -// ); -// -// transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), -// txn_2.to_vec()); -// -// let block_1_state_root = expected_state_trie_after.hash(); -// -// let trie_roots_after = TrieRoots { -// state_root: block_1_state_root, -// transactions_root: transactions_trie.hash(), -// receipts_root: receipts_trie.hash(), -// }; -// -// let inputs = GenerationInputs { -// signed_txn: Some(txn_2.to_vec()), -// withdrawals: vec![], -// tries: tries_before, -// trie_roots_after: trie_roots_after.clone(), -// contract_code, -// checkpoint_state_trie_root, -// block_metadata: block_1_metadata, -// txn_number_before: 1.into(), -// gas_used_before: gas_used_second, -// gas_used_after: receipt.cum_gas_used, -// block_hashes: BlockHashes { -// prev_hashes: block_hashes.clone(), -// cur_hash: block_1_hash, -// }, -// }; -// -// let mut timing = TimingTree::new("prove root second", log::Level::Info); -// let (root_proof_second, public_values_second) = -// all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, -// None.clone())?; timing.filter(Duration::from_millis(100)).print(); -// -// all_circuits.verify_root(root_proof_second.clone())?; -// -// let (agg_proof, updated_agg_public_values) = -// all_circuits.prove_aggregation( false, -// &root_proof_first, -// public_values_first, -// false, -// &root_proof_second, -// public_values_second, -// )?; -// all_circuits.verify_aggregation(&agg_proof)?; -// let (first_block_proof, _block_public_values) = -// 
all_circuits.prove_block(None, &agg_proof, -// updated_agg_public_values)?; all_circuits.verify_block(& -// first_block_proof)?; -// -// // Prove the next, empty block. -// -// let block_2_hash = -// H256::from_str(" -// 0x0123456789101112131415161718192021222324252627282930313233343536")?; -// block_hashes[255] = block_1_hash; -// -// let block_2_metadata = BlockMetadata { -// block_beneficiary: Address::from(beneficiary), -// block_timestamp: 0x03e8.into(), -// block_number: 2.into(), -// block_difficulty: 0x020000.into(), -// block_gaslimit: 0x445566u32.into(), -// block_chain_id: 1.into(), -// block_base_fee: 0xa.into(), -// ..Default::default() -// }; -// -// let mut contract_code = HashMap::new(); -// contract_code.insert(keccak(vec![]), vec![]); -// -// let inputs = GenerationInputs { -// signed_txn: None, -// withdrawals: vec![], -// tries: TrieInputs { -// state_trie: expected_state_trie_after, -// transactions_trie: Node::Empty.into(), -// receipts_trie: Node::Empty.into(), -// storage_tries: vec![], -// }, -// trie_roots_after: TrieRoots { -// state_root: trie_roots_after.state_root, -// transactions_root: HashedPartialTrie::from(Node::Empty).hash(), -// receipts_root: HashedPartialTrie::from(Node::Empty).hash(), -// }, -// contract_code, -// checkpoint_state_trie_root: block_1_state_root, // We use block 1 as -// new checkpoint. block_metadata: block_2_metadata, -// txn_number_before: 0.into(), -// gas_used_before: 0.into(), -// gas_used_after: 0.into(), -// block_hashes: BlockHashes { -// prev_hashes: block_hashes, -// cur_hash: block_2_hash, -// }, -// }; -// -// let (root_proof, public_values) = -// all_circuits.prove_root(&all_stark, &config, inputs, &mut timing, -// None)?; all_circuits.verify_root(root_proof.clone())?; -// -// // We can just duplicate the initial proof as the state didn't change. 
-// let (agg_proof, updated_agg_public_values) = -// all_circuits.prove_aggregation( false, -// &root_proof, -// public_values.clone(), -// false, -// &root_proof, -// public_values, -// )?; -// all_circuits.verify_aggregation(&agg_proof)?; -// -// let (second_block_proof, _block_public_values) = -// all_circuits.prove_block( None, // We don't specify a previous proof, -// considering block 1 as the new checkpoint. &agg_proof, -// updated_agg_public_values, -// )?; -// all_circuits.verify_block(&second_block_proof) -// } -// -// /// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000 -// #[test] -// fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> { -// // This test checks that inserting into the transaction and receipt -// `HashedPartialTrie`s works as expected. let mut example_txn_trie = -// HashedPartialTrie::from(Node::Empty); -// -// // We consider two transactions, with one log each. -// let transaction_0 = LegacyTransactionRlp { -// nonce: 157823u64.into(), -// gas_price: 1000000000u64.into(), -// gas: 250000u64.into(), -// to: -// AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), -// value: 0u64.into(), -// data: -// hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000bd9fe6f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58" -// ) .to_vec() -// .into(), -// v: 0x1c.into(), -// r: -// hex!("d0eeac4841caf7a894dd79e6e633efc2380553cdf8b786d1aa0b8a8dee0266f4"). -// into(), s: -// hex!("740710eed9696c663510b7fb71a553112551121595a54ec6d2ec0afcec72a973"). -// into(), }; -// -// // Insert the first transaction into the transaction trie. 
-// example_txn_trie.insert( -// Nibbles::from_str("0x80").unwrap(), // RLP(0) = 0x80 -// rlp::encode(&transaction_0).to_vec(), -// ); -// -// let transaction_1 = LegacyTransactionRlp { -// nonce: 157824u64.into(), -// gas_price: 1000000000u64.into(), -// gas: 250000u64.into(), -// to: -// AddressOption(Some(hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into())), -// value: 0u64.into(), -// data: -// hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000004920eaa814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243" -// ) .to_vec() -// .into(), -// v: 0x1b.into(), -// r: -// hex!("a3ff39967683fc684dc7b857d6f62723e78804a14b091a058ad95cc1b8a0281f"). -// into(), s: -// hex!("51b156e05f21f499fa1ae47ebf536b15a237208f1d4a62e33956b6b03cf47742"). -// into(), }; -// -// // Insert the second transaction into the transaction trie. -// example_txn_trie.insert( -// Nibbles::from_str("0x01").unwrap(), -// rlp::encode(&transaction_1).to_vec(), -// ); -// -// // Receipts: -// let mut example_receipt_trie = HashedPartialTrie::from(Node::Empty); -// -// let log_0 = LogRlp { -// address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), -// topics: vec![ -// -// hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d"). -// into(), -// hex!("000000000000000000000000000000000000000000000000000000000000002a"). -// into(), -// hex!("0000000000000000000000000000000000000000000000000000000000bd9fe6"). 
-// into(), ], -// data: -// hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58") -// .to_vec() -// .into(), -// }; -// -// let receipt_0 = LegacyReceiptRlp { -// status: true, -// cum_gas_used: 0x016e5bu64.into(), -// bloom: -// hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000020000000000008000000000000000000000000" -// ).to_vec().into(), logs: vec![log_0], -// }; -// -// // Insert the first receipt into the receipt trie. -// example_receipt_trie.insert( -// Nibbles::from_str("0x80").unwrap(), // RLP(0) is 0x80 -// rlp::encode(&receipt_0).to_vec(), -// ); -// -// let log_1 = LogRlp { -// address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(), -// topics: vec![ -// -// hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d"). -// into(), -// hex!("0000000000000000000000000000000000000000000000000000000000000004"). -// into(), -// hex!("00000000000000000000000000000000000000000000000000000000004920ea"). 
-// into(), ], -// data: -// hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243") -// .to_vec() -// .into(), -// }; -// -// let receipt_1 = LegacyReceiptRlp { -// status: true, -// cum_gas_used: 0x02dcb6u64.into(), -// bloom: -// hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000" -// ).to_vec().into(), logs: vec![log_1], -// }; -// -// // Insert the second receipt into the receipt trie. -// example_receipt_trie.insert( -// Nibbles::from_str("0x01").unwrap(), -// rlp::encode(&receipt_1).to_vec(), -// ); -// -// // Check that the trie hashes are correct. 
-// assert_eq!( -// example_txn_trie.hash(), -// hex!(" -// 3ab7120d12e1fc07303508542602beb7eecfe8f262b83fd71eefe7d6205242ce").into() -// ); -// -// assert_eq!( -// example_receipt_trie.hash(), -// hex!(" -// da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98").into() -// ); -// -// Ok(()) -// } - fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } From a7f6784b2137f7fcb52aae9695e588363d3b136c Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Tue, 9 Apr 2024 14:03:45 +0900 Subject: [PATCH 09/19] Update `proof_gen` (partially) to accomodate for type2 changes (#153) --- proof_gen/Cargo.toml | 4 ++-- proof_gen/src/constants.rs | 2 ++ proof_gen/src/proof_gen.rs | 9 ++++++--- proof_gen/src/proof_types.rs | 4 ++-- proof_gen/src/prover_state.rs | 4 ++++ 5 files changed, 16 insertions(+), 7 deletions(-) diff --git a/proof_gen/Cargo.toml b/proof_gen/Cargo.toml index 2b5376465..0785d41f4 100644 --- a/proof_gen/Cargo.toml +++ b/proof_gen/Cargo.toml @@ -17,5 +17,5 @@ plonky2 = { workspace = true } serde = { workspace = true } # Local dependencies -trace_decoder = "0.2.0" -evm_arithmetization = "0.1.2" # TODO: adapt with type2 and bring back paths +# trace_decoder = "0.2.0" # TODO: adapt with type2 and bring back paths +evm_arithmetization = { version = "0.1.3", path = "../evm_arithmetization" } diff --git a/proof_gen/src/constants.rs b/proof_gen/src/constants.rs index 808f9f2b7..fb26de4f2 100644 --- a/proof_gen/src/constants.rs +++ b/proof_gen/src/constants.rs @@ -16,3 +16,5 @@ pub(crate) const DEFAULT_KECCAK_SPONGE_RANGE: Range = 9..25; pub(crate) const DEFAULT_LOGIC_RANGE: Range = 12..28; /// Default range to be used for the `MemoryStark` table. pub(crate) const DEFAULT_MEMORY_RANGE: Range = 17..30; +/// Default range to be used for the `PoseidonStark` table. 
+pub(crate) const DEFAULT_POSEIDON_RANGE: Range = 4..25; diff --git a/proof_gen/src/proof_gen.rs b/proof_gen/src/proof_gen.rs index 58435d0dc..59f14a24d 100644 --- a/proof_gen/src/proof_gen.rs +++ b/proof_gen/src/proof_gen.rs @@ -3,15 +3,16 @@ use std::sync::{atomic::AtomicBool, Arc}; -use evm_arithmetization::{AllStark, StarkConfig}; +use evm_arithmetization::{AllStark, GenerationInputs, StarkConfig}; use plonky2::{ gates::noop::NoopGate, iop::witness::PartialWitness, plonk::{circuit_builder::CircuitBuilder, circuit_data::CircuitConfig}, util::timing::TimingTree, }; -use trace_decoder::types::TxnProofGenIR; +// TODO: bring back import from trace_decoder once SMT logic is implemented +// use trace_decoder::types::TxnProofGenIR; use crate::{ proof_types::{AggregatableProof, GeneratedAggProof, GeneratedBlockProof, GeneratedTxnProof}, prover_state::ProverState, @@ -44,7 +45,9 @@ impl From for ProofGenError { /// Generates a transaction proof from some IR data. pub fn generate_txn_proof( p_state: &ProverState, - gen_inputs: TxnProofGenIR, + // gen_inputs: TxnProofGenIR, // TODO: bring back import from trace_decoder once SMT logic is + // implemented + gen_inputs: GenerationInputs, abort_signal: Option>, ) -> ProofGenResult { let (intern, p_vals) = p_state diff --git a/proof_gen/src/proof_types.rs b/proof_gen/src/proof_types.rs index acac783a8..11a8290b2 100644 --- a/proof_gen/src/proof_types.rs +++ b/proof_gen/src/proof_types.rs @@ -3,8 +3,8 @@ use evm_arithmetization::proof::PublicValues; use serde::{Deserialize, Serialize}; -use trace_decoder::types::BlockHeight; +// use trace_decoder::types::BlockHeight; use crate::types::PlonkyProofIntern; /// A transaction proof along with its public values, for proper connection with @@ -35,7 +35,7 @@ pub struct GeneratedAggProof { #[derive(Clone, Debug, Deserialize, Serialize)] pub struct GeneratedBlockProof { /// Associated block height. 
- pub b_height: BlockHeight, + pub b_height: u64, // TODO: replace by BlockHeight once trace_decoder is updated /// Underlying plonky2 proof. pub intern: PlonkyProofIntern, } diff --git a/proof_gen/src/prover_state.rs b/proof_gen/src/prover_state.rs index efb3413f4..09d6599a1 100644 --- a/proof_gen/src/prover_state.rs +++ b/proof_gen/src/prover_state.rs @@ -29,6 +29,7 @@ pub struct ProverStateBuilder { pub(crate) keccak_sponge_circuit_size: Range, pub(crate) logic_circuit_size: Range, pub(crate) memory_circuit_size: Range, + pub(crate) poseidon_circuit_size: Range, } impl Default for ProverStateBuilder { @@ -48,6 +49,7 @@ impl Default for ProverStateBuilder { keccak_sponge_circuit_size: DEFAULT_KECCAK_SPONGE_RANGE, logic_circuit_size: DEFAULT_LOGIC_RANGE, memory_circuit_size: DEFAULT_MEMORY_RANGE, + poseidon_circuit_size: DEFAULT_POSEIDON_RANGE, } } } @@ -73,6 +75,7 @@ impl ProverStateBuilder { define_set_circuit_size_method!(keccak_sponge); define_set_circuit_size_method!(logic); define_set_circuit_size_method!(memory); + define_set_circuit_size_method!(poseidon); // TODO: Consider adding async version? /// Instantiate the prover state from the builder. Note that this is a very @@ -90,6 +93,7 @@ impl ProverStateBuilder { self.keccak_sponge_circuit_size, self.logic_circuit_size, self.memory_circuit_size, + self.poseidon_circuit_size, ], &StarkConfig::standard_fast_config(), ); From ffeef170edb9405f4fa7959952a23b00848e65cf Mon Sep 17 00:00:00 2001 From: BGluth Date: Wed, 1 May 2024 12:59:15 -0600 Subject: [PATCH 10/19] Added a feature flag to disable `jemalloc` (#204) * Added a feature flag to disable `jemalloc` - `trace_decoder` now needs to link against two versions of `evm_arithmetization`. - Because `#[global_allocator]` can only appear once per build, we need a way to prevent this from occuring twice. - It also seems to be best practice to only use `#[global_allocator]` in binaries, so maybe this is something that we should look at removing from this library. 
However, for the time being, this will allow us to do two links against this crate. * Requested changes for PR #204 --- evm_arithmetization/Cargo.toml | 1 + evm_arithmetization/src/lib.rs | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/evm_arithmetization/Cargo.toml b/evm_arithmetization/Cargo.toml index e4370cb86..374db3860 100644 --- a/evm_arithmetization/Cargo.toml +++ b/evm_arithmetization/Cargo.toml @@ -56,6 +56,7 @@ sha2 = "0.10.6" [features] default = ["parallel"] asmtools = ["hex"] +disable_jemalloc = [] parallel = [ "plonky2/parallel", "plonky2_maybe_rayon/parallel", diff --git a/evm_arithmetization/src/lib.rs b/evm_arithmetization/src/lib.rs index 282add0f4..60a888dd4 100644 --- a/evm_arithmetization/src/lib.rs +++ b/evm_arithmetization/src/lib.rs @@ -217,7 +217,12 @@ pub mod util; use jemallocator::Jemalloc; use mpt_trie::partial_trie::HashedPartialTrie; -#[cfg(not(target_env = "msvc"))] +// TODO: We are currently re-evaluating if jemalloc brings better performance +// overall, and we might switch back to the default allocator down the road. For +// the time being, it will be able to be disabled with a feature flag +// (`disable_jemalloc`) in order to allow users to use their own allocator if +// needed. 
+#[cfg(not(any(target_env = "msvc", disable_jemalloc)))] #[global_allocator] static GLOBAL: Jemalloc = Jemalloc; From 70f93b487b03850d1d3021c9172fd70d0ebe3dd4 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 2 May 2024 17:46:33 +0200 Subject: [PATCH 11/19] SMT KV store + SMT delete (#211) * SMT KV store + SMT delete * Fmt * Minor --- smt_trie/src/smt.rs | 32 ++++++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/smt_trie/src/smt.rs b/smt_trie/src/smt.rs index acf77d995..bc22e6232 100644 --- a/smt_trie/src/smt.rs +++ b/smt_trie/src/smt.rs @@ -1,5 +1,7 @@ #![allow(clippy::needless_range_loop)] +use std::collections::HashMap; + use ethereum_types::U256; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::types::{Field, PrimeField64}; @@ -82,6 +84,7 @@ impl Node { #[derive(Debug, Clone, PartialEq, Eq, Default)] pub struct Smt { pub db: D, + pub kv_store: HashMap, pub root: HashOut, } @@ -128,11 +131,21 @@ impl Smt { let found_rem_key = Key(sibling.0[0..4].try_into().unwrap()); let found_val = limbs2f(found_val_a); let found_key = Key::join(acc_key, found_rem_key); - if found_key == key { - return found_val; + return if found_key == key { + assert_eq!( + found_val, + self.kv_store.get(&key).copied().unwrap_or_default() + ); + found_val } else { - return U256::zero(); - } + assert!(self + .kv_store + .get(&key) + .copied() + .unwrap_or_default() + .is_zero()); + U256::zero() + }; } else { let b = keys.get_bit(level as usize); r = Key(sibling.0[b as usize * 4..(b as usize + 1) * 4] @@ -149,6 +162,11 @@ impl Smt { /// If the value is 0 and the key is in the SMT, the key is removed from the /// SMT. Reference implementation in https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt.js. 
pub fn set(&mut self, key: Key, value: U256) { + if value.is_zero() { + self.kv_store.remove(&key); + } else { + self.kv_store.insert(key, value); + } let mut r = Key(self.root.elements); let mut new_root = self.root; let keys = key.split(); @@ -325,6 +343,12 @@ impl Smt { self.root = new_root; } + /// Delete the key in the SMT. + pub fn delete(&mut self, key: Key) { + self.kv_store.remove(&key); + self.set(key, U256::zero()); + } + /// Serialize the SMT into a vector of U256. /// Starts with a [0, 0] for convenience, that way `ptr=0` is a canonical /// empty node. Therefore the root of the SMT is at `ptr=2`. From 8da835472c097e51b0c4a99e32c970f5bbb80a2f Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Fri, 3 May 2024 10:20:48 +0200 Subject: [PATCH 12/19] SMT hash node (#212) * SMT hash node * Comment * Fmt --- smt_trie/src/smt.rs | 56 ++++++++++++++++++++++- smt_trie/src/smt_test.rs | 96 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 150 insertions(+), 2 deletions(-) diff --git a/smt_trie/src/smt.rs b/smt_trie/src/smt.rs index bc22e6232..1f060e0b6 100644 --- a/smt_trie/src/smt.rs +++ b/smt_trie/src/smt.rs @@ -349,6 +349,57 @@ impl Smt { self.set(key, U256::zero()); } + /// Set the key to the hash in the SMT. + /// Needs to be called before any call to `set` to avoid issues. + pub fn set_hash(&mut self, key: Bits, hash: HashOut) { + let mut r = Key(self.root.elements); + let mut new_root = self.root; + let mut level = 0isize; + let mut siblings = vec![]; + + for _ in 0..key.count { + let sibling = self.db.get_node(&r).unwrap_or(&Node([F::ZERO; 12])); + siblings.push(*sibling); + if sibling.is_one_siblings() { + panic!("Hit a leaf node."); + } else { + let b = key.get_bit(level as usize); + r = Key(sibling.0[b as usize * 4..(b as usize + 1) * 4] + .try_into() + .unwrap()); + level += 1; + } + } + level -= 1; + assert_eq!( + r, + Key([F::ZERO; 4]), + "Tried to insert a hash node in a non-empty node." 
+ ); + + if level >= 0 { + let b = key.get_bit(level as usize) as usize * 4; + siblings[level as usize].0[b..b + 4].copy_from_slice(&hash.elements); + } else { + new_root = hash; + } + siblings.truncate((level + 1) as usize); + + while level >= 0 { + new_root = F::poseidon(siblings[level as usize].0)[0..4] + .try_into() + .unwrap(); + self.db + .set_node(Key(new_root.elements), siblings[level as usize]); + level -= 1; + if level >= 0 { + let b = key.get_bit(level as usize) as usize * 4; + siblings[level as usize].0[b..b + 4].copy_from_slice(&new_root.elements); + } + } + self.root = new_root; + } + /// Serialize the SMT into a vector of U256. /// Starts with a [0, 0] for convenience, that way `ptr=0` is a canonical /// empty node. Therefore the root of the SMT is at `ptr=2`. @@ -405,7 +456,10 @@ fn serialize(smt: &Smt, key: Key, v: &mut Vec) -> usize { index } } else { - todo!("Add a hash node here."); + let index = v.len(); + v.push(HASH_TYPE.into()); + v.push(key2u(key)); + index } } diff --git a/smt_trie/src/smt_test.rs b/smt_trie/src/smt_test.rs index fca18375f..ae5373cde 100644 --- a/smt_trie/src/smt_test.rs +++ b/smt_trie/src/smt_test.rs @@ -1,7 +1,11 @@ use ethereum_types::U256; use plonky2::field::types::{Field, Sample}; -use rand::{thread_rng, Rng}; +use plonky2::hash::hash_types::HashOut; +use rand::seq::SliceRandom; +use rand::{random, thread_rng, Rng}; +use crate::bits::Bits; +use crate::db::Db; use crate::{ db::MemoryDb, smt::{hash_serialize, Key, Smt, F}, @@ -272,3 +276,93 @@ fn test_no_write_0() { let ser = smt.serialize(); assert_eq!(hash_serialize(&ser), smt.root); } + +#[test] +fn test_set_hash_first_level() { + let mut smt = Smt::::default(); + + let kvs = (0..128) + .map(|_| { + let k = Key(F::rand_array()); + let v = U256(random()); + smt.set(k, v); + (k, v) + }) + .collect::>(); + for &(k, v) in &kvs { + smt.set(k, v); + } + + let first_level = smt.db.get_node(&Key(smt.root.elements)).unwrap(); + let mut hash_smt = Smt::::default(); + let zero 
= Bits { + count: 1, + packed: U256::zero(), + }; + let one = Bits { + count: 1, + packed: U256::one(), + }; + hash_smt.set_hash( + zero, + HashOut { + elements: first_level.0[0..4].try_into().unwrap(), + }, + ); + hash_smt.set_hash( + one, + HashOut { + elements: first_level.0[4..8].try_into().unwrap(), + }, + ); + + assert_eq!(smt.root, hash_smt.root); + + let ser = hash_smt.serialize(); + assert_eq!(hash_serialize(&ser), hash_smt.root); +} + +#[test] +fn test_set_hash_order() { + let mut smt = Smt::::default(); + + let level = 4; + + let mut khs = (1..1 << level) + .map(|i| { + let k = Bits { + count: level, + packed: i.into(), + }; + let hash = HashOut { + elements: F::rand_array(), + }; + (k, hash) + }) + .collect::>(); + for &(k, v) in &khs { + smt.set_hash(k, v); + } + let key = loop { + // Forgive my laziness + let key = Key(F::rand_array()); + let keys = key.split(); + if (0..level).all(|i| !keys.get_bit(i)) { + break key; + } + }; + let val = U256(random()); + smt.set(key, val); + + let mut second_smt = Smt::::default(); + khs.shuffle(&mut thread_rng()); + for (k, v) in khs { + second_smt.set_hash(k, v); + } + second_smt.set(key, val); + + assert_eq!(smt.root, second_smt.root); + + let ser = second_smt.serialize(); + assert_eq!(hash_serialize(&ser), second_smt.root); +} From e76f6d4152afc824f38ab36c842973d7071ce751 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Sun, 5 May 2024 00:21:14 +0900 Subject: [PATCH 13/19] [CI] Update labeler job for feat/type2 branch (#222) --- .github/labeler.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/labeler.yml b/.github/labeler.yml index 4c1141a73..fbe45dc71 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -3,6 +3,11 @@ - changed-files: - any-glob-to-any-file: mpt_trie/** +# Add 'crate: smt_trie' label to any changes within 'smt_trie' folder. 
+'crate: smt_trie': +- changed-files: + - any-glob-to-any-file: smt_trie/** + # Add 'crate: evm_arithmetization' label to any changes within 'evm_arithmetization' folder. 'crate: evm_arithmetization': - changed-files: From 7043125d45f1f32daae850c65ecb0025f109b0c9 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 6 May 2024 10:29:29 +0200 Subject: [PATCH 14/19] SMT pruning (#215) * SMT pruning * Comments * Minor * Minor * Clippy --- smt_trie/src/smt.rs | 64 ++++++++++++++++++++++++++++++---------- smt_trie/src/smt_test.rs | 41 +++++++++++++++++++++++++ 2 files changed, 90 insertions(+), 15 deletions(-) diff --git a/smt_trie/src/smt.rs b/smt_trie/src/smt.rs index 1f060e0b6..356c8c3e9 100644 --- a/smt_trie/src/smt.rs +++ b/smt_trie/src/smt.rs @@ -1,6 +1,7 @@ #![allow(clippy::needless_range_loop)] -use std::collections::HashMap; +use std::borrow::Borrow; +use std::collections::{HashMap, HashSet}; use ethereum_types::U256; use plonky2::field::goldilocks_field::GoldilocksField; @@ -14,9 +15,9 @@ use crate::utils::{ f2limbs, get_unique_sibling, hash0, hash_key_hash, hashout2u, key2u, limbs2f, u2h, u2k, }; -const HASH_TYPE: u8 = 0; -const INTERNAL_TYPE: u8 = 1; -const LEAF_TYPE: u8 = 2; +pub(crate) const HASH_TYPE: u8 = 0; +pub(crate) const INTERNAL_TYPE: u8 = 1; +pub(crate) const LEAF_TYPE: u8 = 2; pub type F = GoldilocksField; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -400,32 +401,66 @@ impl Smt { self.root = new_root; } - /// Serialize the SMT into a vector of U256. + /// Serialize and prune the SMT into a vector of U256. /// Starts with a [0, 0] for convenience, that way `ptr=0` is a canonical /// empty node. Therefore the root of the SMT is at `ptr=2`. + /// `keys` is a list of keys whose prefixes will not be hashed-out in the + /// serialization. 
/// Serialization rules: /// ```pseudocode /// serialize( HashNode { h } ) = [HASH_TYPE, h] /// serialize( InternalNode { left, right } ) = [INTERNAL_TYPE, serialize(left).ptr, serialize(right).ptr] /// serialize( LeafNode { rem_key, value } ) = [LEAF_TYPE, rem_key, value] /// ``` - pub fn serialize(&self) -> Vec { + pub fn serialize_and_prune, I: IntoIterator>( + &self, + keys: I, + ) -> Vec { let mut v = vec![U256::zero(); 2]; // For empty hash node. let key = Key(self.root.elements); - serialize(self, key, &mut v); + + let mut keys_to_include = HashSet::new(); + for key in keys.into_iter() { + let mut bits = key.borrow().split(); + loop { + keys_to_include.insert(bits); + if bits.is_empty() { + break; + } + bits.pop_next_bit(); + } + } + + serialize(self, key, &mut v, Bits::empty(), &keys_to_include); if v.len() == 2 { v.extend([U256::zero(); 2]); } v } + + pub fn serialize(&self) -> Vec { + // Include all keys. + self.serialize_and_prune(self.kv_store.keys()) + } } -fn serialize(smt: &Smt, key: Key, v: &mut Vec) -> usize { +fn serialize( + smt: &Smt, + key: Key, + v: &mut Vec, + cur_bits: Bits, + keys_to_include: &HashSet, +) -> usize { if key.0.iter().all(F::is_zero) { return 0; // `ptr=0` is an empty node. 
} - if let Some(node) = smt.db.get_node(&key) { + if !keys_to_include.contains(&cur_bits) || smt.db.get_node(&key).is_none() { + let index = v.len(); + v.push(HASH_TYPE.into()); + v.push(key2u(key)); + index + } else if let Some(node) = smt.db.get_node(&key) { if node.0.iter().all(F::is_zero) { panic!("wtf?"); } @@ -449,17 +484,16 @@ fn serialize(smt: &Smt, key: Key, v: &mut Vec) -> usize { v.push(INTERNAL_TYPE.into()); v.push(U256::zero()); v.push(U256::zero()); - let i_left = serialize(smt, key_left, v).into(); + let i_left = + serialize(smt, key_left, v, cur_bits.add_bit(false), keys_to_include).into(); v[index + 1] = i_left; - let i_right = serialize(smt, key_right, v).into(); + let i_right = + serialize(smt, key_right, v, cur_bits.add_bit(true), keys_to_include).into(); v[index + 2] = i_right; index } } else { - let index = v.len(); - v.push(HASH_TYPE.into()); - v.push(key2u(key)); - index + unreachable!() } } diff --git a/smt_trie/src/smt_test.rs b/smt_trie/src/smt_test.rs index ae5373cde..c086e17dc 100644 --- a/smt_trie/src/smt_test.rs +++ b/smt_trie/src/smt_test.rs @@ -6,6 +6,8 @@ use rand::{random, thread_rng, Rng}; use crate::bits::Bits; use crate::db::Db; +use crate::smt::HASH_TYPE; +use crate::utils::hashout2u; use crate::{ db::MemoryDb, smt::{hash_serialize, Key, Smt, F}, @@ -366,3 +368,42 @@ fn test_set_hash_order() { let ser = second_smt.serialize(); assert_eq!(hash_serialize(&ser), second_smt.root); } + +#[test] +fn test_serialize_and_prune() { + let mut smt = Smt::::default(); + + for _ in 0..128 { + let k = Key(F::rand_array()); + let v = U256(random()); + smt.set(k, v); + } + + let ser = smt.serialize(); + assert_eq!(hash_serialize(&ser), smt.root); + + let subset = { + let r: u128 = random(); + smt.kv_store + .keys() + .enumerate() + .filter_map(|(i, k)| if r & (1 << i) != 0 { Some(*k) } else { None }) + .collect::>() + }; + + let pruned_ser = smt.serialize_and_prune(subset); + assert_eq!(hash_serialize(&pruned_ser), smt.root); + 
assert!(pruned_ser.len() <= ser.len()); + + let trivial_ser = smt.serialize_and_prune::>(vec![]); + assert_eq!( + trivial_ser, + vec![ + U256::zero(), + U256::zero(), + HASH_TYPE.into(), + hashout2u(smt.root) + ] + ); + assert_eq!(hash_serialize(&trivial_ser), smt.root); +} From fe187a4cafb4621bd8893e9ebdce405093b191a8 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Wed, 22 May 2024 07:41:02 -0400 Subject: [PATCH 15/19] fix(type2): specify conditional attribute properly (#243) * fix: Specify conditional attribute properly --- .github/CODEOWNERS | 4 +- .github/workflows/audit.yml | 13 ++ CHANGELOG.md | 1 + evm_arithmetization/src/cpu/cpu_stark.rs | 4 +- .../src/cpu/kernel/asm/core/access_lists.asm | 44 +++-- .../src/cpu/kernel/asm/core/call_gas.asm | 8 +- .../src/cpu/kernel/asm/core/process_txn.asm | 2 - .../cpu/kernel/asm/core/touched_addresses.asm | 1 - .../asm/curve/secp256k1/inverse_scalar.asm | 1 - .../cpu/kernel/asm/curve/secp256k1/moddiv.asm | 1 - .../cpu/kernel/asm/hash/sha2/compression.asm | 9 +- .../kernel/asm/hash/sha2/message_schedule.asm | 75 ++++---- .../src/cpu/kernel/asm/hash/sha2/ops.asm | 120 +++++-------- .../cpu/kernel/asm/hash/sha2/temp_words.asm | 24 ++- .../cpu/kernel/asm/hash/sha2/write_length.asm | 24 ++- .../kernel/asm/mpt/delete/delete_branch.asm | 1 - .../src/cpu/kernel/asm/util/assertions.asm | 10 +- .../src/cpu/kernel/interpreter.rs | 57 +++--- .../src/cpu/kernel/tests/ecc/ecrecover.rs | 20 ++- .../src/generation/prover_input.rs | 11 -- evm_arithmetization/src/generation/state.rs | 23 ++- evm_arithmetization/src/lib.rs | 2 +- evm_arithmetization/src/memory/mod.rs | 3 - evm_arithmetization/src/witness/operation.rs | 22 ++- evm_arithmetization/src/witness/transition.rs | 16 +- mpt_trie/src/debug_tools/diff.rs | 23 +-- mpt_trie/src/debug_tools/query.rs | 23 +-- mpt_trie/src/nibbles.rs | 83 +++++++++ mpt_trie/src/partial_trie.rs | 19 ++ mpt_trie/src/trie_ops.rs | 32 +++- 
mpt_trie/src/trie_subsets.rs | 33 +++- .../compact/compact_prestate_processing.rs | 168 ++++++++---------- .../src/compact/compact_to_partial_trie.rs | 44 ++--- .../src/compact/complex_test_payloads.rs | 3 +- trace_decoder/src/decoding.rs | 92 +++++----- trace_decoder/src/lib.rs | 6 - trace_decoder/src/processed_block_trace.rs | 61 ++++--- trace_decoder/src/types.rs | 34 ++-- trace_decoder/src/utils.rs | 19 +- 39 files changed, 617 insertions(+), 519 deletions(-) create mode 100644 .github/workflows/audit.yml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9c7403044..b0c2d81e6 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ -* @muursh @Nashtare -/evm_arithmetization/ @wborgeaud @muursh @Nashtare +* @muursh @Nashtare @cpubot +/evm_arithmetization/ @wborgeaud @muursh @Nashtare @cpubot diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml new file mode 100644 index 000000000..d8866a675 --- /dev/null +++ b/.github/workflows/audit.yml @@ -0,0 +1,13 @@ +name: Security audit +on: + push: + paths: + - '**/Cargo.toml' +jobs: + security_audit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: rustsec/audit-check@v1.4.1 + with: + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 2fbf89674..ca08f80a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] ### Changed +- Add a few QoL useability functions to the interface ([#169](https://github.com/0xPolygonZero/zk_evm/pull/169)) ## [0.3.1] - 2024-04-22 diff --git a/evm_arithmetization/src/cpu/cpu_stark.rs b/evm_arithmetization/src/cpu/cpu_stark.rs index 55c453567..ad82a2c66 100644 --- a/evm_arithmetization/src/cpu/cpu_stark.rs +++ b/evm_arithmetization/src/cpu/cpu_stark.rs @@ -17,7 +17,7 @@ use starky::stark::Stark; use super::columns::CpuColumnsView; use super::halt; use 
super::kernel::constants::context_metadata::ContextMetadata; -use super::membus::NUM_GP_CHANNELS; +use super::membus::{NUM_CHANNELS, NUM_GP_CHANNELS}; use crate::all_stark::{EvmStarkFrame, Table}; use crate::cpu::columns::{COL_MAP, NUM_CPU_COLUMNS}; use crate::cpu::{ @@ -25,7 +25,7 @@ use crate::cpu::{ modfp254, pc, push0, shift, simple_logic, stack, syscalls_exceptions, }; use crate::memory::segments::Segment; -use crate::memory::{NUM_CHANNELS, VALUE_LIMBS}; +use crate::memory::VALUE_LIMBS; /// Creates the vector of `Columns` corresponding to the General Purpose /// channels when calling the Keccak sponge: the CPU reads the output of the diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/access_lists.asm b/evm_arithmetization/src/cpu/kernel/asm/core/access_lists.asm index 5d0512a12..f89938326 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/access_lists.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/access_lists.asm @@ -63,17 +63,22 @@ global init_access_lists: POP %endmacro -// Multiply the ptr at the top of the stack by 2 -// and abort if 2*ptr - @SEGMENT_ACCESSED_ADDRESSES >= @GLOBAL_METADATA_ACCESSED_ADDRESSES_LEN -// In this way ptr must be pointing to the begining of a node. +// Multiply the value at the top of the stack, denoted by ptr/2, by 2 +// and abort if ptr/2 >= mem[@GLOBAL_METADATA_ACCESSED_ADDRESSES_LEN]/2 +// In this way 2*ptr/2 must be pointing to the begining of a node. 
%macro get_valid_addr_ptr - // stack: ptr + // stack: ptr/2 + DUP1 + // stack: ptr/2, ptr/2 + %mload_global_metadata(@GLOBAL_METADATA_ACCESSED_ADDRESSES_LEN) + // @GLOBAL_METADATA_ACCESSED_ADDRESSES_LEN must be an even number because + // both @SEGMENT_ACCESSED_ADDRESSES and the unscaled access addresses list len + // must be even numbers + %div_const(2) + // stack: scaled_len/2, ptr/2, ptr/2 + %assert_gt %mul_const(2) - PUSH @SEGMENT_ACCESSED_ADDRESSES - DUP2 - SUB - %assert_lt_const(@GLOBAL_METADATA_ACCESSED_ADDRESSES_LEN) - // stack: 2*ptr + // stack: ptr %endmacro @@ -205,17 +210,20 @@ global remove_accessed_addresses: // stack: cold_access, value_ptr %endmacro -// Multiply the ptr at the top of the stack by 4 -// and abort if 4*ptr - SEGMENT_ACCESSED_STORAGE_KEYS >= @GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN -// In this way ptr must be pointing to the beginning of a node. +// Multiply the ptr at the top of the stack, denoted by ptr/4, by 4 +// and abort if ptr/4 >= @GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN/4 +// In this way 4*ptr/4 be pointing to the beginning of a node. %macro get_valid_storage_ptr - // stack: ptr + // stack: ptr/4 + DUP1 + %mload_global_metadata(@GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN) + // By construction, both @SEGMENT_ACCESSED_STORAGE_KEYS and the unscaled list len + // must be multiples of 4 + %div_const(4) + // stack: scaled_len/4, ptr/4, ptr/4 + %assert_gt %mul_const(4) - PUSH @SEGMENT_ACCESSED_STORAGE_KEYS - DUP2 - SUB - %assert_lt_const(@GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN) - // stack: 2*ptr + // stack: ptr %endmacro /// Inserts the storage key into the access list if it is not already present. 
diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/call_gas.asm b/evm_arithmetization/src/cpu/kernel/asm/core/call_gas.asm index 5945c1e17..c70de697b 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/call_gas.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/call_gas.asm @@ -71,7 +71,13 @@ global xfer_cost: %jump(after_xfer_cost) xfer_cost_nonzero: // stack: cost, is_call_or_staticcall, is_call_or_callcode, address, gas, kexit_info, value, retdest - %add_const(@GAS_CALLVALUE) + SWAP5 + // stack: kexit_info, is_call_or_staticcall, is_call_or_callcode, address, gas, cost, value, retdest + PUSH @GAS_CALLVALUE + // stack: call_value_gas, kexit_info, is_call_or_staticcall, is_call_or_callcode, address, gas, cost, value, retdest + %charge_gas + // stack: kexit_info, is_call_or_staticcall, is_call_or_callcode, address, gas, cost, value, retdest + SWAP5 // stack: cost, is_call_or_staticcall, is_call_or_callcode, address, gas, kexit_info, value, retdest %jump(after_xfer_cost) diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm b/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm index 1c77b01e0..6dee436be 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm @@ -1,8 +1,6 @@ // After the transaction data has been parsed into a normalized set of fields // (see NormalizedTxnField), this routine processes the transaction. -// TODO: Save checkpoints in @CTX_METADATA_STATE_TRIE_CHECKPOINT_PTR and @SEGMENT_STORAGE_TRIE_CHECKPOINT_PTRS. 
- // Pre stack: retdest // Post stack: success, leftover_gas global process_normalized_txn: diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/touched_addresses.asm b/evm_arithmetization/src/cpu/kernel/asm/core/touched_addresses.asm index d9c70f47a..a8e926e77 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/touched_addresses.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/touched_addresses.asm @@ -47,7 +47,6 @@ insert_touched_addresses_found: /// Remove the address from the list. /// Panics if the address is not in the list. -/// TODO: Unused? global remove_touched_addresses: // stack: addr, retdest %mload_global_metadata(@GLOBAL_METADATA_TOUCHED_ADDRESSES_LEN) diff --git a/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/inverse_scalar.asm b/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/inverse_scalar.asm index 6e1563e2f..f10242bc3 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/inverse_scalar.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/inverse_scalar.asm @@ -1,5 +1,4 @@ /// Division modulo 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141, the Secp256k1 scalar field order -/// To replace with more efficient method using non-determinism later. %macro mulmodn_secp_scalar // stack: x, y diff --git a/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/moddiv.asm b/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/moddiv.asm index d878dc140..f9fbd624b 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/moddiv.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/curve/secp256k1/moddiv.asm @@ -1,5 +1,4 @@ /// Division modulo 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f, the Secp256k1 base field order -/// To replace with more efficient method using non-determinism later. 
// Returns y * (x^-1) where the inverse is taken modulo N %macro moddiv_secp_base diff --git a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/compression.asm b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/compression.asm index a9467a00b..6ff84301b 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/compression.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/compression.asm @@ -65,10 +65,11 @@ compression_loop: %mload_kernel_code_u32 // stack: K[i], W[i], a[i], b[i], c[i], d[i], e[i], f[i], g[i], h[i], num_blocks, scratch_space_addr, message_schedule_addr, i, a[0]..h[0], retdest DUP10 - DUP10 - DUP10 - DUP10 - // stack: e[i], f[i], g[i], h[i], K[i], W[i], a[i], b[i], c[i], d[i], e[i], f[i], g[i], h[i], num_blocks, scratch_space_addr, message_schedule_addr, i, a[0]..h[0], retdest + DUP8 + DUP11 + DUP11 + DUP11 + // stack: e[i], f[i], g[i], e[i], h[i], K[i], W[i], a[i], b[i], c[i], d[i], e[i], f[i], g[i], h[i], num_blocks, scratch_space_addr, message_schedule_addr, i, a[0]..h[0], retdest %sha2_temp_word1 // stack: T1[i], a[i], b[i], c[i], d[i], e[i], f[i], g[i], h[i], num_blocks, scratch_space_addr, message_schedule_addr, i, a[0]..h[0], retdest DUP4 diff --git a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/message_schedule.asm b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/message_schedule.asm index 66fa67a9b..3bcd7dbfc 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/message_schedule.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/message_schedule.asm @@ -13,13 +13,11 @@ gen_message_schedule_from_block: // stack: block_addr, output_addr, retdest DUP1 // stack: block_addr, block_addr, output_addr, retdest - %add_const(32) - // stack: block_addr + 32, block_addr, output_addr, retdest - SWAP1 - // stack: block_addr, block_addr + 32, output_addr, retdest %mload_u256 - // stack: block[0], block_addr + 32, output_addr, retdest + // stack: block[0], block_addr, output_addr, retdest SWAP1 + // stack: block_addr, 
block[0], output_addr, retdest + %add_const(32) // stack: block_addr + 32, block[0], output_addr, retdest %mload_u256 // stack: block[1], block[0], output_addr, retdest @@ -45,27 +43,22 @@ gen_message_schedule_from_block_0_loop: // stack: output_addr, block[0] % (1 << 32), block[0] >> 32, output_addr, counter, block[1], retdest %mstore_u32 // stack: block[0] >> 32, output_addr, counter, block[1], retdest - SWAP1 - // stack: output_addr, block[0] >> 32, counter, block[1], retdest - %sub_const(4) - // stack: output_addr - 4, block[0] >> 32, counter, block[1], retdest - SWAP1 - // stack: block[0] >> 32, output_addr - 4, counter, block[1], retdest + %stack (block0_shifted, output_addr, counter) -> (output_addr, 4, 1, counter, block0_shifted) + SUB + // stack: output_addr - 4, 1, counter, block[0] >> 32, block[1], retdest SWAP2 - // stack: counter, output_addr - 4, block[0] >> 32, block[1], retdest - %decrement + SUB + // stack: counter - 1, output_addr - 4, block[0] >> 32, block[1], retdest DUP1 %jumpi(gen_message_schedule_from_block_0_loop) gen_message_schedule_from_block_0_end: // stack: old counter=0, output_addr, block[0], block[1], retdest POP // stack: output_addr, block[0], block[1], retdest - %stack (out, b0, b1) -> (out, 8, b1, b0) - // stack: output_addr, counter=8, block[1], block[0], retdest %add_const(64) - // stack: output_addr + 64, counter, block[1], block[0], retdest - SWAP1 - // stack: counter, output_addr + 64, block[1], block[0], retdest + // stack: output_addr + 64, block[0], block[1], retdest + %stack (out, b0, b1) -> (8, out, b1, b0) + // stack: counter=8, output_addr + 64, block[1], block[0], retdest gen_message_schedule_from_block_1_loop: // Split the second half (256 bits) of the block into the next eight (32-bit) chunks of the message sdchedule. 
// stack: counter, output_addr, block[1], block[0], retdest @@ -83,29 +76,22 @@ gen_message_schedule_from_block_1_loop: // stack: output_addr, block[1] % (1 << 32), block[1] >> 32, output_addr, counter, block[0], retdest %mstore_u32 // stack: block[1] >> 32, output_addr, counter, block[0], retdest - SWAP1 - // stack: output_addr, block[1] >> 32, counter, block[0], retdest - %sub_const(4) - // stack: output_addr - 4, block[1] >> 32, counter, block[0], retdest - SWAP1 - // stack: block[1] >> 32, output_addr - 4, counter, block[0], retdest + %stack (block1_shifted, output_addr, counter) -> (output_addr, 4, 1, counter, block1_shifted) + SUB + // stack: output_addr - 4, 1, counter, block[1] >> 32, block[0], retdest SWAP2 - // stack: counter, output_addr - 4, block[1] >> 32, block[0], retdest - %decrement + SUB + // stack: counter - 1, output_addr - 4, block[1] >> 32, block[0], retdest DUP1 %jumpi(gen_message_schedule_from_block_1_loop) gen_message_schedule_from_block_1_end: // stack: old counter=0, output_addr, block[1], block[0], retdest POP // stack: output_addr, block[0], block[1], retdest - PUSH 48 - // stack: counter=48, output_addr, block[0], block[1], retdest - SWAP1 - // stack: output_addr, counter, block[0], block[1], retdest %add_const(36) - // stack: output_addr + 36, counter, block[0], block[1], retdest - SWAP1 - // stack: counter, output_addr + 36, block[0], block[1], retdest + // stack: output_addr + 36, block[0], block[1], retdest + PUSH 48 + // stack: counter=48, output_addr + 36, block[0], block[1], retdest gen_message_schedule_remaining_loop: // Generate the next 48 chunks of the message schedule, one at a time, from prior chunks. 
// stack: counter, output_addr, block[0], block[1], retdest @@ -153,9 +139,10 @@ gen_message_schedule_remaining_loop: // stack: output_addr, x[output_addr - 16*4], sigma_0(x[output_addr - 15*4]), x[output_addr - 7*4], sigma_1(x[output_addr - 2*4]), counter, block[0], block[1], retdest SWAP4 // stack: sigma_1(x[output_addr - 2*4]), x[output_addr - 16*4], sigma_0(x[output_addr - 15*4]), x[output_addr - 7*4], output_addr, counter, block[0], block[1], retdest - %add_u32 - %add_u32 - %add_u32 + ADD + ADD + ADD + %as_u32 // stack: sigma_1(x[output_addr - 2*4]) + x[output_addr - 16*4] + sigma_0(x[output_addr - 15*4]) + x[output_addr - 7*4], output_addr, counter, block[0], block[1], retdest DUP2 // stack: output_addr, sigma_1(x[output_addr - 2*4]) + x[output_addr - 16*4] + sigma_0(x[output_addr - 15*4]) + x[output_addr - 7*4], output_addr, counter, block[0], block[1], retdest @@ -174,7 +161,7 @@ gen_message_schedule_remaining_end: %pop4 JUMP -// Precodition: memory, starting at 0, contains num_blocks, block0[0], ..., block0[63], block1[0], ..., blocklast[63] +// Precondition: memory, starting at 0, contains num_blocks, block0[0], ..., block0[63], block1[0], ..., blocklast[63] // stack contains output_addr // Postcondition: starting at output_addr, set of 256 bytes per block // each contains the 64 32-bit chunks of the message schedule for that block (in four-byte increments) @@ -182,12 +169,14 @@ global sha2_gen_all_message_schedules: // stack: output_addr, retdest DUP1 // stack: output_addr, output_addr, retdest - %mload_current_general_no_offset - // stack: num_blocks, output_addr, output_addr, retdest - PUSH 1 - // stack: cur_offset = 1, counter = num_blocks, output_addr, output_addr, retdest - %build_current_general_address - // stack: cur_addr, counter, output_addr, output_addr, retdest + %build_current_general_address_no_offset + DUP1 + // stack: base_addr, base_addr, output_addr, output_addr, retdest + MLOAD_GENERAL + // stack: num_blocks, base_addr, output_addr, 
output_addr, retdest + SWAP1 + %increment + // stack: cur_addr (offset = 1), counter = num_blocks, output_addr, output_addr, retdest gen_all_message_schedules_loop: // stack: cur_addr, counter, cur_output_addr, output_addr, retdest PUSH gen_all_message_schedules_loop_end diff --git a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/ops.asm b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/ops.asm index d50e5c9a8..f0da871a5 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/ops.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/ops.asm @@ -1,14 +1,14 @@ // 32-bit right rotation %macro rotr(rot) // stack: value + DUP1 + // stack: value, value PUSH $rot - // stack: rot, value - DUP2 - DUP2 - // stack: rot, value, rot, value + // stack: rot, value, value SHR - // stack: value >> rot, rot, value - %stack (shifted, rot, value) -> (rot, value, shifted) + // stack: value >> rot, value + SWAP1 + PUSH $rot // stack: rot, value, value >> rot PUSH 32 SUB @@ -26,16 +26,14 @@ // stack: x, x %rotr(7) // stack: rotr(x, 7), x - SWAP1 - // stack: x, rotr(x, 7) DUP1 - // stack: x, x, rotr(x, 7) - %rotr(18) - // stack: rotr(x, 18), x, rotr(x, 7) - SWAP1 - // stack: x, rotr(x, 18), rotr(x, 7) + // stack: rotr(x, 7), rotr(x, 7), x + %rotr(11) + // stack: rotr(x, 18), rotr(x, 7), x + SWAP2 + // stack: x, rotr(x, 7), rotr(x, 18) %shr_const(3) - // stack: shr(x, 3), rotr(x, 18), rotr(x, 7) + // stack: shr(x, 3), rotr(x, 7), rotr(x, 18) XOR XOR %endmacro @@ -46,36 +44,30 @@ // stack: x, x %rotr(17) // stack: rotr(x, 17), x - SWAP1 - // stack: x, rotr(x, 17) DUP1 - // stack: x, x, rotr(x, 17) - %rotr(19) - // stack: rotr(x, 19), x, rotr(x, 17) - SWAP1 - // stack: x, rotr(x, 19), rotr(x, 17) + // stack: rotr(x, 17), rotr(x, 17), x + %rotr(2) + // stack: rotr(x, 19), rotr(x, 17), x + SWAP2 + // stack: x, rotr(x, 17), rotr(x, 19) PUSH 10 SHR - // stack: shr(x, 10), rotr(x, 19), rotr(x, 17) + // stack: shr(x, 10), rotr(x, 17), rotr(x, 19) XOR XOR %endmacro %macro 
sha2_bigsigma_0 // stack: x - DUP1 - // stack: x, x %rotr(2) - // stack: rotr(x, 2), x - SWAP1 - // stack: x, rotr(x, 2) + // stack: rotr(x, 2) DUP1 - // stack: x, x, rotr(x, 2) - %rotr(13) - // stack: rotr(x, 13), x, rotr(x, 2) - SWAP1 - // stack: x, rotr(x, 13), rotr(x, 2) - %rotr(22) + // stack: rotr(x, 2), rotr(x, 2) + %rotr(11) + // stack: rotr(x, 13), rotr(x, 2) + DUP1 + // stack: rotr(x, 13), rotr(x, 13), rotr(x, 2) + %rotr(9) // stack: rotr(x, 22), rotr(x, 13), rotr(x, 2) XOR XOR @@ -83,19 +75,15 @@ %macro sha2_bigsigma_1 // stack: x - DUP1 - // stack: x, x %rotr(6) - // stack: rotr(x, 6), x - SWAP1 - // stack: x, rotr(x, 6) + // stack: rotr(x, 6) DUP1 - // stack: x, x, rotr(x, 6) - %rotr(11) - // stack: rotr(x, 11), x, rotr(x, 6) - SWAP1 - // stack: x, rotr(x, 11), rotr(x, 6) - %rotr(25) + // stack: rotr(x, 6), rotr(x, 6) + %rotr(5) + // stack: rotr(x, 11), rotr(x, 6) + DUP1 + // stack: rotr(x, 11), rotr(x, 11), rotr(x, 6) + %rotr(14) // stack: rotr(x, 25), rotr(x, 11), rotr(x, 6) XOR XOR @@ -103,41 +91,31 @@ %macro sha2_choice // stack: x, y, z - DUP1 - // stack: x, x, y, z - NOT - // stack: not x, x, y, z SWAP1 - // stack: x, not x, y, z - SWAP3 - // stack: z, not x, y, x - AND - // stack: (not x) and z, y, x - SWAP2 - // stack: x, y, (not x) and z + // stack: y, x, z + DUP3 + // stack: z, y, x, z + XOR + // stack: z xor y, x, z AND - // stack: x and y, (not x) and z - OR + // stack: (z xor y) and x, z + XOR + // stack: ((z xor y) and x) xor z == (x and y) xor (not x and z) %endmacro %macro sha2_majority // stack: x, y, z - DUP1 - // stack: x, x, y, z - DUP3 - // stack: y, x, x, y, z - DUP5 - // stack: z, y, x, x, y, z - AND - // stack: z and y, x, x, y, z - SWAP4 - // stack: z, x, x, y, z and y + DUP2 + DUP2 AND - // stack: z and x, x, y, z and y + // stack: x and y, x, y, z SWAP2 - // stack: y, x, z and x, z and y - AND - // stack: y and x, z and x, z and y + // stack: y, x, x and y, z OR + // stack: y or x, x and y, z + %stack(y_or_x, x_and_y, z) -> 
(z, y_or_x, x_and_y) + AND + // stack: z and (y or x), x and y OR + // stack: (z and (y or x) or (x and y) == (x and y) or (x and z) or (y and z) %endmacro diff --git a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/temp_words.asm b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/temp_words.asm index ed610947f..0f3fb4b7a 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/temp_words.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/temp_words.asm @@ -1,18 +1,16 @@ // "T_1" in the SHA-256 spec %macro sha2_temp_word1 - // stack: e, f, g, h, K[i], W[i] - DUP1 - // stack: e, e, f, g, h, K[i], W[i] - %sha2_bigsigma_1 - // stack: Sigma_1(e), e, f, g, h, K[i], W[i] - %stack (sig, e, f, g) -> (e, f, g, sig) - // stack: e, f, g, Sigma_1(e), h, K[i], W[i] + // stack: e, f, g, e, h, K[i], W[i] %sha2_choice - // stack: Ch(e, f, g), Sigma_1(e), h, K[i], W[i] - %add_u32 - %add_u32 - %add_u32 - %add_u32 + // stack: Ch(e, f, g), e, h, K[i], W[i] + SWAP1 + // stack: e, Ch(e, f, g), h, K[i], W[i] + %sha2_bigsigma_1 + // stack: Sigma_1(e), Ch(e, f, g), h, K[i], W[i] + ADD + ADD + ADD + ADD // stack: Ch(e, f, g) + Sigma_1(e) + h + K[i] + W[i] %endmacro @@ -27,6 +25,6 @@ // stack: c, a, b, Sigma_0(a) %sha2_majority // stack: Maj(c, a, b), Sigma_0(a) - %add_u32 + ADD // stack: Maj(c, a, b) + Sigma_0(a) %endmacro diff --git a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/write_length.asm b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/write_length.asm index 9c2707b8d..a2a0216a6 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/write_length.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/hash/sha2/write_length.asm @@ -3,14 +3,13 @@ %build_current_general_address SWAP1 // stack: length, last_addr - DUP1 - // stack: length, length, last_addr + DUP2 + DUP2 + // stack: length, last_addr, length, last_addr %and_const(0xff) - // stack: length % (1 << 8), length, last_addr - DUP3 - // stack: last_addr, length % (1 << 8), length, last_addr - %swap_mstore - 
+ // stack: length % (1 << 8), last_addr, length, last_addr + MSTORE_GENERAL + %rep 7 // For i = 0 to 6 // stack: length >> (8 * i), last_addr - i - 1 @@ -20,14 +19,13 @@ // stack: length >> (8 * i), last_addr - i - 2 %shr_const(8) // stack: length >> (8 * (i + 1)), last_addr - i - 2 - PUSH 256 DUP2 - // stack: length >> (8 * (i + 1)), 256, length >> (8 * (i + 1)), last_addr - i - 2 - MOD - // stack: (length >> (8 * (i + 1))) % (1 << 8), length >> (8 * (i + 1)), last_addr - i - 2 + PUSH 256 DUP3 - // stack: last_addr - i - 2, (length >> (8 * (i + 1))) % (1 << 8), length >> (8 * (i + 1)), last_addr - i - 2 - %swap_mstore + // stack: length >> (8 * (i + 1)), 256, last_addr - i - 2, length >> (8 * (i + 1)), last_addr - i - 2 + MOD + // stack: (length >> (8 * (i + 1))) % (1 << 8), last_addr - i - 2, length >> (8 * (i + 1)), last_addr - i - 2 + MSTORE_GENERAL %endrep %pop2 diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete_branch.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete_branch.asm index 64187ac83..f67861d6f 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete_branch.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/delete/delete_branch.asm @@ -6,7 +6,6 @@ // - If there are more than one, update the branch node and return it. // - If there is exactly one, transform the branch node into an leaf/extension node and return it. // Assumes that `num_nibbles>0` and that the value of the branch node is zero. -// TODO: May need to revisit these assumptions depending on how the receipt trie is implemented. 
global mpt_delete_branch: // stack: node_type, node_payload_ptr, num_nibbles, key, retdest POP diff --git a/evm_arithmetization/src/cpu/kernel/asm/util/assertions.asm b/evm_arithmetization/src/cpu/kernel/asm/util/assertions.asm index dc73721b3..1b3439475 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/util/assertions.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/util/assertions.asm @@ -39,8 +39,8 @@ global panic: %endmacro %macro assert_lt(ret) - GE - %assert_zero($ret) + LT + %assert_nonzero($ret) %endmacro %macro assert_le @@ -56,10 +56,8 @@ global panic: %endmacro %macro assert_gt - // %assert_zero is cheaper than %assert_nonzero, so we will leverage the - // fact that (x > y) == !(x <= y). - LE - %assert_zero + GT + %assert_nonzero %endmacro %macro assert_gt(ret) diff --git a/evm_arithmetization/src/cpu/kernel/interpreter.rs b/evm_arithmetization/src/cpu/kernel/interpreter.rs index ab53e8e2d..5b3b11e0d 100644 --- a/evm_arithmetization/src/cpu/kernel/interpreter.rs +++ b/evm_arithmetization/src/cpu/kernel/interpreter.rs @@ -8,10 +8,9 @@ use core::cmp::Ordering; use std::collections::{BTreeSet, HashMap}; -use anyhow::{anyhow, bail}; -use ethereum_types::{BigEndianHash, H160, H256, U256, U512}; -use itertools::Itertools; -use keccak_hash::keccak; +use anyhow::anyhow; +use ethereum_types::{BigEndianHash, U256}; +use log::Level; use mpt_trie::partial_trie::PartialTrie; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::types::{Field, PrimeField64}; @@ -341,31 +340,14 @@ impl Interpreter { } } + // As this relies on the underlying `GenerationState` method, stacks containing + // more than 10 elements will be truncated. As such, new tests that would need + // to access more elements would require special handling. 
pub(crate) fn stack(&self) -> Vec { - match self.stack_len().cmp(&1) { - Ordering::Greater => { - let mut stack = self.generation_state.memory.contexts[self.context()].segments - [Segment::Stack.unscale()] - .content - .iter() - .filter_map(|&opt_elt| opt_elt) - .collect::>(); - stack.truncate(self.stack_len() - 1); - stack.push( - self.stack_top() - .expect("The stack is checked to be nonempty"), - ); - stack - } - Ordering::Equal => { - vec![self - .stack_top() - .expect("The stack is checked to be nonempty")] - } - Ordering::Less => { - vec![] - } - } + let mut stack = self.generation_state.stack(); + stack.reverse(); + + stack } fn stack_segment_mut(&mut self) -> &mut Vec> { @@ -517,7 +499,10 @@ impl State for Interpreter { } fn get_stack(&self) -> Vec { - self.stack() + let mut stack = self.stack(); + stack.reverse(); + + stack } fn get_halt_offsets(&self) -> Vec { @@ -537,7 +522,7 @@ impl State for Interpreter { if registers.is_kernel { log_kernel_instruction(self, op); } else { - log::debug!("User instruction: {:?}", op); + self.log_debug(format!("User instruction: {:?}", op)); } let generation_state = self.get_mut_generation_state(); @@ -556,6 +541,18 @@ impl State for Interpreter { self.perform_state_op(op, row) } + + fn log_debug(&self, msg: String) { + if !self.is_jumpdest_analysis { + log::debug!("{}", msg); + } + } + + fn log(&self, level: Level, msg: String) { + if !self.is_jumpdest_analysis { + log::log!(level, "{}", msg); + } + } } impl Transition for Interpreter { diff --git a/evm_arithmetization/src/cpu/kernel/tests/ecc/ecrecover.rs b/evm_arithmetization/src/cpu/kernel/tests/ecc/ecrecover.rs index 9a381daef..5d811123f 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/ecc/ecrecover.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/ecc/ecrecover.rs @@ -1,6 +1,7 @@ +use std::collections::HashMap; use std::str::FromStr; -use anyhow::Result; +use anyhow::{anyhow, Result}; use ethereum_types::U256; use 
plonky2::field::goldilocks_field::GoldilocksField as F; @@ -32,17 +33,18 @@ fn test_invalid_ecrecover(hash: &str, v: &str, r: &str, s: &str) -> Result<()> { #[test] fn test_ecrecover_real_block() -> Result<()> { let f = include_str!("ecrecover_test_data"); - let convert_v = |v| match v { - // TODO: do this properly. - "0" => "0x1b", - "1" => "0x1c", - "37" => "0x1b", - "38" => "0x1c", - _ => panic!("Invalid v."), + let convert_v = { + let mut map = HashMap::new(); + map.insert("0", "0x1b"); + map.insert("1", "0x1c"); + map.insert("37", "0x1b"); + map.insert("38", "0x1c"); + + move |v: &str| map.get(v).copied().ok_or_else(|| anyhow!("Invalid v.")) }; for line in f.lines().filter(|s| !s.starts_with("//")) { let line = line.split_whitespace().collect::>(); - test_valid_ecrecover(line[4], convert_v(line[0]), line[1], line[2], line[3])?; + test_valid_ecrecover(line[4], convert_v(line[0])?, line[1], line[2], line[3])?; } Ok(()) } diff --git a/evm_arithmetization/src/generation/prover_input.rs b/evm_arithmetization/src/generation/prover_input.rs index 6eb7d0f7f..048fd3ce8 100644 --- a/evm_arithmetization/src/generation/prover_input.rs +++ b/evm_arithmetization/src/generation/prover_input.rs @@ -283,7 +283,6 @@ impl GenerationState { if self.jumpdest_table.is_none() { self.generate_jumpdest_table()?; - log::debug!("jdt = {:?}", self.jumpdest_table); } let Some(jumpdest_table) = &mut self.jumpdest_table else { @@ -297,11 +296,6 @@ impl GenerationState { if let Some(ctx_jumpdest_table) = jumpdest_table.get_mut(&context) && let Some(next_jumpdest_address) = ctx_jumpdest_table.pop() { - log::debug!( - "jumpdest_table_len = {:?}, ctx_jumpdest_table.len = {:?}", - jd_len, - ctx_jumpdest_table.len() - ); Ok((next_jumpdest_address + 1).into()) } else { jumpdest_table.remove(&context); @@ -323,11 +317,6 @@ impl GenerationState { if let Some(ctx_jumpdest_table) = jumpdest_table.get_mut(&context) && let Some(next_jumpdest_proof) = ctx_jumpdest_table.pop() { - log::debug!( - 
"jumpdest_table_len = {:?}, ctx_jumpdest_table.len = {:?}", - jd_len, - ctx_jumpdest_table.len() - ); Ok(next_jumpdest_proof.into()) } else { Err(ProgramError::ProverInputError( diff --git a/evm_arithmetization/src/generation/state.rs b/evm_arithmetization/src/generation/state.rs index d69a8d05d..bcc84d319 100644 --- a/evm_arithmetization/src/generation/state.rs +++ b/evm_arithmetization/src/generation/state.rs @@ -5,6 +5,7 @@ use anyhow::{anyhow, bail}; use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use itertools::Itertools; use keccak_hash::keccak; +use log::Level; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use smt_trie::code::{hash_bytecode_u256, hash_contract_bytecode}; @@ -170,7 +171,7 @@ pub(crate) trait State { } } else { #[cfg(not(test))] - log::info!("CPU halted after {} cycles", self.get_clock()); + self.log_info(format!("CPU halted after {} cycles", self.get_clock())); return Ok(()); } } @@ -262,6 +263,24 @@ pub(crate) trait State { let opcode = read_code_memory(generation_state, &mut row); (row, opcode) } + + /// Logs `msg` in `debug` mode. + #[inline] + fn log_debug(&self, msg: String) { + log::debug!("{}", msg); + } + + /// Logs `msg` in `info` mode. + #[inline] + fn log_info(&self, msg: String) { + log::info!("{}", msg); + } + + /// Logs `msg` at `level`. 
+ #[inline] + fn log(&self, level: Level, msg: String) { + log::log!(level, "{}", msg); + } } #[derive(Debug)] @@ -501,7 +520,7 @@ impl State for GenerationState { if registers.is_kernel { log_kernel_instruction(self, op); } else { - log::debug!("User instruction: {:?}", op); + self.log_debug(format!("User instruction: {:?}", op)); } fill_op_flag(op, &mut row); diff --git a/evm_arithmetization/src/lib.rs b/evm_arithmetization/src/lib.rs index 60a888dd4..fdb0c9e4a 100644 --- a/evm_arithmetization/src/lib.rs +++ b/evm_arithmetization/src/lib.rs @@ -222,7 +222,7 @@ use mpt_trie::partial_trie::HashedPartialTrie; // the time being, it will be able to be disabled with a feature flag // (`disable_jemalloc`) in order to allow users to use their own allocator if // needed. -#[cfg(not(any(target_env = "msvc", disable_jemalloc)))] +#[cfg(not(any(target_env = "msvc", feature = "disable_jemalloc")))] #[global_allocator] static GLOBAL: Jemalloc = Jemalloc; diff --git a/evm_arithmetization/src/memory/mod.rs b/evm_arithmetization/src/memory/mod.rs index 977eec198..2ec1d5f86 100644 --- a/evm_arithmetization/src/memory/mod.rs +++ b/evm_arithmetization/src/memory/mod.rs @@ -7,9 +7,6 @@ pub mod columns; pub mod memory_stark; pub mod segments; -// TODO: Move to CPU module, now that channels have been removed from the memory -// table. -pub(crate) const NUM_CHANNELS: usize = crate::cpu::membus::NUM_CHANNELS; /// The number of limbs holding the value at a memory address. /// Eight limbs of 32 bits can hold a `U256`. 
pub(crate) const VALUE_LIMBS: usize = 8; diff --git a/evm_arithmetization/src/witness/operation.rs b/evm_arithmetization/src/witness/operation.rs index ab555e57d..665c86696 100644 --- a/evm_arithmetization/src/witness/operation.rs +++ b/evm_arithmetization/src/witness/operation.rs @@ -16,11 +16,11 @@ use crate::cpu::columns::CpuColumnsView; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::assembler::BYTES_PER_OFFSET; use crate::cpu::kernel::constants::context_metadata::ContextMetadata; +use crate::cpu::membus::NUM_CHANNELS; use crate::cpu::simple_logic::eq_iszero::generate_pinv_diff; use crate::cpu::stack::MAX_USER_STACK_SIZE; use crate::extension_tower::BN_BASE; use crate::memory::segments::Segment; -use crate::memory::NUM_CHANNELS; use crate::poseidon::columns::POSEIDON_SPONGE_RATE; use crate::poseidon::poseidon_stark::{PoseidonGeneralOp, PoseidonOp, PoseidonSimpleOp}; use crate::util::u256_to_usize; @@ -160,11 +160,12 @@ pub(crate) fn generate_keccak_general>( val.low_u32() as u8 }) .collect_vec(); - log::debug!("Hashing {:?}", input); let hash = keccak(&input); push_no_write(generation_state, hash.into_uint()); + state.log_debug(format!("Hashing {:?}", input)); + keccak_sponge_log(state, base_address, input); state.push_memory(log_in1); @@ -787,7 +788,10 @@ pub(crate) fn generate_syscall>( push_with_write(state, &mut row, syscall_info)?; - log::debug!("Syscall to {}", KERNEL.offset_name(new_program_counter)); + state.log_debug(format!( + "Syscall to {}", + KERNEL.offset_name(new_program_counter) + )); byte_packing_log(state, base_address, bytes); state.push_arithmetic(range_check_op); @@ -833,11 +837,10 @@ pub(crate) fn generate_exit_kernel>( generation_state.registers.program_counter = program_counter; generation_state.registers.is_kernel = is_kernel_mode; generation_state.registers.gas_used = gas_used_val; - log::debug!( + state.log_debug(format!( "Exiting to {}, is_kernel={}", - program_counter, - is_kernel_mode - ); + program_counter, 
is_kernel_mode + )); state.push_cpu(row); @@ -1056,7 +1059,10 @@ pub(crate) fn generate_exception>( push_with_write(generation_state, &mut row, exc_info)?; byte_packing_log(state, base_address, bytes); - log::debug!("Exception to {}", KERNEL.offset_name(new_program_counter)); + state.log_debug(format!( + "Exception to {}", + KERNEL.offset_name(new_program_counter) + )); state.push_arithmetic(range_check_op); state.push_cpu(row); diff --git a/evm_arithmetization/src/witness/transition.rs b/evm_arithmetization/src/witness/transition.rs index 339f39efc..499b4b58a 100644 --- a/evm_arithmetization/src/witness/transition.rs +++ b/evm_arithmetization/src/witness/transition.rs @@ -282,14 +282,16 @@ pub(crate) fn log_kernel_instruction>(state: &mut S, o } else { log::Level::Trace }; - log::log!( + state.log( level, - "Cycle {}, ctx={}, pc={}, instruction={:?}, stack={:?}", - state.get_clock(), - state.get_context(), - KERNEL.offset_name(pc), - op, - state.get_generation_state().stack(), + format!( + "Cycle {}, ctx={}, pc={}, instruction={:?}, stack={:?}", + state.get_clock(), + state.get_context(), + KERNEL.offset_name(pc), + op, + state.get_generation_state().stack(), + ), ); assert!(pc < KERNEL.code.len(), "Kernel PC is out of range: {}", pc); diff --git a/mpt_trie/src/debug_tools/diff.rs b/mpt_trie/src/debug_tools/diff.rs index 3a4318c23..6ceec00a1 100644 --- a/mpt_trie/src/debug_tools/diff.rs +++ b/mpt_trie/src/debug_tools/diff.rs @@ -113,14 +113,15 @@ impl DiffPoint { } } -// TODO: Redo display method so this is more readable... 
impl Display for DiffPoint { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Point Diff {{depth: {}, ", self.depth)?; - write!(f, "Path: ({}), ", self.path)?; - write!(f, "Key: {:x} ", self.key)?; - write!(f, "A info: {} ", self.a_info)?; - write!(f, "B info: {}}}", self.b_info) + writeln!(f, "Point Diff {{")?; + writeln!(f, " Depth: {},", self.depth)?; + writeln!(f, " Path: ({}),", self.path)?; + writeln!(f, " Key: {:x},", self.key)?; + writeln!(f, " A info: {},", self.a_info)?; + writeln!(f, " B info: {}", self.b_info)?; + write!(f, "}}") } } @@ -136,18 +137,20 @@ pub struct NodeInfo { hash: H256, } -// TODO: Redo display method so this is more readable... impl Display for NodeInfo { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "(key: {:x} ", self.key)?; + write!(f, "NodeInfo {{ Key: 0x{:x}, ", self.key)?; match &self.value { Some(v) => write!(f, "Value: 0x{}, ", hex::encode(v))?, None => write!(f, "Value: N/A, ")?, } - write!(f, "Node type: {} ", self.node_type)?; - write!(f, "Trie hash: {:x})", self.hash) + write!( + f, + "Node type: {}, Trie hash: 0x{:x} }}", + self.node_type, self.hash + ) } } diff --git a/mpt_trie/src/debug_tools/query.rs b/mpt_trie/src/debug_tools/query.rs index e6a988cee..c531dd4e8 100644 --- a/mpt_trie/src/debug_tools/query.rs +++ b/mpt_trie/src/debug_tools/query.rs @@ -206,17 +206,13 @@ impl DebugQueryOutput { } } - // TODO: Make the output easier to read... fn fmt_query_header(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "Query Result {{")?; - - writeln!(f, "Queried Key: {}", self.k)?; - writeln!(f, "Node found: {}", self.node_found)?; - + writeln!(f, " Queried Key: {}", self.k)?; + writeln!(f, " Node Found: {}", self.node_found)?; writeln!(f, "}}") } - // TODO: Make the output easier to read... 
fn fmt_node_based_on_debug_params( f: &mut fmt::Formatter<'_>, seg: &TrieSegment, @@ -224,30 +220,25 @@ impl DebugQueryOutput { params: &DebugQueryParams, ) -> fmt::Result { let node_type = seg.node_type(); + let mut components = Vec::new(); if params.include_node_type { - write!(f, "{}", node_type)?; + components.push(format!("{}", node_type)); } - write!(f, "(")?; - if params.include_key_piece_per_node { if let Some(k_piece) = seg.get_key_piece_from_seg_if_present() { - write!(f, "key: {}", k_piece)?; + components.push(format!("Key: {}", k_piece)); } } if params.include_node_specific_values { if let Some(extra_seg_info) = extra_seg_info { - if params.include_key_piece_per_node { - write!(f, ", ")?; - } - - write!(f, "Extra Seg Info: {}", extra_seg_info)?; + components.push(format!("Extra Seg Info: {}", extra_seg_info)); } } - write!(f, ")")?; + write!(f, "({})", components.join(", "))?; Ok(()) } diff --git a/mpt_trie/src/nibbles.rs b/mpt_trie/src/nibbles.rs index d7422a93d..e388cb78e 100644 --- a/mpt_trie/src/nibbles.rs +++ b/mpt_trie/src/nibbles.rs @@ -118,6 +118,7 @@ macro_rules! impl_as_u64s_for_primitive { }; } +impl_as_u64s_for_primitive!(usize); impl_as_u64s_for_primitive!(u8); impl_as_u64s_for_primitive!(u16); impl_as_u64s_for_primitive!(u32); @@ -178,6 +179,7 @@ macro_rules! impl_to_nibbles { }; } +impl_to_nibbles!(usize); impl_to_nibbles!(u8); impl_to_nibbles!(u16); impl_to_nibbles!(u32); @@ -227,6 +229,19 @@ impl TryInto for Nibbles { } } +impl From for H256 { + fn from(val: Nibbles) -> Self { + let mut nib_bytes = val.bytes_be(); + if nib_bytes.len() < 32 { + for _ in nib_bytes.len()..32 { + nib_bytes.insert(0, 0); + } + } + + H256::from_slice(&nib_bytes) + } +} + impl From for NibblesIntern { fn from(val: U256) -> Self { let arr = val.as_u64s(); @@ -895,6 +910,23 @@ impl Nibbles { } } + /// Returns a slice of the internal bytes of packed nibbles. + /// Only the relevant bytes (up to `count` nibbles) are considered valid. 
+ pub fn as_byte_slice(&self) -> &[u8] { + // Calculate the number of full bytes needed to cover 'count' nibbles + let bytes_needed = (self.count + 1) / 2; // each nibble is half a byte + + // Safe because we are ensuring the slice size does not exceed the bounds of the + // array + unsafe { + // Convert the pointer to `packed` to a pointer to `u8` + let packed_ptr = self.packed.0.as_ptr() as *const u8; + + // Create a slice from this pointer and the number of needed bytes + std::slice::from_raw_parts(packed_ptr, bytes_needed) + } + } + const fn nibble_append_safety_asserts(&self, n: Nibble) { assert!( self.count < 64, @@ -1552,6 +1584,20 @@ mod tests { ); } + #[test] + fn nibbles_into_h256_works() { + let nibbles: Nibbles = Nibbles::from(0x0); + let h256_value: H256 = nibbles.into(); + assert_eq!(format!("0x{:x}", h256_value), ZERO_NIBS_64); + + let nibbles: Nibbles = Nibbles::from(2048); + let h256_value: H256 = nibbles.into(); + assert_eq!( + format!("0x{:x}", h256_value), + "0x0000000000000000000000000000000000000000000000000000000000000800", + ); + } + #[test] fn nibbles_from_str_works() { assert_eq!(format!("{:x}", Nibbles::from_str("0x0").unwrap()), "0x0"); @@ -1589,6 +1635,12 @@ mod tests { format!("{:x}", 0x1234_u64.to_nibbles_byte_padded()), "0x1234" ); + + assert_eq!(format!("{:x}", 0x1234_usize.to_nibbles()), "0x1234"); + assert_eq!( + format!("{:x}", 0x1234_usize.to_nibbles_byte_padded()), + "0x1234" + ); } #[test] @@ -1600,4 +1652,35 @@ mod tests { Nibbles::from_hex_prefix_encoding(&buf).unwrap(); } + + #[test] + fn nibbles_as_byte_slice_works() -> Result<(), StrToNibblesError> { + let cases = [ + (0x0, vec![]), + (0x1, vec![0x01]), + (0x12, vec![0x12]), + (0x123, vec![0x23, 0x01]), + ]; + + for case in cases.iter() { + let nibbles = Nibbles::from(case.0 as u64); + let byte_vec = nibbles.as_byte_slice().to_vec(); + assert_eq!(byte_vec, case.1.clone(), "Failed for input 0x{:X}", case.0); + } + + let input = 
"3ab76c381c0f8ea617ea96780ffd1e165c754b28a41a95922f9f70682c581351"; + let nibbles = Nibbles::from_str(input)?; + + let byte_vec = nibbles.as_byte_slice().to_vec(); + let mut expected_vec: Vec = hex::decode(input).expect("Invalid hex string"); + expected_vec.reverse(); + assert_eq!( + byte_vec, + expected_vec.clone(), + "Failed for input 0x{}", + input + ); + + Ok(()) + } } diff --git a/mpt_trie/src/partial_trie.rs b/mpt_trie/src/partial_trie.rs index 3d29e8c05..a593f57c8 100644 --- a/mpt_trie/src/partial_trie.rs +++ b/mpt_trie/src/partial_trie.rs @@ -107,6 +107,11 @@ pub trait PartialTrie: /// Returns an iterator over the trie that returns all values for every /// `Leaf` and `Hash` node. fn values(&self) -> impl Iterator; + + /// Returns `true` if the trie contains an element with the given key. + fn contains(&self, k: K) -> bool + where + K: Into; } /// Part of the trait that is not really part of the public interface but @@ -261,6 +266,13 @@ impl PartialTrie for StandardTrie { fn values(&self) -> impl Iterator { self.0.trie_values() } + + fn contains(&self, k: K) -> bool + where + K: Into, + { + self.0.trie_has_item_by_key(k) + } } impl TrieNodeIntern for StandardTrie { @@ -381,6 +393,13 @@ impl PartialTrie for HashedPartialTrie { fn values(&self) -> impl Iterator { self.node.trie_values() } + + fn contains(&self, k: K) -> bool + where + K: Into, + { + self.node.trie_has_item_by_key(k) + } } impl TrieNodeIntern for HashedPartialTrie { diff --git a/mpt_trie/src/trie_ops.rs b/mpt_trie/src/trie_ops.rs index 64f7f70b4..2f0e794d7 100644 --- a/mpt_trie/src/trie_ops.rs +++ b/mpt_trie/src/trie_ops.rs @@ -364,7 +364,7 @@ impl Node { where K: Into, { - let k = k.into(); + let k: Nibbles = k.into(); trace!("Deleting a leaf node with key {} if it exists", k); delete_intern(&self.clone(), k)?.map_or(Ok(None), |(updated_root, deleted_val)| { @@ -391,6 +391,14 @@ impl Node { pub(crate) fn trie_values(&self) -> impl Iterator { self.trie_items().map(|(_, v)| v) } + + pub(crate) 
fn trie_has_item_by_key(&self, k: K) -> bool + where + K: Into, + { + let k = k.into(); + self.trie_items().any(|(key, _)| key == k) + } } fn insert_into_trie_rec( @@ -1105,6 +1113,28 @@ mod tests { Ok(()) } + #[test] + fn existent_node_key_contains_returns_true() -> TrieOpResult<()> { + common_setup(); + + let mut trie = StandardTrie::default(); + trie.insert(0x1234, vec![91])?; + assert!(trie.contains(0x1234)); + + Ok(()) + } + + #[test] + fn non_existent_node_key_contains_returns_false() -> TrieOpResult<()> { + common_setup(); + + let mut trie = StandardTrie::default(); + trie.insert(0x1234, vec![91])?; + assert!(!trie.contains(0x5678)); + + Ok(()) + } + #[test] fn deleting_from_an_empty_trie_returns_none() -> TrieOpResult<()> { common_setup(); diff --git a/mpt_trie/src/trie_subsets.rs b/mpt_trie/src/trie_subsets.rs index 0f1ff138a..74f50d769 100644 --- a/mpt_trie/src/trie_subsets.rs +++ b/mpt_trie/src/trie_subsets.rs @@ -207,32 +207,33 @@ impl TrackedNodeInfo { } } -// TODO: Make this interface also work with &[ ... ]... /// Create a [`PartialTrie`] subset from a base trie given an iterator of keys /// of nodes that may or may not exist in the trie. All nodes traversed by the /// keys will not be hashed out in the trie subset. If the key does not exist in /// the trie at all, this is not considered an error and will still record which /// nodes were visited. -pub fn create_trie_subset(trie: &N, keys_involved: I) -> SubsetTrieResult +pub fn create_trie_subset( + trie: &N, + keys_involved: impl IntoIterator, +) -> SubsetTrieResult where N: PartialTrie, K: Into, - I: IntoIterator, { let mut tracked_trie = TrackedNode::new(trie); create_trie_subset_intern(&mut tracked_trie, keys_involved.into_iter()) } -// TODO: Make this interface also work with &[ ... ]... /// Create [`PartialTrie`] subsets from a given base `PartialTrie` given a /// iterator of keys per subset needed. See [`create_trie_subset`] for more /// info. 
-pub fn create_trie_subsets(base_trie: &N, keys_involved: O) -> SubsetTrieResult> +pub fn create_trie_subsets( + base_trie: &N, + keys_involved: impl IntoIterator>, +) -> SubsetTrieResult> where N: PartialTrie, K: Into, - I: IntoIterator, - O: IntoIterator, { let mut tracked_trie = TrackedNode::new(base_trie); @@ -804,6 +805,24 @@ mod tests { Ok(()) } + #[test] + fn sub_trie_existent_key_contains_returns_true() { + let trie = create_trie_with_large_entry_nodes(&[0x0]).unwrap(); + + let partial_trie = create_trie_subset(&trie, [0x1234]).unwrap(); + + assert!(partial_trie.contains(0x0)); + } + + #[test] + fn sub_trie_non_existent_key_contains_returns_false() { + let trie = create_trie_with_large_entry_nodes(&[0x0]).unwrap(); + + let partial_trie = create_trie_subset(&trie, [0x1234]).unwrap(); + + assert!(!partial_trie.contains(0x1)); + } + fn assert_all_keys_do_not_exist(trie: &TrieType, ks: impl Iterator) { for k in ks { assert!(trie.get(k).is_none()); diff --git a/trace_decoder/src/compact/compact_prestate_processing.rs b/trace_decoder/src/compact/compact_prestate_processing.rs index f44c2b30a..84e821b8e 100644 --- a/trace_decoder/src/compact/compact_prestate_processing.rs +++ b/trace_decoder/src/compact/compact_prestate_processing.rs @@ -14,7 +14,8 @@ use ethereum_types::{H256, U256}; use log::trace; use mpt_trie::{ nibbles::{FromHexPrefixError, Nibbles}, - partial_trie::{HashedPartialTrie, PartialTrie}, + partial_trie::HashedPartialTrie, + trie_ops::TrieOpError, }; use serde::de::DeserializeOwned; use thiserror::Error; @@ -26,7 +27,7 @@ use super::compact_to_partial_trie::{ use crate::{ decoding::TrieType, trace_protocol::TrieCompact, - types::{CodeHash, HashedAccountAddr, TrieRootHash}, + types::{HashedAccountAddr, TrieRootHash}, }; /// Result alias for any error that can occur when processing encoded compact @@ -120,10 +121,26 @@ pub enum CompactParsingError { /// Error when constructing a key from bytes. 
#[error("Unable to create key nibbles from bytes {0}")] KeyError(#[from] FromHexPrefixError), + + /// Failure due to an incompatible version. + #[error("Incompatible version, expected: {0}, actual: {1}")] + IncompatibleVersion(u8, u8), + + /// Failure due to a trie operation error. + #[error("Trie operation error: {0}")] + TrieOpError(TrieOpError), } +impl From for CompactParsingError { + fn from(err: TrieOpError) -> Self { + CompactParsingError::TrieOpError(err) + } +} + +/// Represents detailed error information about issues encountered +/// while processing byte streams with a cursor. #[derive(Debug)] -pub(crate) struct CursorBytesErrorInfo { +pub struct CursorBytesErrorInfo { error_start_pos: usize, bad_bytes_hex: String, } @@ -167,9 +184,12 @@ enum Opcode { EmptyRoot = 0x06, } +/// Compact witness entry. #[derive(Clone, Debug, EnumAsInner)] -pub(crate) enum WitnessEntry { +pub enum WitnessEntry { + /// An instruction. Instruction(Instruction), + /// A node. Node(NodeEntry), } @@ -182,15 +202,22 @@ impl Display for WitnessEntry { } } -// TODO: Ignore `NEW_TRIE` for now... +/// A type alias for a list of witness entries. #[derive(Clone, Debug, Eq, PartialEq)] -enum Instruction { +pub enum Instruction { + /// A leaf node. Leaf(Nibbles, RawValue), + /// An extension node. Extension(Nibbles), + /// A branch node. Branch(BranchMask), + /// A hash node. Hash(HashValue), + /// A code node. Code(RawCode), + /// An account leaf node. AccountLeaf(Nibbles, Nonce, Balance, HasCode, HasStorage), + /// An empty root node. EmptyRoot, } @@ -220,13 +247,20 @@ impl From for WitnessEntry { } } +/// A node witness entry. #[derive(Clone, Debug)] -pub(crate) enum NodeEntry { +pub enum NodeEntry { + /// A branch node. Branch([Option>; 16]), + /// A code node. Code(Vec), + /// An empty node. Empty, + /// A hash node. Hash(HashValue), + /// A leaf node. Leaf(Nibbles, LeafNodeData), + /// An extension node. 
Extension(Nibbles, Box), } @@ -243,8 +277,9 @@ impl Display for NodeEntry { } } +/// A value of a node data. #[derive(Clone, Debug)] -pub(super) struct ValueNodeData(pub(super) Vec); +pub struct ValueNodeData(pub(super) Vec); impl From> for ValueNodeData { fn from(v: Vec) -> Self { @@ -252,15 +287,21 @@ impl From> for ValueNodeData { } } +/// A leaf node data. #[derive(Clone, Debug)] -pub(super) enum LeafNodeData { +pub enum LeafNodeData { + /// A value node. Value(ValueNodeData), + /// An account node. Account(AccountNodeData), } +/// An account node code. #[derive(Clone, Debug)] -pub(super) enum AccountNodeCode { +pub enum AccountNodeCode { + /// A code node. CodeNode(Vec), + /// A hash node. HashNode(TrieRootHash), } @@ -276,12 +317,17 @@ impl From for AccountNodeCode { } } +/// An account node data. #[derive(Clone, Debug)] -pub(super) struct AccountNodeData { - pub(super) nonce: Nonce, - pub(super) balance: Balance, - pub(super) storage_trie: Option, - pub(super) account_node_code: Option, +pub struct AccountNodeData { + /// The nonce of the account. + pub nonce: Nonce, + /// The balance of the account. + pub balance: Balance, + /// The storage trie of the account. + pub storage_trie: Option, + /// The code of the account. + pub account_node_code: Option, } impl AccountNodeData { @@ -300,9 +346,11 @@ impl AccountNodeData { } } +/// A witness header. #[derive(Debug)] -pub(crate) struct Header { - version: u8, +pub struct Header { + /// The version of the witness. 
+ pub version: u8, } impl Display for Header { @@ -426,7 +474,7 @@ impl ParserState { traverser, NodeEntry::Extension(k, Box::new(node.clone())), ), - _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), + _ => Self::invalid_witness_err(2, traverser), } } WitnessEntry::Instruction(Instruction::Code(c)) => { @@ -549,7 +597,7 @@ impl ParserState { WitnessEntry::Node(node) => { Self::try_create_and_insert_partial_trie_from_node(&node, None, 2, traverser) } - _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), + _ => Self::invalid_witness_err(2, traverser), } } @@ -566,7 +614,7 @@ impl ParserState { WitnessEntry::Node(NodeEntry::Hash(h)) => { Ok((2, Some(AccountNodeCode::HashNode(h)), None)) } - _ => Self::invalid_witness_err(2, TraverserDirection::Backwards, traverser), + _ => Self::invalid_witness_err(2, traverser), } } @@ -593,7 +641,7 @@ impl ParserState { traverser, ) } - _ => Self::invalid_witness_err(3, TraverserDirection::Backwards, traverser), + _ => Self::invalid_witness_err(3, traverser), } } @@ -608,7 +656,7 @@ impl ParserState { let s_trie_out = create_storage_partial_trie_from_compact_node(storage_root_node)?; Ok((n, account_node_code, Some(s_trie_out.trie))) } - None => Self::invalid_witness_err(n, TraverserDirection::Backwards, traverser), + None => Self::invalid_witness_err(n, traverser), } } @@ -621,24 +669,9 @@ impl ParserState { fn invalid_witness_err( n: usize, - t_dir: TraverserDirection, traverser: &mut CollapsableWitnessEntryTraverser, ) -> CompactParsingResult { - let adjacent_elems_buf = match t_dir { - TraverserDirection::Forwards => traverser.get_next_n_elems(n).cloned().collect(), - TraverserDirection::Backwards => traverser.get_prev_n_elems(n).cloned().collect(), - TraverserDirection::Both => { - let prev_elems = traverser.get_prev_n_elems(n); - let next_elems_including_curr = traverser.get_next_n_elems(n + 1); - let prev_elems_vec: Vec<_> = prev_elems.collect(); - - prev_elems_vec - 
.into_iter() - .chain(next_elems_including_curr) - .cloned() - .collect() - } - }; + let adjacent_elems_buf = traverser.get_prev_n_elems(n).cloned().collect(); Err(CompactParsingError::InvalidWitnessFormat( adjacent_elems_buf, @@ -684,10 +717,7 @@ impl WitnessBytes { Ok((header, self.instrs)) } - // TODO: Look at removing code duplication... - // TODO: Move behind a feature flag... - // TODO: Fairly hacky... - // TODO: Replace `unwrap()`s with `Result`s? + #[allow(dead_code)] fn process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure( self, ) -> (InstructionAndBytesParsedFromBuf, CompactParsingResult<()>) { @@ -697,6 +727,7 @@ impl WitnessBytes { (instr_and_bytes_buf.into(), res) } + #[allow(dead_code)] fn process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure_intern( mut self, instr_and_bytes_buf: &mut Vec<(Instruction, Vec)>, @@ -815,9 +846,6 @@ impl WitnessBytes { self.byte_cursor.read_t::("code size") })?; - // TODO: process actual storage trie probably? Wait until we know what is going - // on here. - self.push_entry(Instruction::AccountLeaf( key, nonce, @@ -1009,7 +1037,6 @@ impl CompactCursorFast { #[derive(Debug)] struct DebugCompactCursor(CompactCursorFast); -// TODO: There are some decent opportunities to reduce code duplication here... impl CompactCursor for DebugCompactCursor { fn new(bytes: Vec) -> Self { Self(CompactCursorFast::new(bytes)) @@ -1136,8 +1163,6 @@ struct CollapsableWitnessEntryTraverser<'a> { entry_cursor: CursorMut<'a, WitnessEntry>, } -// TODO: For now, lets just use pure values in the buffer, but we probably want -// to switch over to references later... impl<'a> CollapsableWitnessEntryTraverser<'a> { fn advance(&mut self) { self.entry_cursor.move_next(); @@ -1185,15 +1210,6 @@ impl<'a> CollapsableWitnessEntryTraverser<'a> { buf.extend(self.get_next_n_elems(n).cloned()); } - // Inclusive. 
- fn replace_next_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { - for _ in 0..n { - self.entry_cursor.remove_current(); - } - - self.entry_cursor.insert_after(entry) - } - // Inclusive. fn replace_prev_n_entries_with_single_entry(&mut self, n: usize, entry: WitnessEntry) { for _ in 0..n { @@ -1221,13 +1237,6 @@ const fn try_get_node_entry_from_witness_entry(entry: &WitnessEntry) -> Option<& } } -#[derive(Debug)] -enum TraverserDirection { - Forwards, - Backwards, - Both, -} - #[derive(Debug, Default)] pub(crate) struct PartialTriePreImages { pub state: HashedPartialTrie, @@ -1284,6 +1293,7 @@ fn process_compact_prestate_common( // TODO: Move behind a feature flag just used for debugging (but probably not // `debug`)... +#[allow(dead_code)] fn parse_just_to_instructions(bytes: Vec) -> CompactParsingResult> { let witness_bytes = WitnessBytes::::new(bytes); let (_, entries) = witness_bytes.process_into_instructions_and_header()?; @@ -1328,6 +1338,7 @@ impl Display for InstructionAndBytesParsedFromBuf { } // TODO: Also move behind a feature flag... +#[allow(dead_code)] fn parse_to_instructions_and_bytes_for_instruction( bytes: Vec, ) -> (InstructionAndBytesParsedFromBuf, CompactParsingResult<()>) { @@ -1336,7 +1347,6 @@ fn parse_to_instructions_and_bytes_for_instruction( .process_into_instructions_and_keep_bytes_parsed_to_instruction_and_bail_on_first_failure() } -// TODO: This could probably be made a bit faster... 
fn key_bytes_to_nibbles(bytes: &[u8]) -> Nibbles { let mut key = Nibbles::default(); @@ -1427,15 +1437,12 @@ fn get_bytes_from_cursor(cursor: &mut C, cursor_start_pos: u64 #[cfg(test)] mod tests { - use mpt_trie::{nibbles::Nibbles, partial_trie::PartialTrie}; + use mpt_trie::nibbles::Nibbles; use super::{key_bytes_to_nibbles, parse_just_to_instructions, Instruction}; - use crate::compact::{ - compact_prestate_processing::ParserState, - complex_test_payloads::{ - TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3, TEST_PAYLOAD_4, TEST_PAYLOAD_5, - TEST_PAYLOAD_6, - }, + use crate::compact::complex_test_payloads::{ + TEST_PAYLOAD_1, TEST_PAYLOAD_2, TEST_PAYLOAD_3, TEST_PAYLOAD_4, TEST_PAYLOAD_5, + TEST_PAYLOAD_6, }; const SIMPLE_PAYLOAD_STR: &str = "01004110443132333400411044313233340218300042035044313233350218180158200000000000000000000000000000000000000000000000000000000000000012"; @@ -1453,23 +1460,6 @@ mod tests { hex::decode(b_str).unwrap() } - // TODO: Refactor (or remove?) this test as it will crash when it tries to - // deserialize the trie leaves into `AccountRlp`... - #[test] - #[ignore] - fn simple_full() { - init(); - - let bytes = hex::decode(SIMPLE_PAYLOAD_STR).unwrap(); - let (header, parser) = ParserState::create_and_extract_header(bytes).unwrap(); - - assert_eq!(header.version, 1); - let _ = match parser.parse() { - Ok(trie) => trie, - Err(err) => panic!("{}", err), - }; - } - #[test] fn simple_instructions_are_parsed_correctly() { init(); diff --git a/trace_decoder/src/compact/compact_to_partial_trie.rs b/trace_decoder/src/compact/compact_to_partial_trie.rs index d7fac0644..e38834793 100644 --- a/trace_decoder/src/compact/compact_to_partial_trie.rs +++ b/trace_decoder/src/compact/compact_to_partial_trie.rs @@ -1,10 +1,7 @@ //! Logic to convert the decoded compact into a `mpt_trie` //! [`HashedPartialTrie`]. This is the final stage in the decoding process. 
-use std::{ - collections::HashMap, - fmt::{self, Display}, -}; +use std::collections::HashMap; use evm_arithmetization::generation::mpt::AccountRlp; use log::trace; @@ -19,11 +16,8 @@ use super::compact_prestate_processing::{ }; use crate::{ decoding::TrieType, - types::{ - CodeHash, HashedAccountAddr, HashedAccountAddrNibbles, TrieRootHash, EMPTY_CODE_HASH, - EMPTY_TRIE_HASH, - }, - utils::{h_addr_nibs_to_h256, hash}, + types::{CodeHash, HashedAccountAddr, HashedAccountAddrNibbles, TrieRootHash, EMPTY_CODE_HASH}, + utils::hash, }; /// A trait to represent building either a state or storage trie from compact @@ -64,7 +58,7 @@ trait CompactToPartialTrieExtractionOutput { /// Insert a hash node with our key that we constructed so far from /// traversing down the trie. fn process_hash(&mut self, curr_key: Nibbles, hash: TrieRootHash) -> CompactParsingResult<()> { - self.trie().insert(curr_key, hash); + self.trie().insert(curr_key, hash)?; Ok(()) } @@ -96,9 +90,12 @@ trait CompactToPartialTrieExtractionOutput { fn trie(&mut self) -> &mut HashedPartialTrie; } +/// An error that occurs when we encounter a node type that we did not expect. #[derive(Debug)] -pub(super) enum UnexpectedCompactNodeType { +pub enum UnexpectedCompactNodeType { + /// We expected a storage node, but got account leaf instead. AccountLeaf, + /// We expected a storage node, but got a code leaf instead. 
Code, } @@ -157,7 +154,7 @@ pub(super) struct StorageTrieExtractionOutput { } impl CompactToPartialTrieExtractionOutput for StorageTrieExtractionOutput { - fn process_code(&mut self, c_bytes: Vec) -> CompactParsingResult<()> { + fn process_code(&mut self, _: Vec) -> CompactParsingResult<()> { Err(CompactParsingError::UnexpectedNodeForTrieType( UnexpectedCompactNodeType::Code, TrieType::Storage, @@ -170,8 +167,8 @@ impl CompactToPartialTrieExtractionOutput for StorageTrieExtractionOutput { leaf_key: &Nibbles, leaf_node_data: &LeafNodeData, ) -> CompactParsingResult<()> { - /// If we encounter an `AccountLeaf` when processing a storage trie, - /// then something is wrong. + // If we encounter an `AccountLeaf` when processing a storage trie, + // then something is wrong. process_leaf_common( &mut self.trie, curr_key, @@ -191,7 +188,7 @@ impl CompactToPartialTrieExtractionOutput for StorageTrieExtractionOutput { } } -fn process_leaf_common CompactParsingResult>>( +fn process_leaf_common CompactParsingResult>( trie: &mut HashedPartialTrie, curr_key: Nibbles, leaf_key: &Nibbles, @@ -202,10 +199,12 @@ fn process_leaf_common CompactParsingRes let l_val = match leaf_node_data { LeafNodeData::Value(v_bytes) => rlp::encode(&v_bytes.0).to_vec(), - LeafNodeData::Account(acc_data) => account_leaf_proc_f(acc_data, &full_k)?, + LeafNodeData::Account(acc_data) => { + rlp::encode(&account_leaf_proc_f(acc_data, &full_k)?).to_vec() + } }; - trie.insert(full_k, l_val); + trie.insert(full_k, l_val)?; Ok(()) } @@ -235,8 +234,6 @@ where Ok(output) } -// TODO: Consider putting in some asserts that invalid nodes are not appearing -// in the wrong trie type (eg. 
account ) fn create_partial_trie_from_compact_node_rec( curr_key: Nibbles, curr_node: &NodeEntry, @@ -262,7 +259,7 @@ fn process_account_node( h_addr_nibs: &HashedAccountAddrNibbles, c_hash_to_code: &mut HashMap>, h_addr_to_storage_trie: &mut HashMap, -) -> Vec { +) -> AccountRlp { let code_hash = match &acc_data.account_node_code { Some(AccountNodeCode::CodeNode(c_bytes)) => { let c_hash = hash(c_bytes); @@ -281,13 +278,10 @@ fn process_account_node( h_addr_to_storage_trie.insert(h_addr, s_trie); - let account = AccountRlp { + AccountRlp { nonce: acc_data.nonce, balance: acc_data.balance, storage_root, code_hash, - }; - - // TODO: Avoid the unnecessary allocation... - rlp::encode(&account).into() + } } diff --git a/trace_decoder/src/compact/complex_test_payloads.rs b/trace_decoder/src/compact/complex_test_payloads.rs index 2f1f4bd32..785b452a2 100644 --- a/trace_decoder/src/compact/complex_test_payloads.rs +++ b/trace_decoder/src/compact/complex_test_payloads.rs @@ -4,7 +4,7 @@ use mpt_trie::partial_trie::PartialTrie; use super::{ compact_prestate_processing::{ process_compact_prestate, process_compact_prestate_debug, CompactParsingResult, - PartialTriePreImages, ProcessedCompactOutput, + ProcessedCompactOutput, }, compact_to_partial_trie::StateTrieExtractionOutput, }; @@ -40,6 +40,7 @@ pub(crate) struct TestProtocolInputAndRoot { } impl TestProtocolInputAndRoot { + #[allow(dead_code)] pub(crate) fn parse_and_check_hash_matches(self) { self.parse_and_check_hash_matches_common(process_compact_prestate); } diff --git a/trace_decoder/src/decoding.rs b/trace_decoder/src/decoding.rs index 97ed2c21c..6941bd4aa 100644 --- a/trace_decoder/src/decoding.rs +++ b/trace_decoder/src/decoding.rs @@ -14,19 +14,20 @@ use mpt_trie::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, Node, PartialTrie}, special_query::path_for_query, - trie_ops::TrieOpError, + trie_ops::{TrieOpError, TrieOpResult}, trie_subsets::{create_trie_subset, SubsetTrieError}, - utils::{IntoTrieKey, 
TriePath, TrieSegment}, + utils::{IntoTrieKey, TriePath}, }; use thiserror::Error; use crate::{ + compact::compact_prestate_processing::CompactParsingError, processed_block_trace::{ NodesUsedByTxn, ProcessedBlockTrace, ProcessedTxnInfo, StateTrieWrites, TxnMetaState, }, types::{ HashedAccountAddr, HashedNodeAddr, HashedStorageAddr, HashedStorageAddrNibbles, - OtherBlockData, TriePathIter, TrieRootHash, TxnIdx, EMPTY_ACCOUNT_BYTES_RLPED, + OtherBlockData, TrieRootHash, TxnIdx, EMPTY_ACCOUNT_BYTES_RLPED, ZERO_STORAGE_SLOT_VAL_RLPED, }, utils::{hash, optional_field, optional_field_hex, update_val_if_some}, @@ -165,6 +166,10 @@ pub enum TraceParsingErrorReason { /// Failure due to a trie operation error. #[error("Trie operation error: {0}")] TrieOpError(TrieOpError), + + /// Failure due to a compact parsing error. + #[error("Compact parsing error: {0}")] + CompactParsingError(CompactParsingError), } impl From for TraceParsingError { @@ -174,6 +179,13 @@ impl From for TraceParsingError { } } +impl From for TraceParsingError { + fn from(err: CompactParsingError) -> Self { + // Convert CompactParsingError into TraceParsingError + TraceParsingError::new(TraceParsingErrorReason::CompactParsingError(err)) + } +} + /// An enum to cover all Ethereum trie types (see https://ethereum.github.io/yellowpaper/paper.pdf for details). 
#[derive(Debug)] pub enum TrieType { @@ -294,13 +306,13 @@ impl ProcessedBlockTrace { trie_state: &mut PartialTrieState, meta: &TxnMetaState, txn_idx: TxnIdx, - ) { + ) -> TrieOpResult<()> { let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); - trie_state.txn.insert(txn_k, meta.txn_bytes()); + trie_state.txn.insert(txn_k, meta.txn_bytes())?; trie_state .receipt - .insert(txn_k, meta.receipt_node_bytes.as_ref()); + .insert(txn_k, meta.receipt_node_bytes.as_ref()) } /// If the account does not have a storage trie or does but is not @@ -342,7 +354,7 @@ impl ProcessedBlockTrace { )?; let txn_k = Nibbles::from_bytes_be(&rlp::encode(&txn_idx)).unwrap(); - // TODO: Replace cast once `mpt_trie` supports `into` for `usize... + let transactions_trie = create_trie_subset_wrapped(&curr_block_tries.txn, once(txn_k), TrieType::Txn)?; @@ -351,7 +363,6 @@ impl ProcessedBlockTrace { let storage_tries = create_minimal_storage_partial_tries( &curr_block_tries.storage, - &nodes_used_by_txn.state_accounts_with_no_accesses_but_storage_tries, nodes_used_by_txn.storage_accesses.iter(), &delta_application_out.additional_storage_trie_paths_to_not_hash, )?; @@ -367,20 +378,18 @@ impl ProcessedBlockTrace { fn apply_deltas_to_trie_state( trie_state: &mut PartialTrieState, deltas: &NodesUsedByTxn, - meta: &TxnMetaState, ) -> TraceParsingResult { let mut out = TrieDeltaApplicationOutput::default(); for (hashed_acc_addr, storage_writes) in deltas.storage_writes.iter() { - let mut storage_trie = - trie_state.storage.get_mut(hashed_acc_addr).ok_or_else(|| { - let hashed_acc_addr = *hashed_acc_addr; - let mut e = TraceParsingError::new( - TraceParsingErrorReason::MissingAccountStorageTrie(hashed_acc_addr), - ); - e.h_addr(hashed_acc_addr); - e - })?; + let storage_trie = trie_state.storage.get_mut(hashed_acc_addr).ok_or_else(|| { + let hashed_acc_addr = *hashed_acc_addr; + let mut e = TraceParsingError::new( + TraceParsingErrorReason::MissingAccountStorageTrie(hashed_acc_addr), + ); 
+ e.h_addr(hashed_acc_addr); + e + })?; for (slot, val) in storage_writes .iter() @@ -401,6 +410,7 @@ impl ProcessedBlockTrace { storage_trie, &slot, ) + .map_err(TraceParsingError::from)? { out.additional_storage_trie_paths_to_not_hash .entry(*hashed_acc_addr) @@ -432,7 +442,8 @@ impl ProcessedBlockTrace { let updated_account_bytes = rlp::encode(&account); trie_state .state - .insert(val_k, updated_account_bytes.to_vec()); + .insert(val_k, updated_account_bytes.to_vec()) + .map_err(TraceParsingError::from)?; } // Remove any accounts that self-destructed. @@ -456,6 +467,7 @@ impl ProcessedBlockTrace { &mut trie_state.state, &k, ) + .map_err(TraceParsingError::from)? { out.additional_state_trie_paths_to_not_hash .push(remaining_account_key); @@ -475,12 +487,14 @@ impl ProcessedBlockTrace { fn delete_node_and_report_remaining_key_if_branch_collapsed( trie: &mut HashedPartialTrie, delete_k: &Nibbles, - ) -> Option { + ) -> TrieOpResult> { let old_trace = Self::get_trie_trace(trie, delete_k); - trie.delete(*delete_k); + trie.delete(*delete_k)?; let new_trace = Self::get_trie_trace(trie, delete_k); - Self::node_deletion_resulted_in_a_branch_collapse(&old_trace, &new_trace) + Ok(Self::node_deletion_resulted_in_a_branch_collapse( + &old_trace, &new_trace, + )) } /// Comparing the path of the deleted key before and after the deletion, @@ -605,7 +619,9 @@ impl ProcessedBlockTrace { acc_data.balance += amt; - state.insert(h_addr_nibs, rlp::encode(&acc_data).to_vec()); + state + .insert(h_addr_nibs, rlp::encode(&acc_data).to_vec()) + .map_err(TraceParsingError::from)?; } Ok(()) @@ -642,13 +658,11 @@ impl ProcessedBlockTrace { // do this clone every iteration. 
let tries_at_start_of_txn = curr_block_tries.clone(); - Self::update_txn_and_receipt_tries(curr_block_tries, &txn_info.meta, txn_idx); + Self::update_txn_and_receipt_tries(curr_block_tries, &txn_info.meta, txn_idx) + .map_err(TraceParsingError::from)?; - let delta_out = Self::apply_deltas_to_trie_state( - curr_block_tries, - &txn_info.nodes_used_by_txn, - &txn_info.meta, - )?; + let delta_out = + Self::apply_deltas_to_trie_state(curr_block_tries, &txn_info.nodes_used_by_txn)?; let tries = Self::create_minimal_partial_tries_needed_by_txn( &tries_at_start_of_txn, @@ -754,25 +768,6 @@ fn create_dummy_gen_input( create_dummy_gen_input_common(other_data, extra_data, sub_tries) } -fn create_dummy_gen_input_with_state_addrs_accessed( - other_data: &OtherBlockData, - extra_data: &ExtraBlockData, - final_tries: &PartialTrieState, - account_addrs_accessed: impl Iterator, -) -> TraceParsingResult { - let sub_tries = create_dummy_proof_trie_inputs( - final_tries, - create_minimal_state_partial_trie( - &final_tries.state, - account_addrs_accessed, - iter::empty(), - )?, - ); - Ok(create_dummy_gen_input_common( - other_data, extra_data, sub_tries, - )) -} - fn create_dummy_gen_input_common( other_data: &OtherBlockData, extra_data: &ExtraBlockData, @@ -855,7 +850,6 @@ fn create_minimal_state_partial_trie( // trie somewhere else! This is a big hack! 
fn create_minimal_storage_partial_tries<'a>( storage_tries: &HashMap, - state_accounts_with_no_accesses_but_storage_tries: &HashMap, accesses_per_account: impl Iterator)>, additional_storage_trie_paths_to_not_hash: &HashMap>, ) -> TraceParsingResult> { diff --git a/trace_decoder/src/lib.rs b/trace_decoder/src/lib.rs index 90eea0844..ff83fe4eb 100644 --- a/trace_decoder/src/lib.rs +++ b/trace_decoder/src/lib.rs @@ -115,9 +115,6 @@ #![deny(rustdoc::broken_intra_doc_links)] #![deny(missing_debug_implementations)] #![deny(missing_docs)] -// TODO: address these lints -#![allow(unused)] -#![allow(private_interfaces)] /// Provides debugging tools and a compact representation of state and storage /// tries, used in tests. @@ -133,6 +130,3 @@ pub mod trace_protocol; pub mod types; /// Defines useful functions necessary to the other modules. pub mod utils; - -use trace_protocol::{BlockTrace, TxnInfo}; -use types::OtherBlockData; diff --git a/trace_decoder/src/processed_block_trace.rs b/trace_decoder/src/processed_block_trace.rs index b0f952a99..9da370a4e 100644 --- a/trace_decoder/src/processed_block_trace.rs +++ b/trace_decoder/src/processed_block_trace.rs @@ -9,21 +9,21 @@ use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use crate::compact::compact_prestate_processing::{ - process_compact_prestate_debug, PartialTriePreImages, ProcessedCompactOutput, + process_compact_prestate_debug, CompactParsingError, CompactParsingResult, + PartialTriePreImages, ProcessedCompactOutput, }; -use crate::decoding::TraceParsingResult; +use crate::decoding::{TraceParsingError, TraceParsingResult}; use crate::trace_protocol::{ BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, SeparateStorageTriesPreImage, SeparateTriePreImage, SeparateTriePreImages, TrieCompact, TrieUncompressed, TxnInfo, }; use crate::types::{ - CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, HashedStorageAddr, - HashedStorageAddrNibbles, 
OtherBlockData, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH, + CodeHash, CodeHashResolveFunc, HashedAccountAddr, HashedNodeAddr, HashedStorageAddrNibbles, + OtherBlockData, TrieRootHash, EMPTY_CODE_HASH, EMPTY_TRIE_HASH, }; use crate::utils::{ - h_addr_nibs_to_h256, hash, print_value_and_hash_nodes_of_storage_trie, - print_value_and_hash_nodes_of_trie, + hash, print_value_and_hash_nodes_of_storage_trie, print_value_and_hash_nodes_of_trie, }; #[derive(Debug)] @@ -47,7 +47,7 @@ impl BlockTrace { F: CodeHashResolveFunc, { let processed_block_trace = - self.into_processed_block_trace(p_meta, other_data.b_data.withdrawals.clone()); + self.into_processed_block_trace(p_meta, other_data.b_data.withdrawals.clone())?; processed_block_trace.into_txn_proof_gen_ir(other_data) } @@ -56,13 +56,13 @@ impl BlockTrace { self, p_meta: &ProcessingMeta, withdrawals: Vec<(Address, U256)>, - ) -> ProcessedBlockTrace + ) -> TraceParsingResult where F: CodeHashResolveFunc, { // The compact format is able to provide actual code, so if it does, we should // take advantage of it. 
- let pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images); + let pre_image_data = process_block_trace_trie_pre_images(self.trie_pre_images)?; print_value_and_hash_nodes_of_trie(&pre_image_data.tries.state); @@ -75,12 +75,8 @@ impl BlockTrace { .state .items() .filter_map(|(addr, data)| { - data.as_val().map(|data| { - ( - h_addr_nibs_to_h256(&addr), - rlp::decode::(data).unwrap(), - ) - }) + data.as_val() + .map(|data| (addr.into(), rlp::decode::(data).unwrap())) }) .collect(); @@ -115,11 +111,11 @@ impl BlockTrace { }) .collect::>(); - ProcessedBlockTrace { + Ok(ProcessedBlockTrace { tries: pre_image_data.tries, txn_info, withdrawals, - } + }) } } @@ -146,27 +142,31 @@ impl From for ProcessedBlockTracePreImages { fn process_block_trace_trie_pre_images( block_trace_pre_images: BlockTraceTriePreImages, -) -> ProcessedBlockTracePreImages { +) -> TraceParsingResult { match block_trace_pre_images { BlockTraceTriePreImages::Separate(t) => process_separate_trie_pre_images(t), BlockTraceTriePreImages::Combined(t) => process_combined_trie_pre_images(t), } } -fn process_combined_trie_pre_images(tries: CombinedPreImages) -> ProcessedBlockTracePreImages { - process_compact_trie(tries.compact) +fn process_combined_trie_pre_images( + tries: CombinedPreImages, +) -> TraceParsingResult { + Ok(process_compact_trie(tries.compact).map_err(TraceParsingError::from)?) 
} -fn process_separate_trie_pre_images(tries: SeparateTriePreImages) -> ProcessedBlockTracePreImages { +fn process_separate_trie_pre_images( + tries: SeparateTriePreImages, +) -> TraceParsingResult { let tries = PartialTriePreImages { state: process_state_trie(tries.state), storage: process_storage_tries(tries.storage), }; - ProcessedBlockTracePreImages { + Ok(ProcessedBlockTracePreImages { tries, extra_code_hash_mappings: None, - } + }) } fn process_state_trie(trie: SeparateTriePreImage) -> HashedPartialTrie { @@ -197,14 +197,17 @@ fn process_multiple_storage_tries( todo!() } -fn process_compact_trie(trie: TrieCompact) -> ProcessedBlockTracePreImages { - // TODO: Wrap in proper result type... - let out = process_compact_prestate_debug(trie).unwrap(); +fn process_compact_trie(trie: TrieCompact) -> CompactParsingResult { + let out = process_compact_prestate_debug(trie)?; - // TODO: Make this into a result... - assert!(out.header.version_is_compatible(COMPATIBLE_HEADER_VERSION)); + if !out.header.version_is_compatible(COMPATIBLE_HEADER_VERSION) { + return Err(CompactParsingError::IncompatibleVersion( + COMPATIBLE_HEADER_VERSION, + out.header.version, + )); + } - out.into() + Ok(out.into()) } /// Structure storing a function turning a `CodeHash` into bytes. diff --git a/trace_decoder/src/types.rs b/trace_decoder/src/types.rs index b1e2da5ca..3078ac22e 100644 --- a/trace_decoder/src/types.rs +++ b/trace_decoder/src/types.rs @@ -1,39 +1,33 @@ use ethereum_types::{Address, H256, U256}; -use evm_arithmetization::{ - generation::GenerationInputs, - proof::{BlockHashes, BlockMetadata}, -}; -use mpt_trie::{nibbles::Nibbles, partial_trie::HashedPartialTrie}; +use evm_arithmetization::proof::{BlockHashes, BlockMetadata}; +use mpt_trie::nibbles::Nibbles; use serde::{Deserialize, Serialize}; -// TODO: Make these types in the doc comments point to the actual types... -/// A type alias for `[U256; 8]` of a bloom filter. +/// A type alias for `[`[`U256`]`; 8]` of a bloom filter. 
pub type Bloom = [U256; 8]; -/// A type alias for `H256` of a code hash. +/// A type alias for [`H256`] of a code hash. pub type CodeHash = H256; -/// A type alias for `H256` of an account address's hash. +/// A type alias for [`H256`] of an account address's hash. pub type HashedAccountAddr = H256; -/// A type alias for `Nibbles` of an account address's hash. +/// A type alias for [`Nibbles`] of an account address's hash. pub type HashedAccountAddrNibbles = Nibbles; -/// A type alias for `H256` of a node address's hash. +/// A type alias for [`H256`] of a node address's hash. pub type HashedNodeAddr = H256; -/// A type alias for `H256` of a storage address's hash. +/// A type alias for [`H256`] of a storage address's hash. pub type HashedStorageAddr = H256; -/// A type alias for `Nibbles` of a hashed storage address's nibbles. +/// A type alias for [`Nibbles`] of a hashed storage address's nibbles. pub type HashedStorageAddrNibbles = Nibbles; -/// A type alias for `H256` of a storage address. +/// A type alias for [`H256`] of a storage address. pub type StorageAddr = H256; -/// A type alias for `H256` of a storage address's nibbles. +/// A type alias for [`H256`] of a storage address's nibbles. pub type StorageAddrNibbles = H256; -/// A type alias for `U256` of a storage value. +/// A type alias for [`U256`] of a storage value. pub type StorageVal = U256; -/// A type alias for `H256` of a trie root hash. +/// A type alias for [`H256`] of a trie root hash. pub type TrieRootHash = H256; -/// A type alias for `usize` of a transaction's index within a block. +/// A type alias for [`usize`] of a transaction's index within a block. pub type TxnIdx = usize; -pub(crate) type TriePathIter = mpt_trie::special_query::TriePathIter; - /// A function which turns a code hash into bytes. 
pub trait CodeHashResolveFunc = Fn(&CodeHash) -> Vec; diff --git a/trace_decoder/src/utils.rs b/trace_decoder/src/utils.rs index ee48e770f..7abfe00df 100644 --- a/trace_decoder/src/utils.rs +++ b/trace_decoder/src/utils.rs @@ -1,8 +1,7 @@ use ethereum_types::H256; use keccak_hash::keccak; -use log::debug; +use log::trace; use mpt_trie::{ - nibbles::Nibbles, partial_trie::{HashedPartialTrie, PartialTrie}, trie_ops::ValOrHash, }; @@ -22,7 +21,7 @@ pub(crate) fn update_val_if_some(target: &mut T, opt: Option) { // TODO: Move under a feature flag... pub(crate) fn print_value_and_hash_nodes_of_trie(trie: &HashedPartialTrie) { let trie_elems = print_value_and_hash_nodes_of_trie_common(trie); - println!("State trie {:#?}", trie_elems); + trace!("State trie {:#?}", trie_elems); } // TODO: Move under a feature flag... @@ -31,7 +30,7 @@ pub(crate) fn print_value_and_hash_nodes_of_storage_trie( trie: &HashedPartialTrie, ) { let trie_elems = print_value_and_hash_nodes_of_trie_common(trie); - debug!("Storage trie for {:x}: {:#?}", s_trie_addr, trie_elems); + trace!("Storage trie for {:x}: {:#?}", s_trie_addr, trie_elems); } // TODO: Move under a feature flag... @@ -47,18 +46,6 @@ fn print_value_and_hash_nodes_of_trie_common(trie: &HashedPartialTrie) -> Vec H256 { - // TODO: HACK! This fix really needs to be in `mpt_trie`... 
- let mut nib_bytes = h_addr_nibs.bytes_be(); - if nib_bytes.len() < 32 { - for _ in nib_bytes.len()..32 { - nib_bytes.insert(0, 0); - } - } - - H256::from_slice(&nib_bytes) -} - pub(crate) fn optional_field(label: &str, value: Option) -> String { value.map_or(String::new(), |v| format!("{}: {:?}\n", label, v)) } From 24639a7562debf5c17f16067f9c330e2eb18c3ee Mon Sep 17 00:00:00 2001 From: Robin Salen Date: Thu, 13 Jun 2024 11:17:09 -0400 Subject: [PATCH 16/19] fix: Adapt Fibonacci benchmark to SMT format --- .../benches/fibonacci_25m_gas.rs | 82 ++++++++++++------- 1 file changed, 54 insertions(+), 28 deletions(-) diff --git a/evm_arithmetization/benches/fibonacci_25m_gas.rs b/evm_arithmetization/benches/fibonacci_25m_gas.rs index d27b7e5ce..02b9f8670 100644 --- a/evm_arithmetization/benches/fibonacci_25m_gas.rs +++ b/evm_arithmetization/benches/fibonacci_25m_gas.rs @@ -9,7 +9,7 @@ use std::str::FromStr; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV}; -use ethereum_types::{Address, H256, U256}; +use ethereum_types::{Address, BigEndianHash, H160, H256, U256}; use evm_arithmetization::cpu::kernel::aggregator::KERNEL; use evm_arithmetization::cpu::kernel::opcodes::{get_opcode, get_push_opcode}; use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; @@ -18,10 +18,14 @@ use evm_arithmetization::proof::{BlockHashes, BlockMetadata, TrieRoots}; use evm_arithmetization::prover::testing::simulate_execution; use evm_arithmetization::Node; use hex_literal::hex; -use keccak_hash::keccak; use mpt_trie::nibbles::Nibbles; use mpt_trie::partial_trie::{HashedPartialTrie, PartialTrie}; use plonky2::field::goldilocks_field::GoldilocksField; +use smt_trie::code::hash_bytecode_u256; +use smt_trie::db::{Db, MemoryDb}; +use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; +use smt_trie::smt::Smt; +use smt_trie::utils::hashout2u; type F = 
GoldilocksField; @@ -50,12 +54,6 @@ fn prepare_setup() -> anyhow::Result { let sender = hex!("8943545177806ED17B9F23F0a21ee5948eCaa776"); let to = hex!("159271B89fea49aF29DFaf8b4eCE7D042D5d6f07"); - let sender_state_key = keccak(sender); - let to_state_key = keccak(to); - - let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap(); - let to_nibbles = Nibbles::from_bytes_be(to_state_key.as_bytes()).unwrap(); - let push1 = get_push_opcode(1); let push4 = get_push_opcode(4); let add = get_opcode("ADD"); @@ -66,35 +64,39 @@ fn prepare_setup() -> anyhow::Result { let code = [ push1, 1, push1, 1, jumpdest, dup2, add, swap1, push4, 0, 0, 0, 4, jump, ]; - let code_hash = keccak(code); - - let empty_trie_root = HashedPartialTrie::from(Node::Empty).hash(); + let code_hash = hash_bytecode_u256(code.to_vec()); let sender_account_before = AccountRlp { nonce: 169.into(), balance: U256::from_dec_str("999999999998417410153631615")?, - storage_root: empty_trie_root, - code_hash: keccak(vec![]), + code_hash: hash_bytecode_u256(vec![]), + code_length: 0.into(), }; let to_account_before = AccountRlp { nonce: 1.into(), balance: 0.into(), - storage_root: empty_trie_root, code_hash, + code_length: code.len().into(), }; - let mut state_trie_before = HashedPartialTrie::from(Node::Empty); - state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec())?; - state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec())?; + let mut state_smt_before = Smt::::default(); + set_account( + &mut state_smt_before, + H160(sender), + &sender_account_before, + &HashMap::new(), + ); + set_account( + &mut state_smt_before, + H160(to), + &to_account_before, + &HashMap::new(), + ); let tries_before = TrieInputs { - state_trie: state_trie_before, + state_smt: state_smt_before.serialize(), transactions_trie: Node::Empty.into(), receipts_trie: Node::Empty.into(), - storage_tries: vec![ - (sender_state_key, Node::Empty.into()), - (to_state_key, 
Node::Empty.into()), - ], }; let gas_used = U256::from(0x17d7840_u32); @@ -118,7 +120,7 @@ fn prepare_setup() -> anyhow::Result { }; let mut contract_code = HashMap::new(); - contract_code.insert(keccak(vec![]), vec![]); + contract_code.insert(hash_bytecode_u256(vec![]), vec![]); contract_code.insert(code_hash, code.to_vec()); let sender_account_after = AccountRlp { @@ -128,10 +130,19 @@ fn prepare_setup() -> anyhow::Result { }; let to_account_after = to_account_before; - let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty); - expected_state_trie_after - .insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec())?; - expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec())?; + let mut expected_state_smt_after = Smt::::default(); + set_account( + &mut expected_state_smt_after, + H160(sender), + &sender_account_after, + &HashMap::new(), + ); + set_account( + &mut expected_state_smt_after, + H160(to), + &to_account_after, + &HashMap::new(), + ); let receipt_0 = LegacyReceiptRlp { status: false, @@ -151,7 +162,7 @@ fn prepare_setup() -> anyhow::Result { .into(); let trie_roots_after = TrieRoots { - state_root: expected_state_trie_after.hash(), + state_root: H256::from_uint(&hashout2u(expected_state_smt_after.root)), transactions_root: transactions_trie.hash(), receipts_root: receipts_trie.hash(), }; @@ -176,6 +187,21 @@ fn prepare_setup() -> anyhow::Result { }) } +fn set_account( + smt: &mut Smt, + addr: Address, + account: &AccountRlp, + storage: &HashMap, +) { + smt.set(key_balance(addr), account.balance); + smt.set(key_nonce(addr), account.nonce); + smt.set(key_code(addr), account.code_hash); + smt.set(key_code_length(addr), account.code_length); + for (&k, &v) in storage { + smt.set(key_storage(addr, k), v); + } +} + fn init_logger() { let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info")); } From 0800e7a064252df95bcb0e5002ac4f0a40d8b402 Mon Sep 17 00:00:00 2001 From: Robin Salen 
<30937548+Nashtare@users.noreply.github.com> Date: Tue, 16 Jul 2024 07:56:03 +0900 Subject: [PATCH 17/19] feat(type2): Implement bytecode padding rule in the kernel (#386) * Implement code padding for Poseidon * Check empty code * Apply comments --- .../src/cpu/kernel/asm/account_code.asm | 63 +++++++++++++++++++ .../src/cpu/kernel/asm/core/create.asm | 5 +- .../src/cpu/kernel/asm/core/process_txn.asm | 5 +- .../src/generation/prover_input.rs | 17 ----- 4 files changed, 67 insertions(+), 23 deletions(-) diff --git a/evm_arithmetization/src/cpu/kernel/asm/account_code.asm b/evm_arithmetization/src/cpu/kernel/asm/account_code.asm index 6dac4c32a..bc8abc5f8 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/account_code.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/account_code.asm @@ -139,3 +139,66 @@ remove_padding_loop: remove_padding_after: %stack (offset, ctx, retdest) -> (retdest, offset) JUMP + +// Convenience macro to call poseidon_hash_code_unpadded and return where we left off. +%macro poseidon_hash_code_unpadded + %stack (addr, len) -> (addr, len, %%after) + %jump(poseidon_hash_code_unpadded) +%%after: +%endmacro + +/// Applies the padding rule to the code located at the provided address before hashing it. +/// Memory cells after the last code byte will be overwritten. 
+global poseidon_hash_code_unpadded: + // stack: addr, len, retdest + DUP2 ISZERO %jumpi(poseidon_empty_code) + DUP2 DUP2 ADD + // stack: padding_addr, addr, len, retdest + + // write 1 after the last code byte + DUP1 PUSH 1 MSTORE_GENERAL + // stack: padding_addr, addr, len, retdest + %increment + // stack: padding_addr, addr, len, retdest + + // Pad with 0s until the length is a multiple of 56 + PUSH 56 + DUP4 %increment + // stack: curr_len, 56, padding_addr, addr, len, retdest + PUSH 56 SUB + // stack: 56 - curr_len, 56, padding_addr, addr, len, retdest + MOD + // stack: padding_len, padding_addr, addr, len, retdest + SWAP3 DUP4 + // stack: padding_len, len, padding_addr, addr, padding_len, retdest + ADD + // stack: last_byte_offset, padding_addr, addr, padding_len, retdest + %stack (last_byte_offset, padding_addr, addr, padding_len) + -> (padding_addr, padding_len, after_padding, addr, last_byte_offset) + %jump(memset) +after_padding: + // stack: addr, last_byte_offset, retdest + + // Xor the last element with 0x80 + PUSH 1 DUP3 ADD + // stack: total_code_len, addr, last_byte_offset, retdest + SWAP2 + // stack: last_byte_offset, addr, total_code_len, retdest + DUP2 ADD + // stack: last_byte_addr, addr, total_code_len, retdest + DUP1 MLOAD_GENERAL + // stack: last_byte, last_byte_addr, addr, total_code_len, retdest + PUSH 0x80 ADD + // stack: last_byte_updated, last_byte_addr, addr, total_code_len, retdest + MSTORE_GENERAL + // stack: addr, total_code_len, retdest + + POSEIDON_GENERAL + // stack: codehash, retdest + SWAP1 + JUMP + +global poseidon_empty_code: + // stack: addr, len, retdest + %stack (addr, len, retdest) -> (retdest, @EMPTY_STRING_POSEIDON_HASH) + JUMP diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/create.asm b/evm_arithmetization/src/cpu/kernel/asm/core/create.asm index 07b2af6d9..99644f2fa 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/create.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/create.asm @@ -168,9 +168,8 
@@ after_constructor: // Store the code hash of the new contract. %returndatasize PUSH @SEGMENT_RETURNDATA GET_CONTEXT %build_address_no_offset - // stack: addr, len - PROVER_INPUT(poseidon_code) // TODO: FIX THIS! - %stack (codehash, addr, len) -> (codehash) + // stack: addr, len, leftover_gas, success, address, kexit_info + %poseidon_hash_code_unpadded // stack: codehash, leftover_gas, success, address, kexit_info %observe_new_contract DUP4 diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm b/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm index d1a898264..242a266f8 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm @@ -203,9 +203,8 @@ global process_contract_creation_txn_after_constructor: PUSH @SEGMENT_RETURNDATA GET_CONTEXT %build_address_no_offset - // stack: addr, len - PROVER_INPUT(poseidon_code) // TODO: FIX THIS! - %stack (codehash, addr, len) -> (codehash) + // stack: addr, len, leftover_gas, new_ctx, address, retdest, success + %poseidon_hash_code_unpadded // stack: codehash, leftover_gas, new_ctx, address, retdest, success %observe_new_contract DUP4 diff --git a/evm_arithmetization/src/generation/prover_input.rs b/evm_arithmetization/src/generation/prover_input.rs index 5b664ceab..408222945 100644 --- a/evm_arithmetization/src/generation/prover_input.rs +++ b/evm_arithmetization/src/generation/prover_input.rs @@ -9,7 +9,6 @@ use itertools::Itertools; use num_bigint::BigUint; use plonky2::hash::hash_types::RichField; use serde::{Deserialize, Serialize}; -use smt_trie::code::hash_bytecode_u256; use crate::cpu::kernel::cancun_constants::KZG_VERSIONED_HASH; use crate::cpu::kernel::constants::cancun_constants::{ @@ -64,7 +63,6 @@ impl GenerationState { "num_bits" => self.run_num_bits(), "jumpdest_table" => self.run_jumpdest_table(input_fn), "access_lists" => self.run_access_lists(input_fn), - "poseidon_code" => self.run_poseidon_code(), 
"ger" => self.run_global_exit_roots(), "kzg_point_eval" => self.run_kzg_point_eval(), "kzg_point_eval_2" => self.run_kzg_point_eval_2(), @@ -415,21 +413,6 @@ impl GenerationState { Ok((Segment::AccessedStorageKeys as usize).into()) } - fn run_poseidon_code(&mut self) -> Result { - let addr = stack_peek(self, 0)?; - let len = stack_peek(self, 1)?.as_usize(); - let addr = MemoryAddress::new_bundle(addr)?; - let code = (0..len) - .map(|i| { - let mut a = addr; - a.virt += i; - self.memory.get_with_init(a).as_usize() as u8 - }) - .collect_vec(); - - Ok(hash_bytecode_u256(code)) - } - /// Returns the first part of the KZG precompile output. fn run_kzg_point_eval(&mut self) -> Result { let versioned_hash = stack_peek(self, 0)?; From a619246a19cf740952835a360d8b3ca6f0daad28 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Tue, 23 Jul 2024 17:23:13 -0400 Subject: [PATCH 18/19] feat: add conditional feature support in the kernel assembly (#416) * feat: add conditional feature support in the kernel * Clippy * Never enough clippy * Wording * Update doc --- .../src/cpu/kernel/aggregator.rs | 4 +- .../src/cpu/kernel/assembler.rs | 59 +++-- evm_arithmetization/src/cpu/kernel/ast.rs | 2 + .../src/cpu/kernel/evm_asm.pest | 4 +- evm_arithmetization/src/cpu/kernel/mod.rs | 4 +- evm_arithmetization/src/cpu/kernel/parser.rs | 218 +++++++++++++++++- 6 files changed, 262 insertions(+), 29 deletions(-) diff --git a/evm_arithmetization/src/cpu/kernel/aggregator.rs b/evm_arithmetization/src/cpu/kernel/aggregator.rs index 60672c59b..6c742bd4f 100644 --- a/evm_arithmetization/src/cpu/kernel/aggregator.rs +++ b/evm_arithmetization/src/cpu/kernel/aggregator.rs @@ -1,5 +1,7 @@ //! Loads each kernel assembly file and concatenates them. 
+use std::collections::HashSet; + use itertools::Itertools; use once_cell::sync::Lazy; @@ -179,7 +181,7 @@ pub static KERNEL_FILES: [&str; NUMBER_KERNEL_FILES] = [ pub static KERNEL: Lazy = Lazy::new(combined_kernel); pub(crate) fn combined_kernel_from_files(files: [&str; N]) -> Kernel { - let parsed_files = files.iter().map(|f| parse(f)).collect_vec(); + let parsed_files = files.iter().map(|f| parse(f, HashSet::new())).collect_vec(); assemble(parsed_files, evm_constants(), true) } diff --git a/evm_arithmetization/src/cpu/kernel/assembler.rs b/evm_arithmetization/src/cpu/kernel/assembler.rs index 7dfba09fc..c055e7a84 100644 --- a/evm_arithmetization/src/cpu/kernel/assembler.rs +++ b/evm_arithmetization/src/cpu/kernel/assembler.rs @@ -129,6 +129,7 @@ pub(crate) fn assemble( for file in files { let start = Instant::now(); let mut file = file.body; + file = expand_conditional_blocks(file); file = expand_macros(file, ¯os, &mut macro_counter); file = inline_constants(file, &constants); file = expand_stack_manipulation(file); @@ -159,22 +160,44 @@ pub(crate) fn assemble( fn find_macros(files: &[File]) -> HashMap { let mut macros = HashMap::new(); for file in files { - for item in &file.body { - if let Item::MacroDef(name, params, items) = item { - let signature = MacroSignature { - name: name.clone(), - num_params: params.len(), - }; - let macro_ = Macro { - params: params.clone(), - items: items.clone(), - }; - let old = macros.insert(signature.clone(), macro_); - assert!(old.is_none(), "Duplicate macro signature: {signature:?}"); + find_macros_internal(&file.body, &mut macros); + } + macros +} + +fn find_macros_internal(items: &[Item], macros: &mut HashMap) { + for item in items { + if let Item::ConditionalBlock(_, local_items) = item { + find_macros_internal(local_items, macros); + } + if let Item::MacroDef(name, params, local_items) = item { + let signature = MacroSignature { + name: name.clone(), + num_params: params.len(), + }; + let macro_ = Macro { + params: 
params.clone(), + items: local_items.clone(), + }; + let old = macros.insert(signature.clone(), macro_); + assert!(old.is_none(), "Duplicate macro signature: {signature:?}"); + } + } +} + +fn expand_conditional_blocks(body: Vec) -> Vec { + let mut expanded = vec![]; + for item in body { + match item { + Item::ConditionalBlock(_, items) => { + expanded.extend(items); + } + _ => { + expanded.push(item); } } } - macros + expanded } fn expand_macros( @@ -325,7 +348,8 @@ fn find_labels( let mut local_labels = HashMap::::new(); for item in body { match item { - Item::MacroDef(_, _, _) + Item::ConditionalBlock(_, _) + | Item::MacroDef(_, _, _) | Item::MacroCall(_, _) | Item::Repeat(_, _) | Item::StackManipulation(_, _) @@ -379,7 +403,8 @@ fn assemble_file( // Assemble the file. for item in body { match item { - Item::MacroDef(_, _, _) + Item::ConditionalBlock(_, _) + | Item::MacroDef(_, _, _) | Item::MacroCall(_, _) | Item::Repeat(_, _) | Item::StackManipulation(_, _) @@ -437,6 +462,8 @@ fn push_target_size(target: &PushTarget) -> u8 { #[cfg(test)] mod tests { + use std::collections::HashSet; + use super::*; use crate::cpu::kernel::parser::parse; @@ -734,7 +761,7 @@ mod tests { constants: HashMap, optimize: bool, ) -> Kernel { - let parsed_files = files.iter().map(|f| parse(f)).collect_vec(); + let parsed_files = files.iter().map(|f| parse(f, HashSet::new())).collect_vec(); assemble(parsed_files, constants, optimize) } } diff --git a/evm_arithmetization/src/cpu/kernel/ast.rs b/evm_arithmetization/src/cpu/kernel/ast.rs index 0af3bdabe..fa38166ba 100644 --- a/evm_arithmetization/src/cpu/kernel/ast.rs +++ b/evm_arithmetization/src/cpu/kernel/ast.rs @@ -9,6 +9,8 @@ pub(crate) struct File { #[derive(Eq, PartialEq, Clone, Debug)] pub(crate) enum Item { + /// Defines a conditional, feature-gated, block of items. + ConditionalBlock(String, Vec), /// Defines a new macro: name, params, body. MacroDef(String, Vec, Vec), /// Calls a macro: name, args. 
diff --git a/evm_arithmetization/src/cpu/kernel/evm_asm.pest b/evm_arithmetization/src/cpu/kernel/evm_asm.pest index 40dec03b3..7c2d645ae 100644 --- a/evm_arithmetization/src/cpu/kernel/evm_asm.pest +++ b/evm_arithmetization/src/cpu/kernel/evm_asm.pest @@ -15,7 +15,7 @@ literal = { literal_hex | literal_decimal } variable = ${ "$" ~ identifier } constant = ${ "@" ~ identifier } -item = { macro_def | macro_call | repeat | stack | global_label_decl | local_label_decl | macro_label_decl | bytes_item | jumptable_item | push_instruction | prover_input_instruction | nullary_instruction } +item = { conditional_block | macro_def | macro_call | repeat | stack | global_label_decl | local_label_decl | macro_label_decl | bytes_item | jumptable_item | push_instruction | prover_input_instruction | nullary_instruction } macro_def = { ^"%macro" ~ identifier ~ paramlist? ~ item* ~ ^"%endmacro" } macro_call = ${ "%" ~ !((^"macro" | ^"endmacro" | ^"rep" | ^"endrep" | ^"stack") ~ !identifier_char) ~ identifier ~ macro_arglist? } repeat = { ^"%rep" ~ literal ~ item* ~ ^"%endrep" } @@ -43,5 +43,7 @@ prover_input_instruction = { ^"PROVER_INPUT" ~ "(" ~ prover_input_fn ~ ")" } prover_input_fn = { identifier ~ ("::" ~ identifier)*} nullary_instruction = { identifier } +conditional_block = { ^"#" ~ "[" ~ "cfg" ~ "(" ~ "feature" ~ "=" ~ identifier ~ ")" ~ "]" ~ "{" ~ item* ~ ^"}"} + file = { SOI ~ item* ~ silent_eoi } silent_eoi = _{ !ANY } diff --git a/evm_arithmetization/src/cpu/kernel/mod.rs b/evm_arithmetization/src/cpu/kernel/mod.rs index 8c9d1bf88..7b8a58566 100644 --- a/evm_arithmetization/src/cpu/kernel/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/mod.rs @@ -12,6 +12,8 @@ mod utils; pub(crate) mod interpreter; +use std::collections::HashSet; + pub use constants::cancun_constants; pub use constants::global_exit_root; @@ -26,7 +28,7 @@ use crate::cpu::kernel::constants::evm_constants; /// Assemble files, outputting bytes. /// This is for debugging the kernel only. 
pub fn assemble_to_bytes(files: &[String]) -> Vec { - let parsed_files: Vec<_> = files.iter().map(|f| parse(f)).collect(); + let parsed_files: Vec<_> = files.iter().map(|f| parse(f, HashSet::new())).collect(); let kernel = assemble(parsed_files, evm_constants(), true); kernel.code } diff --git a/evm_arithmetization/src/cpu/kernel/parser.rs b/evm_arithmetization/src/cpu/kernel/parser.rs index 7864acfe0..5e880964e 100644 --- a/evm_arithmetization/src/cpu/kernel/parser.rs +++ b/evm_arithmetization/src/cpu/kernel/parser.rs @@ -1,4 +1,4 @@ -use std::str::FromStr; +use std::{collections::HashSet, str::FromStr}; use ethereum_types::U256; use pest::iterators::Pair; @@ -12,22 +12,26 @@ use crate::cpu::kernel::ast::{File, Item, PushTarget, StackReplacement}; #[grammar = "cpu/kernel/evm_asm.pest"] struct AsmParser; -pub(crate) fn parse(s: &str) -> File { +pub(crate) fn parse(s: &str, active_features: HashSet<&str>) -> File { let file = AsmParser::parse(Rule::file, s) .expect("Parsing failed") .next() .unwrap(); - let body = file.into_inner().map(parse_item).collect(); + let body = file + .into_inner() + .map(|i| parse_item(i, &active_features)) + .collect(); File { body } } -fn parse_item(item: Pair) -> Item { +fn parse_item(item: Pair, active_features: &HashSet<&str>) -> Item { assert_eq!(item.as_rule(), Rule::item); let item = item.into_inner().next().unwrap(); match item.as_rule() { - Rule::macro_def => parse_macro_def(item), + Rule::conditional_block => parse_conditional_block(item, active_features), + Rule::macro_def => parse_macro_def(item, active_features), Rule::macro_call => parse_macro_call(item), - Rule::repeat => parse_repeat(item), + Rule::repeat => parse_repeat(item, active_features), Rule::stack => parse_stack(item), Rule::global_label_decl => { Item::GlobalLabelDeclaration(item.into_inner().next().unwrap().as_str().into()) @@ -57,7 +61,23 @@ fn parse_item(item: Pair) -> Item { } } -fn parse_macro_def(item: Pair) -> Item { +fn parse_conditional_block(item: 
Pair, active_features: &HashSet<&str>) -> Item { + assert_eq!(item.as_rule(), Rule::conditional_block); + let mut inner = item.into_inner().peekable(); + + let name = inner.next().unwrap().as_str(); + + if active_features.contains(&name) { + Item::ConditionalBlock( + name.into(), + inner.map(|i| parse_item(i, active_features)).collect(), + ) + } else { + Item::ConditionalBlock(name.into(), vec![]) + } +} + +fn parse_macro_def(item: Pair, active_features: &HashSet<&str>) -> Item { assert_eq!(item.as_rule(), Rule::macro_def); let mut inner = item.into_inner().peekable(); @@ -71,7 +91,11 @@ fn parse_macro_def(item: Pair) -> Item { vec![] }; - Item::MacroDef(name, params, inner.map(parse_item).collect()) + Item::MacroDef( + name, + params, + inner.map(|i| parse_item(i, active_features)).collect(), + ) } fn parse_macro_call(item: Pair) -> Item { @@ -91,11 +115,14 @@ fn parse_macro_call(item: Pair) -> Item { Item::MacroCall(name, args) } -fn parse_repeat(item: Pair) -> Item { +fn parse_repeat(item: Pair, active_features: &HashSet<&str>) -> Item { assert_eq!(item.as_rule(), Rule::repeat); let mut inner = item.into_inner(); let count = parse_literal_u256(inner.next().unwrap()); - Item::Repeat(count, inner.map(parse_item).collect()) + Item::Repeat( + count, + inner.map(|i| parse_item(i, active_features)).collect(), + ) } fn parse_stack(item: Pair) -> Item { @@ -208,3 +235,174 @@ fn parse_hex(hex: Pair) -> String { debug_assert!(prefix == "0x" || prefix == "0X"); hex.as_str()[2..].to_string() } + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::*; + use crate::cpu::kernel::assembler::assemble; + + #[test] + fn test_feature() { + let code = r#" + #[cfg(feature = feature_1)] + { + %macro bar + PUSH 2 + MUL + %endmacro + } + + global foo_1: + PUSH 1 + PUSH 2 + + #[cfg(feature = feature_1)] + { + %bar + PUSH 1 + } + PUSH 3 + PUSH 4 + ADD + + global foo_3: + PUSH 5 + PUSH 6 + DIV + + #[cfg(feature = feature_2)] + { + global foo_4: + PUSH 7 + PUSH 8 + MOD 
+ } + "#; + + // Test `feature_1`. + let active_features = HashSet::from(["feature_1"]); + + let parsed_code = parse(code, active_features); + let final_code = assemble(vec![parsed_code], HashMap::new(), false); + + let expected_code = r#" + %macro bar + PUSH 2 + MUL + %endmacro + + global foo_1: + PUSH 1 + PUSH 2 + %bar + PUSH 1 + PUSH 3 + PUSH 4 + ADD + + global foo_3: + PUSH 5 + PUSH 6 + DIV + "#; + + let parsed_expected = parse(expected_code, HashSet::new()); + let final_expected = assemble(vec![parsed_expected], HashMap::new(), false); + + assert_eq!(final_code.code, final_expected.code); + + // Test `feature_2`. + let active_features = HashSet::from(["feature_2"]); + + let parsed_code = parse(code, active_features); + let final_code = assemble(vec![parsed_code], HashMap::new(), false); + + let expected_code = r#" + global foo_1: + PUSH 1 + PUSH 2 + PUSH 3 + PUSH 4 + ADD + + global foo_3: + PUSH 5 + PUSH 6 + DIV + + global foo_4: + PUSH 7 + PUSH 8 + MOD + "#; + + let parsed_expected = parse(expected_code, HashSet::new()); + let final_expected = assemble(vec![parsed_expected], HashMap::new(), false); + + assert_eq!(final_code.code, final_expected.code); + + // Test with both features enabled. + let active_features = HashSet::from(["feature_1", "feature_2"]); + + let parsed_code = parse(code, active_features); + let final_code = assemble(vec![parsed_code], HashMap::new(), false); + + let expected_code = r#" + %macro bar + PUSH 2 + MUL + %endmacro + + global foo_1: + PUSH 1 + PUSH 2 + %bar + PUSH 1 + PUSH 3 + PUSH 4 + ADD + + global foo_3: + PUSH 5 + PUSH 6 + DIV + + global foo_4: + PUSH 7 + PUSH 8 + MOD + "#; + + let parsed_expected = parse(expected_code, HashSet::new()); + let final_expected = assemble(vec![parsed_expected], HashMap::new(), false); + + assert_eq!(final_code.code, final_expected.code); + + // Test with all features disabled. 
+ let active_features = HashSet::new(); + + let parsed_code = parse(code, active_features); + let final_code = assemble(vec![parsed_code], HashMap::new(), false); + + let expected_code = r#" + global foo_1: + PUSH 1 + PUSH 2 + PUSH 3 + PUSH 4 + ADD + + global foo_3: + PUSH 5 + PUSH 6 + DIV + "#; + + let parsed_expected = parse(expected_code, HashSet::new()); + let final_expected = assemble(vec![parsed_expected], HashMap::new(), false); + + assert_eq!(final_code.code, final_expected.code); + } +} From 615b77774bac7c503795422e7b66af044ce51fed Mon Sep 17 00:00:00 2001 From: Linda Guiga <101227802+LindaGuiga@users.noreply.github.com> Date: Wed, 14 Aug 2024 17:50:24 +0100 Subject: [PATCH 19/19] Allow features with macros within macros (#492) --- evm_arithmetization/src/cpu/kernel/assembler.rs | 3 +++ evm_arithmetization/src/cpu/kernel/parser.rs | 14 +++++++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/evm_arithmetization/src/cpu/kernel/assembler.rs b/evm_arithmetization/src/cpu/kernel/assembler.rs index c055e7a84..1bf97e604 100644 --- a/evm_arithmetization/src/cpu/kernel/assembler.rs +++ b/evm_arithmetization/src/cpu/kernel/assembler.rs @@ -219,6 +219,9 @@ fn expand_macros( expanded.extend(expand_macros(body.clone(), macros, macro_counter)); } } + Item::ConditionalBlock(_, items) => { + expanded.extend(expand_macros(items.clone(), macros, macro_counter)); + } item => { expanded.push(item); } diff --git a/evm_arithmetization/src/cpu/kernel/parser.rs b/evm_arithmetization/src/cpu/kernel/parser.rs index 5e880964e..c661dabd2 100644 --- a/evm_arithmetization/src/cpu/kernel/parser.rs +++ b/evm_arithmetization/src/cpu/kernel/parser.rs @@ -246,6 +246,14 @@ mod tests { #[test] fn test_feature() { let code = r#" + %macro bar_foo + #[cfg(feature = feature_1)] + { + %bar + PUSH 3 + ADD + } + %endmacro #[cfg(feature = feature_1)] { %macro bar @@ -260,7 +268,7 @@ mod tests { #[cfg(feature = feature_1)] { - %bar + %bar_foo PUSH 1 } PUSH 3 @@ -291,6 +299,8 @@ 
mod tests { %macro bar PUSH 2 MUL + PUSH 3 + ADD %endmacro global foo_1: @@ -353,6 +363,8 @@ mod tests { %macro bar PUSH 2 MUL + PUSH 3 + ADD %endmacro global foo_1: