diff --git a/.gitignore b/.gitignore
index df92c500..3148ca6a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ main_blob.pil.json
 tools/full-tracer-tests/ft-traces/
 batch-l2-data.json
 src/sm/sm_main/logs-ft-*
+MyLogFile.log
diff --git a/package-lock.json b/package-lock.json
index 5bbd3a9f..4574559a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,9 +9,26 @@
       "version": "7.0.0",
       "license": "UNLICENSED",
       "dependencies": {
+<<<<<<< HEAD
+<<<<<<< HEAD
         "@0xpolygonhermez/zkasmcom": "github:0xPolygonHermez/zkasmcom#develop-feijoa",
         "@0xpolygonhermez/zkevm-commonjs": "github:0xPolygonHermez/zkevm-commonjs#develop-feijoa",
         "@0xpolygonhermez/zkevm-rom": "github:0xPolygonHermez/zkevm-rom#develop-feijoa",
+=======
+<<<<<<< HEAD
+        "@0xpolygonhermez/zkasmcom": "github:0xPolygonHermez/zkasmcom#fix/mem-by-addr",
+=======
+=======
+>>>>>>> ab09ca2 (Adding tests for all circom templates)
+        "@0xpolygonhermez/zkasmcom": "github:0xPolygonHermez/zkasmcom#develop-feijoa",
+        "@0xpolygonhermez/zkevm-blob-rom": "github:0xPolygonHermez/zkevm-blob-rom#develop-feijoa",
+        "@0xpolygonhermez/zkevm-commonjs": "github:0xPolygonHermez/zkevm-commonjs#v5.0.0-fork.8",
+<<<<<<< HEAD
+        "@0xpolygonhermez/zkevm-rom": "github:0xPolygonHermez/zkevm-rom#feature/feijoa-batch-tree",
+>>>>>>> 36feb2b (Integrating feijoa setup)
+=======
+        "@0xpolygonhermez/zkevm-rom": "github:0xPolygonHermez/zkevm-rom#develop-feijoa",
+>>>>>>> ab09ca2 (Adding tests for all circom templates)
         "@0xpolygonhermez/zkevm-storage-rom": "https://github.com/0xPolygonHermez/zkevm-storage-rom.git#v4.0.0-fork.7",
         "@grpc/grpc-js": "^1.8.14",
         "chalk": "^3.0.0",
@@ -19,12 +36,12 @@
         "circomlibjs": "^0.1.1",
         "docker-compose": "^0.23.19",
         "ejs": "^3.1.6",
-        "ethers": "^5.4.7",
+        "ethers": "^6.8.1",
         "ffjavascript": "0.2.63",
         "fs": "^0.0.1-security",
         "json-bigint": "^1.0.0",
         "lodash": "^4.17.21",
-        "pil-stark": "0.0.57",
+        "pil-stark": "https://github.com/0xPolygonHermez/pil-stark.git#feature/new_parser",
         "pilcom": "0.0.24",
         "snarkjs": "0.7.0",
         "yargs": "^17.4.0"
@@ -37,13 +54,28 @@
         "eslint": "^8.34.0",
         "eslint-config-airbnb-base": "^15.0.0",
         "eslint-plugin-mocha": "^10.1.0",
-        "mocha": "^9.1.3"
+        "mocha": "^9.1.3",
+        "temporary": "^1.1.0"
       }
     },
     "node_modules/@0xpolygonhermez/zkasmcom": {
       "version": "2.0.0",
+<<<<<<< HEAD
+<<<<<<< HEAD
       "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkasmcom.git#414049fd74637fc0d1f22b58a3b7f19c3e37d9a3",
      "license": "UNLICENSED",
+=======
+<<<<<<< HEAD
+      "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkasmcom.git#488b05a6ff988ef7d5df11b1a642f07d392cc101",
+=======
+      "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkasmcom.git#aeec9e1c03d4fc0e0d674e8a34a6eb9d3147366e",
+      "license": "UNLICENSED",
+>>>>>>> df8cba2 (Integrating feijoa setup)
+>>>>>>> 36feb2b (Integrating feijoa setup)
+=======
+      "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkasmcom.git#39e05159c95c68a2473737d2481e5dbe6787e890",
+      "license": "UNLICENSED",
+>>>>>>> ab09ca2 (Adding tests for all circom templates)
       "dependencies": {
         "chai": "^4.3.10",
         "ffjavascript": "^0.2.46",
@@ -178,9 +210,26 @@
       "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz",
       "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw=="
     },
+    "node_modules/@0xpolygonhermez/zkevm-blob-rom": {
+      "version": "1.0.0",
+      "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-blob-rom.git#13c938b91d027a2daa4054ac7008f1c48739a321",
+      "license": "AGPL",
+      "dependencies": {
+
"@0xpolygonhermez/zkasmcom": "https://github.com/0xPolygonHermez/zkasmcom.git#develop-feijoa", + "yargs": "^17.5.1" + }, + "engines": { + "node": "=20", + "npm": "=10" + } + }, "node_modules/@0xpolygonhermez/zkevm-commonjs": { "version": "5.0.0", +<<<<<<< HEAD "resolved": "git+ssh://git@github.com/0xpolygonhermez/zkevm-commonjs.git#f1b19ad6321efb16573e0cdd6cba377b692b96d2", +======= + "resolved": "git+ssh://git@github.com/0xpolygonhermez/zkevm-commonjs.git#eb1ed1a1c05e2666cd32e3900beff5121bdeb4db", +>>>>>>> ab09ca2 (Adding tests for all circom templates) "license": "pending", "dependencies": { "@ethereumjs/block": "^3.6.2", @@ -194,10 +243,77 @@ "pg": "^8.7.1" } }, + "node_modules/@0xpolygonhermez/zkevm-commonjs/node_modules/ethers": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.7.2.tgz", + "integrity": "sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==", + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abi": "5.7.0", + "@ethersproject/abstract-provider": "5.7.0", + "@ethersproject/abstract-signer": "5.7.0", + "@ethersproject/address": "5.7.0", + "@ethersproject/base64": "5.7.0", + "@ethersproject/basex": "5.7.0", + "@ethersproject/bignumber": "5.7.0", + "@ethersproject/bytes": "5.7.0", + "@ethersproject/constants": "5.7.0", + "@ethersproject/contracts": "5.7.0", + "@ethersproject/hash": "5.7.0", + "@ethersproject/hdnode": "5.7.0", + "@ethersproject/json-wallets": "5.7.0", + "@ethersproject/keccak256": "5.7.0", + "@ethersproject/logger": "5.7.0", + "@ethersproject/networks": "5.7.1", + "@ethersproject/pbkdf2": "5.7.0", + "@ethersproject/properties": "5.7.0", + "@ethersproject/providers": "5.7.2", + "@ethersproject/random": "5.7.0", + "@ethersproject/rlp": "5.7.0", + "@ethersproject/sha2": "5.7.0", + "@ethersproject/signing-key": "5.7.0", + "@ethersproject/solidity": "5.7.0", + "@ethersproject/strings": "5.7.0", + "@ethersproject/transactions": "5.7.0", + "@ethersproject/units": "5.7.0", + "@ethersproject/wallet": "5.7.0", + "@ethersproject/web": "5.7.1", + "@ethersproject/wordlists": "5.7.0" + } + }, "node_modules/@0xpolygonhermez/zkevm-rom": { "version": "5.0.0", +<<<<<<< HEAD +<<<<<<< HEAD +<<<<<<< HEAD "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-rom.git#dcbd682badbffbce290f8a31339bd67aad82b3a7", "license": "AGPL", +======= +======= +>>>>>>> 98bdace (Updating package-lock.json) +<<<<<<< HEAD + "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-rom.git#3d54f35c3a7b7358d83fe21ad8a54dcac237ffef", +======= + "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-rom.git#8b27fd19d019b6b5b2c809c1427669dc86dc37df", +======= + "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-rom.git#f8019f0051bd2da2d65daf09d1de6ec1198846d9", +>>>>>>> 844a233 (Updating package-lock.json) + "license": "AGPL", +>>>>>>> df8cba2 (Integrating feijoa setup) +>>>>>>> 36feb2b (Integrating feijoa setup) +======= + "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-rom.git#397f49cae3f4f99da1bce67213f69fb526337fbd", + "license": "AGPL", +>>>>>>> ab09ca2 (Adding tests for all circom templates) "dependencies": { "@0xpolygonhermez/zkasmcom": "https://github.com/0xPolygonHermez/zkasmcom.git#develop-feijoa", "yargs": "^17.5.1" @@ -215,8 +331,12 @@ "node_modules/@0xpolygonhermez/zkevm-testvectors": { 
"name": "@0xpolygonhermez/test-vectors", "version": "5.0.0", +<<<<<<< HEAD "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-testvectors.git#8e333fb1148fd7b8b49f367ff06fdd86912d0fd1", "dev": true, +======= + "resolved": "git+ssh://git@github.com/0xPolygonHermez/zkevm-testvectors.git#78f1b767ca10307f6cac3426cb4cc4d955f43ce6", +>>>>>>> ab09ca2 (Adding tests for all circom templates) "license": "pending" }, "node_modules/@aashutoshrathi/word-wrap": { @@ -228,6 +348,11 @@ "node": ">=0.10.0" } }, + "node_modules/@adraffy/ens-normalize": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", + "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" + }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", @@ -639,6 +764,11 @@ "scrypt-js": "3.0.1" } }, + "node_modules/@ethersproject/json-wallets/node_modules/aes-js": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.0.0.tgz", + "integrity": "sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw==" + }, "node_modules/@ethersproject/keccak256": { "version": "5.7.0", "resolved": "https://registry.npmjs.org/@ethersproject/keccak256/-/keccak256-5.7.0.tgz", @@ -765,6 +895,26 @@ "ws": "7.4.6" } }, + "node_modules/@ethersproject/providers/node_modules/ws": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/@ethersproject/random": { "version": "5.7.0", "resolved": "https://registry.npmjs.org/@ethersproject/random/-/random-5.7.0.tgz", @@ -846,6 +996,25 @@ "hash.js": "1.1.7" } }, + "node_modules/@ethersproject/signing-key/node_modules/elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/@ethersproject/signing-key/node_modules/elliptic/node_modules/bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" + }, "node_modules/@ethersproject/solidity": { "version": "5.7.0", "resolved": "https://registry.npmjs.org/@ethersproject/solidity/-/solidity-5.7.0.tgz", @@ -1024,9 +1193,15 @@ } }, "node_modules/@grpc/proto-loader": { +<<<<<<< HEAD "version": "0.7.12", "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.12.tgz", "integrity": "sha512-DCVwMxqYzpUCiDMl7hQ384FqP4T3DbNpXU8pt681l3UWCip1WUiD5JrkImUwCB9a7f2cq4CUTmi5r/xIMRPY1Q==", +======= + "version": "0.7.11", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.11.tgz", + "integrity": 
"sha512-amjhSfJ+xYnTP+hncJMmkchoRtjIdi+uO3FaymGSCr07yu5xfpXFEnhZkTU1mj2lPJB3oVToau7j9YkqB+YNdg==", +>>>>>>> ab09ca2 (Adding tests for all circom templates) "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", @@ -1096,6 +1271,28 @@ "url": "https://opencollective.com/js-sdsl" } }, + "node_modules/@noble/curves": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz", + "integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==", + "dependencies": { + "@noble/hashes": "1.3.2" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@noble/hashes": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz", + "integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -1162,6 +1359,53 @@ "rustbn.js": "~0.2.0" } }, + "node_modules/@polygon-hermez/vm/node_modules/ethers": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.7.2.tgz", + "integrity": "sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==", + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abi": "5.7.0", + "@ethersproject/abstract-provider": "5.7.0", + "@ethersproject/abstract-signer": "5.7.0", + "@ethersproject/address": "5.7.0", + "@ethersproject/base64": "5.7.0", + "@ethersproject/basex": "5.7.0", + "@ethersproject/bignumber": "5.7.0", + "@ethersproject/bytes": "5.7.0", + "@ethersproject/constants": "5.7.0", + "@ethersproject/contracts": "5.7.0", + "@ethersproject/hash": "5.7.0", + "@ethersproject/hdnode": "5.7.0", + "@ethersproject/json-wallets": "5.7.0", + "@ethersproject/keccak256": "5.7.0", + "@ethersproject/logger": "5.7.0", + "@ethersproject/networks": "5.7.1", + "@ethersproject/pbkdf2": "5.7.0", + "@ethersproject/properties": "5.7.0", + "@ethersproject/providers": "5.7.2", + "@ethersproject/random": "5.7.0", + "@ethersproject/rlp": "5.7.0", + "@ethersproject/sha2": "5.7.0", + "@ethersproject/signing-key": "5.7.0", + "@ethersproject/solidity": "5.7.0", + "@ethersproject/strings": "5.7.0", + "@ethersproject/transactions": "5.7.0", + "@ethersproject/units": "5.7.0", + "@ethersproject/wallet": "5.7.0", + "@ethersproject/web": "5.7.1", + "@ethersproject/wordlists": "5.7.0" + } + }, "node_modules/@polygon-hermez/zkevm-commonjs": { "name": "@0xpolygonhermez/zkevm-commonjs", "version": "1.0.0", @@ -1201,6 +1445,53 @@ "rustbn.js": "~0.2.0" } }, + "node_modules/@polygon-hermez/zkevm-commonjs/node_modules/ethers": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.7.2.tgz", + "integrity": "sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==", + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abi": "5.7.0", + 
"@ethersproject/abstract-provider": "5.7.0", + "@ethersproject/abstract-signer": "5.7.0", + "@ethersproject/address": "5.7.0", + "@ethersproject/base64": "5.7.0", + "@ethersproject/basex": "5.7.0", + "@ethersproject/bignumber": "5.7.0", + "@ethersproject/bytes": "5.7.0", + "@ethersproject/constants": "5.7.0", + "@ethersproject/contracts": "5.7.0", + "@ethersproject/hash": "5.7.0", + "@ethersproject/hdnode": "5.7.0", + "@ethersproject/json-wallets": "5.7.0", + "@ethersproject/keccak256": "5.7.0", + "@ethersproject/logger": "5.7.0", + "@ethersproject/networks": "5.7.1", + "@ethersproject/pbkdf2": "5.7.0", + "@ethersproject/properties": "5.7.0", + "@ethersproject/providers": "5.7.2", + "@ethersproject/random": "5.7.0", + "@ethersproject/rlp": "5.7.0", + "@ethersproject/sha2": "5.7.0", + "@ethersproject/signing-key": "5.7.0", + "@ethersproject/solidity": "5.7.0", + "@ethersproject/strings": "5.7.0", + "@ethersproject/transactions": "5.7.0", + "@ethersproject/units": "5.7.0", + "@ethersproject/wallet": "5.7.0", + "@ethersproject/web": "5.7.1", + "@ethersproject/wordlists": "5.7.0" + } + }, "node_modules/@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -1291,9 +1582,27 @@ } }, "node_modules/@types/node": { +<<<<<<< HEAD +<<<<<<< HEAD "version": "20.12.4", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.4.tgz", "integrity": "sha512-E+Fa9z3wSQpzgYQdYmme5X3OTuejnnTx88A6p6vkkJosR3KBz+HpE3kqNm98VE6cfLFcISx7zW7MsJkH6KwbTw==", +======= +<<<<<<< HEAD + "version": "20.11.27", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.27.tgz", + "integrity": "sha512-qyUZfMnCg1KEz57r7pzFtSGt49f6RPkPBis3Vo4PbS7roQEDn22hiHzl/Lo1q4i4hDEgBJmBF/NTNg2XR0HbFg==", +======= + "version": "20.11.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz", + "integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==", +>>>>>>> df8cba2 (Integrating feijoa setup) +>>>>>>> 36feb2b (Integrating feijoa setup) +======= + "version": "20.11.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz", + "integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==", +>>>>>>> ab09ca2 (Adding tests for all circom templates) "dependencies": { "undici-types": "~5.26.4" } @@ -1363,9 +1672,9 @@ } }, "node_modules/aes-js": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.0.0.tgz", - "integrity": "sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw==" + "version": "4.0.0-beta.5", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", + "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==" }, "node_modules/ajv": { "version": "6.12.6", @@ -2020,6 +2329,53 @@ "ffjavascript": "^0.2.45" } }, + "node_modules/circomlibjs/node_modules/ethers": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.7.2.tgz", + "integrity": "sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==", + "funding": [ + { + "type": "individual", + "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + }, + { + "type": "individual", + "url": "https://www.buymeacoffee.com/ricmoo" + } + ], + "dependencies": { + "@ethersproject/abi": "5.7.0", + "@ethersproject/abstract-provider": 
"5.7.0", + "@ethersproject/abstract-signer": "5.7.0", + "@ethersproject/address": "5.7.0", + "@ethersproject/base64": "5.7.0", + "@ethersproject/basex": "5.7.0", + "@ethersproject/bignumber": "5.7.0", + "@ethersproject/bytes": "5.7.0", + "@ethersproject/constants": "5.7.0", + "@ethersproject/contracts": "5.7.0", + "@ethersproject/hash": "5.7.0", + "@ethersproject/hdnode": "5.7.0", + "@ethersproject/json-wallets": "5.7.0", + "@ethersproject/keccak256": "5.7.0", + "@ethersproject/logger": "5.7.0", + "@ethersproject/networks": "5.7.1", + "@ethersproject/pbkdf2": "5.7.0", + "@ethersproject/properties": "5.7.0", + "@ethersproject/providers": "5.7.2", + "@ethersproject/random": "5.7.0", + "@ethersproject/rlp": "5.7.0", + "@ethersproject/sha2": "5.7.0", + "@ethersproject/signing-key": "5.7.0", + "@ethersproject/solidity": "5.7.0", + "@ethersproject/strings": "5.7.0", + "@ethersproject/transactions": "5.7.0", + "@ethersproject/units": "5.7.0", + "@ethersproject/wallet": "5.7.0", + "@ethersproject/web": "5.7.1", + "@ethersproject/wordlists": "5.7.0" + } + }, "node_modules/cjson": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/cjson/-/cjson-0.3.0.tgz", @@ -2348,9 +2704,9 @@ } }, "node_modules/elliptic": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", - "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "version": "6.5.5", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.5.tgz", + "integrity": "sha512-7EjbcmUm17NQFu4Pmgmq2olYMj8nwMnpcddByChSUjArp8F5DQWcIcpriwO4ZToLNAJig0yiyjswfyGNje/ixw==", "dependencies": { "bn.js": "^4.11.9", "brorand": "^1.1.0", @@ -2963,13 +3319,13 @@ } }, "node_modules/ethers": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/ethers/-/ethers-5.7.2.tgz", - "integrity": "sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg==", + "version": "6.11.1", + "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.11.1.tgz", + "integrity": "sha512-mxTAE6wqJQAbp5QAe/+o+rXOID7Nw91OZXvgpjDa1r4fAbq2Nu314oEZSbjoRLacuCzs7kUC3clEvkCQowffGg==", "funding": [ { "type": "individual", - "url": "https://gitcoin.co/grants/13/ethersjs-complete-simple-and-tiny-2" + "url": "https://github.com/sponsors/ethers-io/" }, { "type": "individual", @@ -2977,38 +3333,23 @@ } ], "dependencies": { - "@ethersproject/abi": "5.7.0", - "@ethersproject/abstract-provider": "5.7.0", - "@ethersproject/abstract-signer": "5.7.0", - "@ethersproject/address": "5.7.0", - "@ethersproject/base64": "5.7.0", - "@ethersproject/basex": "5.7.0", - "@ethersproject/bignumber": "5.7.0", - "@ethersproject/bytes": "5.7.0", - "@ethersproject/constants": "5.7.0", - "@ethersproject/contracts": "5.7.0", - "@ethersproject/hash": "5.7.0", - "@ethersproject/hdnode": "5.7.0", - "@ethersproject/json-wallets": "5.7.0", - "@ethersproject/keccak256": "5.7.0", - "@ethersproject/logger": "5.7.0", - "@ethersproject/networks": "5.7.1", - "@ethersproject/pbkdf2": "5.7.0", - "@ethersproject/properties": "5.7.0", - "@ethersproject/providers": "5.7.2", - "@ethersproject/random": "5.7.0", - "@ethersproject/rlp": "5.7.0", - "@ethersproject/sha2": "5.7.0", - "@ethersproject/signing-key": "5.7.0", - "@ethersproject/solidity": "5.7.0", - "@ethersproject/strings": "5.7.0", - "@ethersproject/transactions": "5.7.0", - "@ethersproject/units": "5.7.0", - "@ethersproject/wallet": "5.7.0", - "@ethersproject/web": "5.7.1", - "@ethersproject/wordlists": "5.7.0" + 
"@adraffy/ens-normalize": "1.10.1", + "@noble/curves": "1.2.0", + "@noble/hashes": "1.3.2", + "@types/node": "18.15.13", + "aes-js": "4.0.0-beta.5", + "tslib": "2.4.0", + "ws": "8.5.0" + }, + "engines": { + "node": ">=14.0.0" } }, + "node_modules/ethers/node_modules/@types/node": { + "version": "18.15.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", + "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==" + }, "node_modules/evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -3172,6 +3513,19 @@ "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", "integrity": "sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w==" }, + "node_modules/fs-extra": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", + "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -3361,6 +3715,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, "node_modules/graphemer": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", @@ -4064,6 +4423,17 @@ "json5": "lib/cli.js" } }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, "node_modules/jsonlint": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/jsonlint/-/jsonlint-1.6.0.tgz", @@ -4990,14 +5360,15 @@ }, "node_modules/pil-stark": { "version": "0.0.57", - "resolved": "https://registry.npmjs.org/pil-stark/-/pil-stark-0.0.57.tgz", - "integrity": "sha512-t+x029QrzmQalErvcGf+yz0DvP3IZBVjz2/CsfSpf7JtBA6LizpqQ1jxacmY3ui+zXQmGZLYrcitttUN46peKg==", + "resolved": "git+ssh://git@github.com/0xPolygonHermez/pil-stark.git#418e0ecccc4e4e57781ee7362cd6f338f94fcf18", + "license": "UNLICENSED", "dependencies": { "@iden3/binfileutils": "^0.0.11", "circom_runtime": "^0.1.18", "circomlib": "^2.0.5", "circomlibjs": "^0.1.6", "ejs": "^3.1.8", + "fs-extra": "^11.2.0", "json-bigint": "^1.0.0", "pilcom": "^0.0.22", "r1csfile": "^0.0.44", @@ -5923,6 +6294,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/temporary": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/temporary/-/temporary-1.1.0.tgz", + "integrity": "sha512-geB3U/E75RLr++koy9EBw4mZ1BCfxg72SYJQdQld1iRvka+fyijfbiXrVpWQB9bJKNsgAB4lIHOeJtVSvyiJiQ==", + "dev": true + }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -5976,6 +6353,11 @@ "strip-bom": "^3.0.0" } }, + "node_modules/tslib": { + "version": "2.4.0", + "resolved": 
"https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -6114,6 +6496,14 @@ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -6246,11 +6636,11 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/ws": { - "version": "7.4.6", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", - "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.5.0.tgz", + "integrity": "sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg==", "engines": { - "node": ">=8.3.0" + "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", diff --git a/package.json b/package.json index 081331a1..609bdaed 100644 --- a/package.json +++ b/package.json @@ -7,16 +7,6 @@ "steps_piltoolfiles": [ "buildrom buildpil buildstoragerom buildconstants exec" ], - "steps": [ - "buildrom buildpil buildstoragerom buildconstants exec pilverify buildstarkinfo buildchelpers buildconstanttree prove verify gencircom compilecircom", - "c12a_setup c12a_buildstarkinfo c12a_buildchelpers c12a_exec c12a_pilverify c12a_buildconstanttree c12a_prove c12a_verify c12a_gencircom", - "recursive1_gencircom recursive1_compile recursive1_setup recursive1_buildstarkinfo recursive1_buildchelpers recursive1_buildconstanttree recursive1_verifier_gencircom", - "recursive2_gencircom recursive2_compile recursive2_setup recursive2_buildstarkinfo recursive2_buildchelpers recursive2_buildconstanttree recursive2_verifier_gencircom", - "recursive_pil_check recursive_verifier_check ", - "recursivef_gencircom recursivef_compile recursivef_setup recursivef_buildstarkinfo recursivef_buildchelpers recursivef_buildconstanttree recursivef_verifier_gencircom", - "final_gencircom final_compile", - "fflonk_setup fflonk_evk fflonk_solidity" - ], "steps_setup": [ "buildrom buildpil buildstoragerom buildconstants buildstarkinfo buildchelpers buildconstanttree gencircom compilecircom", "c12a_setup c12a_buildstarkinfo c12a_buildchelpers c12a_buildconstanttree c12a_gencircom", @@ -25,11 +15,25 @@ "recursive_pil_check recursive_verifier_check ", "recursivef_gencircom recursivef_compile recursivef_setup recursivef_buildstarkinfo recursivef_buildchelpers recursivef_buildconstanttree recursivef_verifier_gencircom", "final_gencircom final_compile", - "fflonk_setup fflonk_evk fflonk_solidity" + "fflonk_setup fflonk_evk fflonk_solidity", + "sha256" ], - "steps_proof": [ - "exec pilverify prove verify", - "c12a_exec c12a_pilverify c12a_prove c12a_verify" + "steps_setup_blob": [ + "buildrom buildpil buildstoragerom buildconstants buildstarkinfo 
buildchelpers buildconstanttree gencircom compilecircom", + "c12a_setup c12a_buildstarkinfo c12a_buildchelpers c12a_buildconstanttree c12a_gencircom_eip4844", + "recursive1_gencircom_eip4844 recursive1_compile recursive1_setup recursive1_buildstarkinfo recursive1_buildchelpers recursive1_buildconstanttree recursive1_verifier_gencircom_eip4844", + "recursive2_gencircom_eip4844 recursive2_compile recursive2_setup recursive2_buildstarkinfo recursive2_buildchelpers recursive2_buildconstanttree recursive2_verifier_gencircom_eip4844", + "recursive_pil_check recursive_verifier_check ", + "blob_inner_buildrom blob_inner_buildpil blob_inner_buildstoragerom blob_inner_buildconstants blob_inner_buildstarkinfo blob_inner_buildchelpers blob_inner_buildconstanttree blob_inner_gencircom blob_inner_compilecircom", + "blob_inner_compressor_setup blob_inner_compressor_buildstarkinfo blob_inner_compressor_buildchelpers blob_inner_compressor_buildconstanttree blob_inner_compressor_gencircom", + "blob_inner_recursive1_gencircom blob_inner_recursive1_compile blob_inner_recursive1_setup blob_inner_recursive1_buildstarkinfo blob_inner_recursive1_buildchelpers blob_inner_recursive1_buildconstanttree blob_inner_recursive1_verifier_gencircom", + "blob_outer_gencircom blob_outer_compile blob_outer_setup blob_outer_buildstarkinfo blob_outer_buildchelpers blob_outer_buildconstanttree blob_outer_verifier_gencircom", + "blob_outer_recursive2_gencircom blob_outer_recursive2_compile blob_outer_recursive2_setup blob_outer_recursive2_buildstarkinfo blob_outer_recursive2_buildchelpers blob_outer_recursive2_buildconstanttree blob_outer_recursive2_verifier_gencircom", + "blob_outer_recursive_pil_check blob_outer_recursive_verifier_check", + "recursivef_gencircom_eip4844 recursivef_compile recursivef_setup recursivef_buildstarkinfo recursivef_buildchelpers recursivef_buildconstanttree recursivef_verifier_gencircom", + "final_gencircom_eip4844 final_compile", + "fflonk_setup fflonk_evk fflonk_solidity", + "sha256" ] }, "scripts": { @@ -37,69 +41,125 @@ "buildall": "tools/build_all.sh ${npm_package_config_steps}", "buildpiltoolfiles": "tools/build_all.sh ${npm_package_config_steps_piltoolfiles}", "buildsetup": "tools/build_all.sh ${npm_package_config_steps_setup}", - "buildproof": "tools/build_all.sh ${npm_package_config_steps_proof}", - "buildrom": ". ./pre.sh && ROM_DIR=node_modules/@0xpolygonhermez/zkevm-rom && (cd $ROM_DIR && npm run build) && cp $ROM_DIR/build/rom.json $ROM_DIR/build/metadata-rom.txt $BDIR/", - "buildstoragerom": ". ./pre.sh && ROM_DIR=node_modules/@0xpolygonhermez/zkevm-storage-rom && (cd $ROM_DIR && npm run build:rom) && cp $ROM_DIR/build/storage_sm_rom.json src/sm/sm_storage && cp src/sm/sm_storage/storage_sm_rom.json $BDIR/", - "buildpil": ". ./pre.sh && mkdir -p $BDIR/pil/zkevm && node $NODE node_modules/.bin/pilcom $PIL -o $BDIR/pil/zkevm/$PIL_JSON -c $BDIR/pols_generated -n fork_1 && for PF in `tools/included_pils.sh $BDIR/pil/zkevm/$PIL_JSON`; do cp -v $PIL_DIR/$PF $BDIR/pil/zkevm/$PF; done", - "buildconstants": ". ./pre.sh && node $NODE src/main_buildconstants -p $PIL -r $BDIR/rom.json -o $BDIR/zkevm.const", - "exec": ". ./pre.sh && INPUT=${npm_config_input:=tools/build-genesis/input_executor.json} && echo \"input: $INPUT\" && node $NODE src/main_executor $INPUT -p $PIL -r $BDIR/rom.json -o $BDIR/zkevm$NTH.commit && cp $INPUT $BDIR/input_executor.json", - "pilverify": ". ./pre.sh && $PILCOM/main_pilverifier.js $BDIR/zkevm$NTH.commit -c $BDIR/zkevm.const -p $PIL", - "buildstarkinfo": ". 
./pre.sh && $PILSTARK/main_genstarkinfo.js -p $PIL -s $BDIR/zkevm.starkstruct.json -i $BDIR/zkevm.starkinfo.json", - "buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $PIL -s $BDIR/zkevm.starkinfo.json -c $BDIR/zkevm.chelpers/zkevm.chelpers.cpp -C ZkevmSteps -o", - "buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/zkevm.const -p $PIL -s $BDIR/zkevm.starkstruct.json -t $BDIR/zkevm.consttree -v $BDIR/zkevm.verkey.json", - "prove": ". ./pre.sh && $PILSTARK/main_prover.js -m $BDIR/zkevm$NTH.commit -c $BDIR/zkevm.const -t $BDIR/zkevm.consttree -p $PIL -s $BDIR/zkevm.starkinfo.json -o $BDIR/zkevm$NTH.proof.json -z $BDIR/zkevm$NTH.zkin.proof.json -b $BDIR/zkevm$NTH.public.json", - "verify": ". ./pre.sh && $PILSTARK/main_verifier.js -p $PIL -s $BDIR/zkevm.starkinfo.json -o $BDIR/zkevm$NTH.proof.json -b $BDIR/zkevm$NTH.public.json -v $BDIR/zkevm.verkey.json", - "gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js -p $PIL -s $BDIR/zkevm.starkinfo.json -v $BDIR/zkevm.verkey.json -o $BDIR/zkevm.verifier.circom", - "compilecircom": ". ./pre.sh && circom --O1 --prime goldilocks --r1cs --sym --wasm --c --verbose $BDIR/zkevm.verifier.circom -o $BDIR -l node_modules/pil-stark/circuits.gl", - "c12a_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/zkevm.verifier.r1cs -p $BDIR/c12a.pil -c $BDIR/c12a.const -e $BDIR/c12a.exec --cols=18", - "c12a_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/c12a.pil -s $BDIR/c12a.starkstruct.json -i $BDIR/c12a.starkinfo.json", - "c12a_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $BDIR/c12a.pil -s $BDIR/c12a.starkinfo.json -c $BDIR/c12a.chelpers/c12a.chelpers.cpp -C C12aSteps", - "c12a_exec": ". ./pre.sh && $PILSTARK/compressor/main_compressor_exec.js -i $BDIR/zkevm.zkin.proof.json -w $BDIR/zkevm.verifier_js/zkevm.verifier.wasm -p $BDIR/c12a.pil -e $BDIR/c12a.exec -m $BDIR/c12a.commit", - "c12a_pilverify": ". ./pre.sh && $PILCOM/main_pilverifier.js $BDIR/c12a.commit -c $BDIR/c12a.const -p $BDIR/c12a.pil", - "c12a_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/c12a.const -p $BDIR/c12a.pil -s $BDIR/c12a.starkstruct.json -t $BDIR/c12a.consttree -v $BDIR/c12a.verkey.json", - "c12a_prove": ". ./pre.sh && $PILSTARK/main_prover.js -m $BDIR/c12a.commit -c $BDIR/c12a.const -t $BDIR/c12a.consttree -p $BDIR/c12a.pil -s $BDIR/c12a.starkinfo.json -o $BDIR/c12a.proof.json -z $BDIR/c12a.zkin.proof.json -b $BDIR/c12a.public.json", - "c12a_verify": ". ./pre.sh && $PILSTARK/main_verifier.js -p $BDIR/c12a.pil -s $BDIR/c12a.starkinfo.json -o $BDIR/c12a.proof.json -b $BDIR/c12a.public.json -v $BDIR/c12a.verkey.json", - "c12a_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain -p $BDIR/c12a.pil -s $BDIR/c12a.starkinfo.json -v $BDIR/c12a.verkey.json -o $BDIR/c12a.verifier.circom", - "recursive1_gencircom": ". ./pre.sh && cp recursive/recursive1.circom $BDIR", - "recursive1_compile": ". ./pre.sh && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/recursive1.circom -o $BDIR -l node_modules/pil-stark/circuits.gl", - "recursive1_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/recursive1.r1cs -p $BDIR/recursive1.pil -c $BDIR/recursive1.const -e $BDIR/recursive1.exec --cols=18", - "recursive1_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/recursive1.pil -s $BDIR/recursive.starkstruct.json -i $BDIR/recursive1.starkinfo.json", - "recursive1_buildchelpers": ". 
./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $BDIR/recursive1.pil -s $BDIR/recursive1.starkinfo.json -c $BDIR/recursive1.chelpers/recursive1.chelpers.cpp -C Recursive1Steps", - "recursive1_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/recursive1.const -p $BDIR/recursive1.pil -s $BDIR/recursive.starkstruct.json -t $BDIR/recursive1.consttree -v $BDIR/recursive1.verkey.json", - "recursive1_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput -p $BDIR/recursive1.pil -s $BDIR/recursive1.starkinfo.json -v $BDIR/recursive1.verkey.json -o $BDIR/recursive1.verifier.circom", - "recursive2_gencircom": ". ./pre.sh && node $NODE src/main_genrecursive.js -v $BDIR/recursive1.verkey.json -o $BDIR/recursive2.circom", - "recursive2_compile": ". ./pre.sh && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/recursive2.circom -o $BDIR -l node_modules/pil-stark/circuits.gl", - "recursive2_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/recursive2.r1cs -p $BDIR/recursive2.pil -c $BDIR/recursive2.const -e $BDIR/recursive2.exec --cols=18", - "recursive2_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/recursive2.pil -s $BDIR/recursive.starkstruct.json -i $BDIR/recursive2.starkinfo.json", - "recursive2_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $BDIR/recursive2.pil -s $BDIR/recursive2.starkinfo.json -c $BDIR/recursive2.chelpers/recursive2.chelpers.cpp -C Recursive2Steps", - "recursive2_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/recursive2.const -p $BDIR/recursive2.pil -s $BDIR/recursive.starkstruct.json -t $BDIR/recursive2.consttree -v $BDIR/recursive2.verkey.json", - "recursive2_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput -p $BDIR/recursive2.pil -s $BDIR/recursive2.starkinfo.json -v $BDIR/recursive2.verkey.json -o $BDIR/recursive2.verifier.circom", - "recursive_pil_check": ". ./pre.sh && F1=$BDIR/recursive1.pil && F2=$BDIR/recursive2.pil && diff $F1 $F2 || (echo \"ERROR: $F1 $F2 are different\"; exit 1)", - "recursive_verifier_check": ". ./pre.sh && F1=$BDIR/recursive1.verifier.circom && F2=$BDIR/recursive2.verifier.circom && diff $F1 $F2 || (echo \"ERROR: $F1 $F2 are different\"; exit 1)", - "recursivef_gencircom": ". ./pre.sh && node $NODE src/main_genrecursivef.js --verkey1 $BDIR/recursive1.verkey.json --verkey2 $BDIR/recursive2.verkey.json -o $BDIR/recursivef.circom", - "recursivef_compile": ". ./pre.sh && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/recursivef.circom -o $BDIR -l node_modules/pil-stark/circuits.gl -l node_modules/circomlib/circuits", - "recursivef_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/recursivef.r1cs -p $BDIR/recursivef.pil -c $BDIR/recursivef.const -e $BDIR/recursivef.exec --cols=12", - "recursivef_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/recursivef.pil -s $BDIR/recursivef.starkstruct.json -i $BDIR/recursivef.starkinfo.json", - "recursivef_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $BDIR/recursivef.pil -s $BDIR/recursivef.starkinfo.json -c $BDIR/recursivef.chelpers/recursivef.chelpers.cpp -C StarkRecursiveFSteps", - "recursivef_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/recursivef.const -p $BDIR/recursivef.pil -s $BDIR/recursivef.starkstruct.json -t $BDIR/recursivef.consttree -v $BDIR/recursivef.verkey.json", - "recursivef_verifier_gencircom": ". 
./pre.sh && $PILSTARK/main_pil2circom.js --skipMain -p $BDIR/recursivef.pil -s $BDIR/recursivef.starkinfo.json -v $BDIR/recursivef.verkey.json -o $BDIR/recursivef.verifier.circom", - "final_gencircom": ". ./pre.sh && cp recursive/final.circom $BDIR", - "final_compile": ". ./pre.sh && circom --O1 --r1cs --sym --wasm --c --verbose $BDIR/final.circom -o $BDIR -l node_modules/pil-stark/circuits.bn128 -l node_modules/circomlib/circuits", + "blobbuildsetup": "tools/build_all.sh ${npm_package_config_steps_setup_blob}", + "buildrom": ". ./pre.sh && mkdir -p $BDIR/config/scripts/ && ROM_DIR=node_modules/@0xpolygonhermez/zkevm-rom && (cd $ROM_DIR && npm run build) && cp $ROM_DIR/build/rom.json $ROM_DIR/build/metadata-rom.txt $BDIR/config/scripts/", + "buildpil": ". ./pre.sh && mkdir -p $BDIR/pil/zkevm && node $NODE node_modules/.bin/pilcom $PIL -o $BDIR/pil/zkevm/$PIL_JSON && for PF in `tools/included_pils.sh $BDIR/pil/zkevm/$PIL_JSON`; do cp -v $PIL_DIR/$PF $BDIR/pil/zkevm/$PF; done", + "buildstoragerom": ". ./pre.sh && ROM_DIR=node_modules/@0xpolygonhermez/zkevm-storage-rom && (cd $ROM_DIR && npm run build:rom) && cp $ROM_DIR/build/storage_sm_rom.json src/sm/sm_storage && cp src/sm/sm_storage/storage_sm_rom.json $BDIR/config/scripts/", + "buildconstants": ". ./pre.sh && mkdir -p $BDIR/config/zkevm/ && node $NODE src/main_buildconstants -p $PIL -r $BDIR/config/scripts/rom.json -o $BDIR/config/zkevm/zkevm.const", + "buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $PIL -s $BDIR/build/zkevm.starkstruct.json -i $BDIR/config/zkevm/zkevm.starkinfo.json", + "buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/zkevm/zkevm.starkinfo.json -c $BDIR/c_files/zkevm.chelpers -C Zkevm -b $BDIR/config/zkevm/zkevm.chelpers.bin -g $BDIR/config/zkevm/zkevm.chelpers_generic.bin", + "buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/zkevm/zkevm.const -p $PIL -s $BDIR/config/zkevm/zkevm.starkinfo.json -t $BDIR/config/zkevm/zkevm.consttree -v $BDIR/config/zkevm/zkevm.verkey.json", + "gencircom": ". ./pre.sh && mkdir -p $BDIR/circom && $PILSTARK/main_pil2circom.js -p $PIL -s $BDIR/config/zkevm/zkevm.starkinfo.json -v $BDIR/config/zkevm/zkevm.verkey.json -o $BDIR/circom/zkevm.verifier.circom", + "compilecircom": ". ./pre.sh && circom --O1 --prime goldilocks --r1cs --sym --wasm --c --verbose $BDIR/circom/zkevm.verifier.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "exec": ". ./pre.sh && INPUT=${npm_config_input:=tools/build-genesis/input_executor.json} && echo \"input: $INPUT\" && node $NODE src/main_executor $INPUT -p $PIL -r $BDIR/config/scripts/rom.json -o $BDIR/zkevm$NTH.commit && cp $INPUT $BDIR/input_executor.json", + "pilverify": ". ./pre.sh && $PILCOM/main_pilverifier.js $BDIR/zkevm$NTH.commit -c $BDIR/config/zkevm/zkevm.const -p $PIL", + "blob_inner_buildrom": ". ./pre.sh && ROM_DIR=node_modules/@0xpolygonhermez/zkevm-blob-rom && (cd $ROM_DIR && npm run build) && cp $ROM_DIR/build/blob-rom.json $ROM_DIR/build/metadata-blob-rom.txt $BDIR/config/scripts/", + "blob_inner_buildpil": ". ./pre.sh && mkdir -p $BDIR/pil/blob_inner && node $NODE node_modules/.bin/pilcom $BLOB_PIL -o $BDIR/pil/blob_inner/$BLOB_PIL_JSON && for PF in `tools/included_pils.sh $BDIR/pil/blob_inner/$PIL_JSON`; do cp -v $BLOB_PIL_DIR/$PF $BDIR/pil/blob_inner/$PF; done", + "blob_inner_buildstoragerom": ". 
./pre.sh && ROM_DIR=node_modules/@0xpolygonhermez/zkevm-storage-rom && (cd $ROM_DIR && npm run build:rom) && cp $ROM_DIR/build/storage_sm_rom.json $BDIR/config/scripts/", + "blob_inner_buildconstants": ". ./pre.sh && mkdir -p $BDIR/config/blob_inner/ && node $NODE src/main_buildconstants -p $BLOB_PIL -r $BDIR/config/scripts/blob-rom.json -o $BDIR/config/blob_inner/blob_inner.const --blob", + "blob_inner_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BLOB_PIL -s $BDIR/config/blob_inner/blob_inner.starkstruct.json -i $BDIR/config/blob_inner/blob_inner.starkinfo.json", + "blob_inner_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/blob_inner/blob_inner.starkinfo.json -c $BDIR/c_files/blob_inner.chelpers -C BlobInner -b $BDIR/config/blob_inner/blob_inner.chelpers.bin -g $BDIR/config/blob_inner/blob_inner.chelpers_generic.bin", + "blob_inner_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/blob_inner/blob_inner.const -p $BLOB_PIL -s $BDIR/config/blob_inner/blob_inner.starkinfo.json -t $BDIR/config/blob_inner/blob_inner.consttree -v $BDIR/config/blob_inner/blob_inner.verkey.json", + "blob_inner_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js -p $BLOB_PIL -s $BDIR/config/blob_inner/blob_inner.starkinfo.json -v $BDIR/config/blob_inner/blob_inner.verkey.json -o $BDIR/circom/blob_inner.verifier.circom", + "blob_inner_compilecircom": ". ./pre.sh && circom --O1 --prime goldilocks --r1cs --sym --wasm --c --verbose $BDIR/circom/blob_inner.verifier.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "c12a_setup": ". ./pre.sh && mkdir -p $BDIR/config/c12a/ && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/zkevm.verifier.r1cs -p $BDIR/pil/c12a.pil -c $BDIR/config/c12a/c12a.const -e $BDIR/config/c12a/c12a.exec --cols=18", + "c12a_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/c12a.pil -s $BDIR/build/c12a.starkstruct.json -i $BDIR/config/c12a/c12a.starkinfo.json", + "c12a_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/c12a/c12a.starkinfo.json -c $BDIR/c_files/c12a.chelpers -C C12a -b $BDIR/config/c12a/c12a.chelpers.bin -g $BDIR/config/c12a/c12a.chelpers_generic.bin", + "c12a_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/c12a/c12a.const -p $BDIR/pil/c12a.pil -s $BDIR/config/c12a/c12a.starkinfo.json -t $BDIR/config/c12a/c12a.consttree -v $BDIR/config/c12a/c12a.verkey.json", + "c12a_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain -p $BDIR/pil/c12a.pil -s $BDIR/config/c12a/c12a.starkinfo.json -v $BDIR/config/c12a/c12a.verkey.json -o $BDIR/circom/c12a.verifier.circom", + "c12a_gencircom_eip4844": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --enableInput -p $BDIR/pil/c12a.pil -s $BDIR/config/c12a/c12a.starkinfo.json -v $BDIR/config/c12a/c12a.verkey.json -o $BDIR/circom/c12a.verifier.circom", + "recursive1_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/c12a/c12a.starkinfo.json --template=recursive1 -r $BDIR/circom/recursive1.circom -v $BDIR/config/c12a/c12a.verkey.json --verifiername=c12a --aggkey", + "recursive1_gencircom_eip4844": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/c12a/c12a.starkinfo.json --template=recursive1 -r $BDIR/circom/recursive1.circom -v $BDIR/config/c12a/c12a.verkey.json --setenable --verifiername=c12a --aggkey", + "recursive1_compile": ". 
./pre.sh && mkdir -p $BDIR/config/recursive1/ && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/circom/recursive1.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "recursive1_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/recursive1.r1cs -p $BDIR/pil/recursive1.pil -c $BDIR/config/recursive1/recursive1.const -e $BDIR/config/recursive1/recursive1.exec --cols=18", + "recursive1_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/recursive1.pil -s $BDIR/build/recursive.starkstruct.json -i $BDIR/config/recursive1/recursive1.starkinfo.json", + "recursive1_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/recursive1/recursive1.starkinfo.json -c $BDIR/c_files/recursive1.chelpers -C Recursive1 -b $BDIR/config/recursive1/recursive1.chelpers.bin -g $BDIR/config/recursive1/recursive1.chelpers_generic.bin", + "recursive1_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/recursive1/recursive1.const -p $BDIR/pil/recursive1.pil -s $BDIR/config/recursive1/recursive1.starkinfo.json -t $BDIR/config/recursive1/recursive1.consttree -v $BDIR/config/recursive1/recursive1.verkey.json", + "recursive1_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput -p $BDIR/pil/recursive1.pil -s $BDIR/config/recursive1/recursive1.starkinfo.json -v $BDIR/config/recursive1/recursive1.verkey.json -o $BDIR/circom/recursive1.verifier.circom", + "recursive1_verifier_gencircom_eip4844": ". ./pre.sh && $PILSTARK/main_pil2circom.js --enableInput --skipMain --verkeyInput -p $BDIR/pil/recursive1.pil -s $BDIR/config/recursive1/recursive1.starkinfo.json -v $BDIR/config/recursive1/recursive1.verkey.json -o $BDIR/circom/recursive1.verifier.circom", + "recursive2_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/recursive1/recursive1.starkinfo.json --template=recursive2 -r $BDIR/circom/recursive2.circom -v $BDIR/config/recursive1/recursive1.verkey.json --aggkey --verifiername=recursive1 --builddir=$BDIR/circom", + "recursive2_gencircom_eip4844": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/recursive1/recursive1.starkinfo.json --template=recursive2_batch -r $BDIR/circom/recursive2.circom -v $BDIR/config/recursive1/recursive1.verkey.json --aggkey --setenable --isagg --verifiername=recursive1 --builddir=$BDIR/circom", + "recursive2_compile": ". ./pre.sh && mkdir -p $BDIR/config/recursive2/ && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/circom/recursive2.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "recursive2_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/recursive2.r1cs -p $BDIR/pil/recursive2.pil -c $BDIR/config/recursive2/recursive2.const -e $BDIR/config/recursive2/recursive2.exec --cols=18", + "recursive2_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/recursive2.pil -s $BDIR/build/recursive.starkstruct.json -i $BDIR/config/recursive2/recursive2.starkinfo.json", + "recursive2_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/recursive2/recursive2.starkinfo.json -c $BDIR/c_files/recursive2.chelpers -C Recursive2 -b $BDIR/config/recursive2/recursive2.chelpers.bin -g $BDIR/config/recursive2/recursive2.chelpers_generic.bin", + "recursive2_buildconstanttree": ". 
./pre.sh && $BCTREE -c $BDIR/config/recursive2/recursive2.const -p $BDIR/pil/recursive2.pil -s $BDIR/config/recursive2/recursive2.starkinfo.json -t $BDIR/config/recursive2/recursive2.consttree -v $BDIR/config/recursive2/recursive2.verkey.json", + "recursive2_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput -p $BDIR/pil/recursive2.pil -s $BDIR/config/recursive2/recursive2.starkinfo.json -v $BDIR/config/recursive2/recursive2.verkey.json -o $BDIR/circom/recursive2.verifier.circom", + "recursive2_verifier_gencircom_eip4844": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput --enableInput -p $BDIR/pil/recursive2.pil -s $BDIR/config/recursive2/recursive2.starkinfo.json -v $BDIR/config/recursive2/recursive2.verkey.json -o $BDIR/circom/recursive2.verifier.circom", + "recursive_pil_check": ". ./pre.sh && F1=$BDIR/pil/recursive1.pil && F2=$BDIR/pil/recursive2.pil && diff $F1 $F2 || (echo \"ERROR: $F1 $F2 are different\"; exit 1)", + "recursive_verifier_check": ". ./pre.sh && F1=$BDIR/circom/recursive1.verifier.circom && F2=$BDIR/circom/recursive2.verifier.circom && diff $F1 $F2 || (echo \"ERROR: $F1 $F2 are different\"; exit 1)", + "blob_inner_compressor_setup": ". ./pre.sh && mkdir -p $BDIR/config/blob_inner_compressor/ && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/blob_inner.verifier.r1cs -p $BDIR/pil/blob_inner_compressor.pil -c $BDIR/config/blob_inner_compressor/blob_inner_compressor.const -e $BDIR/config/blob_inner_compressor/blob_inner_compressor.exec --cols=18", + "blob_inner_compressor_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/blob_inner_compressor.pil -s $BDIR/build/blob_inner_compressor.starkstruct.json -i $BDIR/config/blob_inner_compressor/blob_inner_compressor.starkinfo.json", + "blob_inner_compressor_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/blob_inner_compressor/blob_inner_compressor.starkinfo.json -c $BDIR/c_files/blob_inner_compressor.chelpers -C BlobInnerCompressor -b $BDIR/config/blob_inner_compressor/blob_inner_compressor.chelpers.bin -g $BDIR/config/blob_inner_compressor/blob_inner_compressor.chelpers_generic.bin", + "blob_inner_compressor_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/blob_inner_compressor/blob_inner_compressor.const -p $BDIR/pil/blob_inner_compressor.pil -s $BDIR/config/blob_inner_compressor/blob_inner_compressor.starkinfo.json -t $BDIR/config/blob_inner_compressor/blob_inner_compressor.consttree -v $BDIR/config/blob_inner_compressor/blob_inner_compressor.verkey.json", + "blob_inner_compressor_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain -p $BDIR/pil/blob_inner_compressor.pil -s $BDIR/config/blob_inner_compressor/blob_inner_compressor.starkinfo.json -v $BDIR/config/blob_inner_compressor/blob_inner_compressor.verkey.json -o $BDIR/circom/blob_inner_compressor.verifier.circom", + "blob_inner_recursive1_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/blob_inner_compressor/blob_inner_compressor.starkinfo.json --template=recursive1 -r $BDIR/circom/blob_inner_recursive1.circom -v $BDIR/config/blob_inner_compressor/blob_inner_compressor.verkey.json --verifiername=blob_inner_compressor", + "blob_inner_recursive1_compile": ". 
./pre.sh && mkdir -p $BDIR/config/blob_inner_recursive1/ && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/circom/blob_inner_recursive1.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "blob_inner_recursive1_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/blob_inner_recursive1.r1cs -p $BDIR/pil/blob_inner_recursive1.pil -c $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.const -e $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.exec --cols=18", + "blob_inner_recursive1_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/blob_inner_recursive1.pil -s $BDIR/build/blob_inner_recursive1.starkstruct.json -i $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.starkinfo.json", + "blob_inner_recursive1_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.starkinfo.json -c $BDIR/c_files/blob_inner_recursive1.chelpers -C BlobInnerRecursive1 -b $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.chelpers.bin -g $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.chelpers_generic.bin", + "blob_inner_recursive1_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.const -p $BDIR/pil/blob_inner_recursive1.pil -s $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.starkinfo.json -t $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.consttree -v $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.verkey.json", + "blob_inner_recursive1_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --index=2 -p $BDIR/pil/blob_inner_recursive1.pil -s $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.starkinfo.json -v $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.verkey.json -o $BDIR/circom/blob_inner_recursive1.verifier.circom", + "blob_outer_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/recursive2/recursive2.starkinfo.json $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.starkinfo.json --template=blob_outer -v $BDIR/config/recursive1/recursive1.verkey.json $BDIR/config/recursive2/recursive2.verkey.json $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.verkey.json --verifiername=recursive2 --verifiername2=blob_inner_recursive1 -r $BDIR/circom/blob_outer.circom -b $BDIR/circom", + "blob_outer_compile": ". ./pre.sh && mkdir -p $BDIR/config/blob_outer/ && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/circom/blob_outer.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "blob_outer_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/blob_outer.r1cs -p $BDIR/pil/blob_outer.pil -c $BDIR/config/blob_outer/blob_outer.const -e $BDIR/config/blob_outer/blob_outer.exec --cols=18", + "blob_outer_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/blob_outer.pil -s $BDIR/build/blob_outer.starkstruct.json -i $BDIR/config/blob_outer/blob_outer.starkinfo.json", + "blob_outer_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/blob_outer/blob_outer.starkinfo.json -c $BDIR/c_files/blob_outer.chelpers -C BlobOuter -b $BDIR/config/blob_outer/blob_outer.chelpers.bin -g $BDIR/config/blob_outer/blob_outer.chelpers_generic.bin", + "blob_outer_buildconstanttree": ". 
./pre.sh && $BCTREE -c $BDIR/config/blob_outer/blob_outer.const -p $BDIR/pil/blob_outer.pil -s $BDIR/config/blob_outer/blob_outer.starkinfo.json -t $BDIR/config/blob_outer/blob_outer.consttree -v $BDIR/config/blob_outer/blob_outer.verkey.json", + "blob_outer_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput -p $BDIR/pil/blob_outer.pil -s $BDIR/config/blob_outer/blob_outer.starkinfo.json -v $BDIR/config/blob_outer/blob_outer.verkey.json -o $BDIR/circom/blob_outer.verifier.circom", + "blob_outer_recursive2_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/blob_outer/blob_outer.starkinfo.json --template=recursive2_blob -r $BDIR/circom/blob_outer_recursive2.circom -v $BDIR/config/blob_outer/blob_outer.verkey.json --verifiername=blob_outer -b $BDIR/circom", + "blob_outer_recursive2_compile": ". ./pre.sh && mkdir -p $BDIR/config/blob_outer_recursive2/ && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/circom/blob_outer_recursive2.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl", + "blob_outer_recursive2_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/blob_outer_recursive2.r1cs -p $BDIR/pil/blob_outer_recursive2.pil -c $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.const -e $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.exec --cols=18", + "blob_outer_recursive2_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/blob_outer_recursive2.pil -s $BDIR/build/blob_outer.starkstruct.json -i $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.starkinfo.json", + "blob_outer_recursive2_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.starkinfo.json -c $BDIR/c_files/blob_outer_recursive2.chelpers -C BlobOuterRecursive2 -b $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.chelpers.bin -g $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.chelpers_generic.bin", + "blob_outer_recursive2_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.const -p $BDIR/pil/blob_outer_recursive2.pil -s $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.starkinfo.json -t $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.consttree -v $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.verkey.json", + "blob_outer_recursive2_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain --verkeyInput -p $BDIR/pil/blob_outer_recursive2.pil -s $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.starkinfo.json -v $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.verkey.json -o $BDIR/circom/blob_outer_recursive2.verifier.circom", + "blob_outer_recursive_pil_check": ". ./pre.sh && F1=$BDIR/pil/blob_outer.pil && F2=$BDIR/pil/blob_outer_recursive2.pil && diff $F1 $F2 || (echo \"ERROR: $F1 $F2 are different\"; exit 1)", + "blob_outer_recursive_verifier_check": ". ./pre.sh && F1=$BDIR/circom/blob_outer.verifier.circom && F2=$BDIR/circom/blob_outer_recursive2.verifier.circom && diff $F1 $F2 || (echo \"ERROR: $F1 $F2 are different\"; exit 1)", + "recursivef_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/recursive2/recursive2.starkinfo.json --template=recursivef -r $BDIR/circom/recursivef.circom -v $BDIR/config/recursive1/recursive1.verkey.json $BDIR/config/recursive2/recursive2.verkey.json --verifiername=recursive2", + "recursivef_gencircom_eip4844": ". 
./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.starkinfo.json --template=recursivef -r $BDIR/circom/recursivef.circom -v $BDIR/config/blob_outer/blob_outer.verkey.json $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.verkey.json --verifiername=blob_outer_recursive2", + "recursivef_compile": ". ./pre.sh && mkdir -p $BDIR/config/recursivef/ && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/circom/recursivef.circom -o $BDIR/build -l node_modules/pil-stark/circuits.gl -l node_modules/circomlib/circuits", + "recursivef_setup": ". ./pre.sh && $PILSTARK/compressor/main_compressor_setup.js -r $BDIR/build/recursivef.r1cs -p $BDIR/pil/recursivef.pil -c $BDIR/config/recursivef/recursivef.const -e $BDIR/config/recursivef/recursivef.exec --cols=12", + "recursivef_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/pil/recursivef.pil -s $BDIR/build/recursivef.starkstruct.json -i $BDIR/config/recursivef/recursivef.starkinfo.json --arity=4", + "recursivef_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -s $BDIR/config/recursivef/recursivef.starkinfo.json -c $BDIR/c_files/recursivef.chelpers -C RecursiveF -b $BDIR/config/recursivef/recursivef.chelpers.bin -g $BDIR/config/recursivef/recursivef.chelpers_generic.bin", + "recursivef_buildconstanttree": ". ./pre.sh && $BCTREE -c $BDIR/config/recursivef/recursivef.const -p $BDIR/pil/recursivef.pil -s $BDIR/config/recursivef/recursivef.starkinfo.json -t $BDIR/config/recursivef/recursivef.consttree -v $BDIR/config/recursivef/recursivef.verkey.json", + "recursivef_verifier_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js --skipMain -p $BDIR/pil/recursivef.pil -s $BDIR/config/recursivef/recursivef.starkinfo.json -v $BDIR/config/recursivef/recursivef.verkey.json -o $BDIR/circom/recursivef.verifier.circom", + "final_gencircom": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/recursivef/recursivef.starkinfo.json -r $BDIR/circom/final.circom --template=final -v $BDIR/config/recursivef/recursivef.verkey.json --verifiername=recursivef -b $BDIR/circom", + "final_gencircom_eip4844": ". ./pre.sh && node $NODE src/main_gencircom.js -s $BDIR/config/recursivef/recursivef.starkinfo.json -r $BDIR/circom/final.circom $BDIR/circom/get_sha256_inputs.circom --template=final_blob -v $BDIR/config/recursivef/recursivef.verkey.json --verifiername=recursivef -b $BDIR/circom", + "final_compile": ". ./pre.sh && mkdir -p $BDIR/config/final/ && circom --O1 --r1cs --sym --wasm --c --verbose $BDIR/circom/final.circom -o $BDIR/build/ -l node_modules/pil-stark/circuits.bn128 -l node_modules/circomlib/circuits", "downloadptaw": "wget -P build https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final.ptau", - "fflonk_setup": ". ./pre.sh && $SNARKJS ffs $BDIR/final.r1cs build/powersOfTau28_hez_final.ptau $BDIR/final.fflonk.zkey --verbose", - "fflonk_evk": ". ./pre.sh && $SNARKJS zkev $BDIR/final.fflonk.zkey $BDIR/final.fflonk.verkey.json", - "fflonk_solidity": ". ./pre.sh && $SNARKJS zkesv $BDIR/final.fflonk.zkey $BDIR/final.fflonk.verifier.sol", + "fflonk_setup": ". ./pre.sh && $FFLONKSETUP $BDIR/build/final.r1cs build/powersOfTau28_hez_final.ptau $BDIR/config/final/final.fflonk.zkey", + "fflonk_evk": ". ./pre.sh && $SNARKJS zkev $BDIR/config/final/final.fflonk.zkey $BDIR/config/final/final.fflonk.verkey.json", + "fflonk_solidity": ". 
./pre.sh && $SNARKJS zkesv $BDIR/config/final/final.fflonk.zkey $BDIR/build/final.fflonk.verifier.sol", + "sha256": ". ./pre.sh && echo 'Executing sha256 command...' && LC_ALL=C; cd $BDIR; find * -type f ! -name \"steps.log\" ! -path \"steps*\" ! -name \"sha256.txt*\" ! -name \"last_step.txt\"|sort | xargs sha256sum | tee build/sha256.txt && echo 'sha256 command executed successfully.'", "prefflonk_setup": ". ./pre.sh && [ -f build/powersOfTau28_hez_final.ptau ] || npm run downloadptaw", - "prebuildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $PIL -s $BDIR/zkevm.starkstruct.json; else cp src/zkevm.starkstruct.json $BDIR; fi", - "prec12a_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/c12a.pil -s $BDIR/c12a.starkstruct.json; else cp src/zkevm.c12a.starkstruct.json $BDIR/c12a.starkstruct.json; fi", "buildarith": "node tools/arith/parse tools/arith/arith.ejs.pil -o pil/arith.pil && node tools/arith/arith_eq_gen tools/arith/arith.ejs.pil -o src/sm/sm_arith/sm_arith_##.js", - "prerecursive1_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/recursive1.pil -s $BDIR/recursive.starkstruct.json; else cp src/recursive.starkstruct.json $BDIR; fi", - "prerecursivef_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t BN128 -p $BDIR/recursivef.pil -s $BDIR/recursivef.starkstruct.json; else cp src/recursivef.starkstruct.json $BDIR; fi", + "prebuildstarkinfo": ". ./pre.sh && mkdir -p $BDIR/build && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $PIL -s $BDIR/build/zkevm.starkstruct.json; else cp src/stark_struct/batch/zkevm.starkstruct.json $BDIR/build/zkevm.starkstruct.json; fi", + "prec12a_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/pil/c12a.pil -s $BDIR/build/c12a.starkstruct.json; else cp src/stark_struct/batch/c12a.starkstruct.json $BDIR/build/c12a.starkstruct.json; fi", + "prerecursive1_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/pil/recursive1.pil -s $BDIR/build/recursive.starkstruct.json; else cp src/stark_struct/batch/recursive.starkstruct.json $BDIR/build/recursive.starkstruct.json; fi", + "preblob_inner_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $PIL -s $BDIR/config/blob_inner/blob_inner.starkstruct.json; else cp src/stark_struct/blob/blob_inner.starkstruct.json $BDIR/config/blob_inner/blob_inner.starkstruct.json; fi", + "preblob_inner_compressor_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/pil/blob_inner_compressor.pil -s $BDIR/build/blob_inner_compressor.starkstruct.json; else cp src/stark_struct/blob/blob_inner_compressor.starkstruct.json $BDIR/build/blob_inner_compressor.starkstruct.json; fi", + "preblob_inner_recursive1_buildstarkinfo": ". 
./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/pil/blob_inner_recursive1.pil -s $BDIR/build/blob_inner_recursive1.starkstruct.json; else cp src/stark_struct/blob/blob_inner_recursive1.starkstruct.json $BDIR/build/blob_inner_recursive1.starkstruct.json; fi", + "preblob_outer_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/blob_outer.pil -s $BDIR/build/blob_outer.starkstruct.json; else cp src/stark_struct/blob/blob_outer.starkstruct.json $BDIR/build/blob_outer.starkstruct.json; fi", + "prerecursivef_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t BN128 -p $BDIR/pil/recursivef.pil -s $BDIR/build/recursivef.starkstruct.json; else cp src/stark_struct/recursivef.starkstruct.json $BDIR/build/recursivef.starkstruct.json; fi", + "postbuildconstants": ". ./pre.sh && cp -rf src/sm/sm_keccakf/keccak_script.json $BDIR/config/scripts/keccak_script.json && cp -rf src/sm/sm_keccakf/keccak_connections.json $BDIR/config/scripts/keccak_connections.json && cp -rf src/sm/sm_sha256f/sha256_script.json $BDIR/config/scripts/sha256_script.json && cp -rf src/sm/sm_sha256f/sha256_gates.json $BDIR/config/scripts/sha256_gates.json", + "postcompilecircom": ". ./pre.sh && cp -rf $BDIR/build/zkevm.verifier_cpp/zkevm.verifier.dat $BDIR/config/zkevm/zkevm.verifier.dat && cp -rf $BDIR/build/zkevm.verifier_cpp $BDIR/c_files", + "postrecursive1_compile": ". ./pre.sh && cp -rf $BDIR/build/recursive1_cpp/recursive1.dat $BDIR/config/recursive1/recursive1.dat && cp -rf $BDIR/build/recursive1_cpp $BDIR/c_files", + "postrecursive2_compile": ". ./pre.sh && cp -rf $BDIR/build/recursive2_cpp/recursive2.dat $BDIR/config/recursive2/recursive2.dat && cp -rf $BDIR/build/recursive2_cpp $BDIR/c_files", + "postblob_inner_compilecircom": ". ./pre.sh && cp -rf $BDIR/build/blob_inner.verifier_cpp/blob_inner.verifier.dat $BDIR/config/blob_inner/blob_inner.verifier.dat && cp -rf $BDIR/build/blob_inner.verifier_cpp $BDIR/c_files", + "postblob_inner_recursive1_compile": ". ./pre.sh && cp -rf $BDIR/build/blob_inner_recursive1_cpp/blob_inner_recursive1.dat $BDIR/config/blob_inner_recursive1/blob_inner_recursive1.dat && cp -rf $BDIR/build/blob_inner_recursive1_cpp $BDIR/c_files", + "postblob_outer_compile": ". ./pre.sh && cp -rf $BDIR/build/blob_outer_cpp/blob_outer.dat $BDIR/config/blob_outer/blob_outer.dat && cp -rf $BDIR/build/blob_outer_cpp $BDIR/c_files", + "postblob_outer_recursive2_compile": ". ./pre.sh && cp -rf $BDIR/build/blob_outer_recursive2_cpp/blob_outer_recursive2.dat $BDIR/config/blob_outer_recursive2/blob_outer_recursive2.dat && cp -rf $BDIR/build/blob_outer_recursive2_cpp $BDIR/c_files", + "postrecursivef_compile": ". ./pre.sh && cp -rf $BDIR/build/recursivef_cpp/recursivef.dat $BDIR/config/recursivef/recursivef.dat && cp -rf $BDIR/build/recursivef_cpp $BDIR/c_files", + "postfinal_compile": ". ./pre.sh && cp -rf $BDIR/build/final_cpp/final.dat $BDIR/config/final/final.dat && cp -rf $BDIR/build/final_cpp $BDIR/c_files", "buildsetup:basic": "npm run buildsetup --pil=pil/basic_main.pil --build=build/basic_proof --starkstruct=debug", "buildall:basic": "npm run buildall --pil=pil/basic_main.pil --build=build/basic_proof --starkstruct=debug", "buildproof:basic": "npm run buildproof --pil=pil/basic_main.pil --build=build/basic_proof --starkstruct=debug", "help": "npm_config_help=true && . 
./pre.sh", "test": "mocha", + "test:recursive2": "mocha test/circuits/recursive2.circuit.test.js", + "test:blob_outer": "mocha test/circuits/blob_outer.circuit.test.js", + "test:final": "mocha test/circuits/final.circuit.test.js", "test:arith": "mocha --max-old-space-size=45000 test/sm/sm_arith.js", "test:storage": "mocha test/sm/sm_storage/sm_storage_test.js", "test:mem_align": "mocha test/sm_mem_align_test.js", @@ -122,6 +182,7 @@ "@0xpolygonhermez/zkasmcom": "github:0xPolygonHermez/zkasmcom#develop-feijoa", "@0xpolygonhermez/zkevm-commonjs": "github:0xpolygonhermez/zkevm-commonjs#develop-feijoa", "@0xpolygonhermez/zkevm-rom": "github:0xPolygonHermez/zkevm-rom#develop-feijoa", + "@0xpolygonhermez/zkevm-blob-rom": "github:0xPolygonHermez/zkevm-blob-rom#develop-feijoa", "@0xpolygonhermez/zkevm-storage-rom": "https://github.com/0xPolygonHermez/zkevm-storage-rom.git#v4.0.0-fork.7", "@grpc/grpc-js": "^1.8.14", "chalk": "^3.0.0", @@ -129,12 +190,12 @@ "circomlibjs": "^0.1.1", "docker-compose": "^0.23.19", "ejs": "^3.1.6", - "ethers": "^5.4.7", + "ethers": "^6.8.1", "ffjavascript": "0.2.63", "fs": "^0.0.1-security", "json-bigint": "^1.0.0", "lodash": "^4.17.21", - "pil-stark": "0.0.57", + "pil-stark": "https://github.com/0xPolygonHermez/pil-stark.git#feature/new_parser", "pilcom": "0.0.24", "snarkjs": "0.7.0", "yargs": "^17.4.0" @@ -147,6 +208,7 @@ "eslint-config-airbnb-base": "^15.0.0", "eslint-plugin-mocha": "^10.1.0", "mocha": "^9.1.3", - "@0xpolygonhermez/zkevm-testvectors": "github:0xPolygonHermez/zkevm-testvectors#develop-feijoa" + "@0xpolygonhermez/zkevm-testvectors": "github:0xPolygonHermez/zkevm-testvectors#develop-feijoa", + "temporary": "^1.1.0" } -} \ No newline at end of file +} diff --git a/pre.sh b/pre.sh index 925005e7..262ef2d6 100644 --- a/pre.sh +++ b/pre.sh @@ -8,7 +8,10 @@ checkAllMandatoryOptArgs() { checkMandatoryOptArg build $npm_config_build buildpath checkMandatoryOptArg pil $npm_config_pil file.pil checkMandatoryOptArg pilconfig $npm_config_pilconfig pilconfig.json + checkMandatoryOptArg blobpil $npm_config_blob_pil blobfile.pil + checkMandatoryOptArg blobpilconfig $npm_config_blob_pilconfig blobpilconfig.json checkMandatoryOptArg bctree $npm_config_bctree constanttreebuilder + checkMandatoryOptArg fflonksetup $npm_config_fflonk_setup fflonksetupbuilder checkMandatoryOptArg nth $npm_config_nth checkMandatoryOptArg starkstruct $npm_config_starkstruct debug checkMandatoryOptArg input $npm_config_input input @@ -23,7 +26,10 @@ usage() { echo " --build= folder were outputs was stored." echo " --pil=" echo " --pilconfig=" + echo " --blobpil=" + echo " --blobpilconfig=" echo " --bctree= alternative binary to generate constanttree (ex: ../zkevm-prover/build/bctree)" + echo " --fflonksetup= alternative binary to generate fflonksetup (ex: ../zkevm-prover/build/fflonksetup)" echo " --nth= suffix used on commited files and derivated (ex: _0)" echo " --starkstruct=debug auto-generate starkstruct, used in non-stardard pil as basic." echo " --input= input used in execution/proof." @@ -54,10 +60,16 @@ PIL_MAIN="${npm_config_pil:=pil/main.pil}" PIL_JSON="`basename $PIL_MAIN`.json" PIL_DIR="`dirname $PIL_MAIN`" PIL="$PIL_MAIN`[ ! -z $npm_config_pilconfig ] && echo \" -P $npm_config_pilconfig\"`" +BLOB_PIL_MAIN="${npm_config_blob_pil:=pil/main_blob.pil}" +BLOB_PIL_JSON="`basename $BLOB_PIL_MAIN`.json" +BLOB_PIL_DIR="`dirname $BLOB_PIL_MAIN`" +BLOB_PIL="$BLOB_PIL_MAIN`[ ! 
-z $npm_config_blob_pilconfig ] && echo \" -P $npm_config_blob_pilconfig\"`" PILSTARK="node $NODE node_modules/pil-stark/src" PILCOM="node $NODE node_modules/.bin/pilcom" SNARKJS="node $NODE node_modules/snarkjs/cli.js" BCTREE="${npm_config_bctree:=$PILSTARK/main_buildconsttree.js}" +FFLONKSETUP="${npm_config_fflonksetup:=$SNARKJS --verbose ffs}" +NUMCORES=$(nproc) # [ ! -z $npm_config_nth ] && NTH="${npm_config_nth}" true diff --git a/recursive/final.circom b/recursive/final.circom deleted file mode 100644 index 704cde7c..00000000 --- a/recursive/final.circom +++ /dev/null @@ -1,194 +0,0 @@ -pragma circom 2.1.0; - -/* -aggregatorAddr -> 160 -> 160 -oldStateRoot -> 256 -> 416 -oldAccInputHash -> 256 -> 672 -oldBathcNum -> 64 -> 736 -chainId -> 64 -> 800 -forkId -> 64 -> 864 -newStateRoot -> 256 -> 1120 -newAccInputHash -> 256 -> 1376 -newLocalExitRoot -> 256 -> 1632 -newBatchNum -> 64 -> 1696 - -Total: 1696 -*/ - -include "sha256/sha256.circom"; -include "bitify.circom"; -include "lessthangl.circom"; -include "recursivef.verifier.circom"; - - -template Main() { - signal output publicsHash; - - signal input aggregatorAddr; - - signal input publics[44]; - signal input root1; - signal input root2; - signal input root3; - signal input root4; - - signal input evals[86][3]; // Evaluations of the set polynomials at a challenge value z and gz - - // Leaves values of the merkle tree used to check all the queries - signal input s0_vals1[32][12]; - signal input s0_vals3[32][21]; - signal input s0_vals4[32][21]; - signal input s0_valsC[32][39]; - - signal input s0_siblings1[32][6][16]; - signal input s0_siblings3[32][6][16]; - signal input s0_siblings4[32][6][16]; - signal input s0_siblingsC[32][6][16]; - - signal input s1_root; - signal input s2_root; - signal input s3_root; - signal input s4_root; - signal input s5_root; - - signal input s1_vals[32][48]; - signal input s1_siblings[32][5][16]; - signal input s2_vals[32][24]; - signal input s2_siblings[32][4][16]; - signal input s3_vals[32][24]; - signal input s3_siblings[32][3][16]; - signal input s4_vals[32][24]; - signal input s4_siblings[32][2][16]; - signal input s5_vals[32][24]; - signal input s5_siblings[32][2][16]; - - signal input finalPol[32][3]; - - - component sv = StarkVerifier(); - sv.publics <== publics; - sv.root1 <== root1; - sv.root2 <== root2; - sv.root3 <== root3; - sv.root4 <== root4; - - sv.evals <== evals; - - sv.s0_vals1 <== s0_vals1; - sv.s0_vals3 <== s0_vals3; - sv.s0_vals4 <== s0_vals4; - sv.s0_valsC <== s0_valsC; - - sv.s0_siblings1 <== s0_siblings1; - sv.s0_siblings3 <== s0_siblings3; - sv.s0_siblings4 <== s0_siblings4; - sv.s0_siblingsC <== s0_siblingsC; - - sv.s1_root <== s1_root; - sv.s2_root <== s2_root; - sv.s3_root <== s3_root; - sv.s4_root <== s4_root; - sv.s5_root <== s5_root; - - sv.s1_vals <== s1_vals; - sv.s1_siblings <== s1_siblings; - sv.s2_vals <== s2_vals; - sv.s2_siblings <== s2_siblings; - sv.s3_vals <== s3_vals; - sv.s3_siblings <== s3_siblings; - sv.s4_vals <== s4_vals; - sv.s4_siblings <== s4_siblings; - sv.s5_vals <== s5_vals; - sv.s5_siblings <== s5_siblings; - - sv.finalPol <== finalPol; - - component publicsHasher = Sha256(1696); - - signal n2bAggregatorAddr[160] <== Num2Bits(160)(aggregatorAddr); - for (var i=0; i<160; i++) { - publicsHasher.in[0 + 160 - 1 -i] <== n2bAggregatorAddr[i]; - } - - signal n2bOldStateRoot[8][32]; - for (var i=0; i<8; i++) { - n2bOldStateRoot[i] <== Num2Bits(32)(publics[0 + i]); - for (var j=0; j<32; j++) { - publicsHasher.in[160 + 32*(8-i) - 1 -j] <== 
n2bOldStateRoot[i][j]; - } - } - - for (var i = 0; i < 4; i++) { - _<== LessThanGoldilocks()(publics[0 + 2*i] + (1 << 32) * publics[0 + 2*i + 1]); - } - - signal n2bOldAccInputHash[8][32]; - for (var i=0; i<8; i++) { - n2bOldAccInputHash[i] <== Num2Bits(32)(publics[8 + i]); - for (var j=0; j<32; j++) { - publicsHasher.in[416 + 32*(8-i) - 1 -j] <== n2bOldAccInputHash[i][j]; - } - } - - // Do 63 bits to avoid aliasing - signal n2bOldBatchNum[63] <== Num2Bits(63)(publics[16]); - for (var i=0; i<63; i++) { - publicsHasher.in[672 + 64 - 1 -i] <== n2bOldBatchNum[i]; - } - publicsHasher.in[672] <== 0; - - signal n2bChainId[63] <== Num2Bits(63)(publics[17]); - for (var i=0; i<63; i++) { - publicsHasher.in[736 + 64 - 1 -i] <== n2bChainId[i]; - } - publicsHasher.in[736] <== 0; - - signal n2bForkId[63] <== Num2Bits(63)(publics[18]); - for (var i=0; i<63; i++) { - publicsHasher.in[800 + 64 - 1 -i] <== n2bForkId[i]; - } - publicsHasher.in[800] <== 0; - - signal n2bNewStateRoot[8][32]; - for (var i=0; i<8; i++) { - n2bNewStateRoot[i] <== Num2Bits(32)(publics[19 + i]); - for (var j=0; j<32; j++) { - publicsHasher.in[864 + 32*(8-i) - 1 -j] <== n2bNewStateRoot[i][j]; - } - } - - for (var i = 0; i < 4; i++) { - _<== LessThanGoldilocks()(publics[19 + 2*i] + (1 << 32)*publics[19 + 2*i + 1]); - } - - signal n2bNewAccInputHash[8][32]; - for (var i=0; i<8; i++) { - n2bNewAccInputHash[i] <== Num2Bits(32)(publics[27+i]); - for (var j=0; j<32; j++) { - publicsHasher.in[1120 + 32*(8-i) - 1 -j] <== n2bNewAccInputHash[i][j]; - } - } - - signal n2bNewLocalExitRoot[8][32]; - for (var i=0; i<8; i++) { - n2bNewLocalExitRoot[i] <== Num2Bits(32)(publics[35 + i]); - for (var j=0; j<32; j++) { - publicsHasher.in[1376 + 32*(8-i) - 1 -j] <== n2bNewLocalExitRoot[i][j]; - } - } - - signal n2bNewBatchNum[63] <== Num2Bits(63)(publics[43]); - for (var i=0; i<63; i++) { - publicsHasher.in[1632 + 64 - 1 -i] <== n2bNewBatchNum[i]; - } - publicsHasher.in[1632] <== 0; - - component b2nPublicsHash = Bits2Num(256); - for (var i = 0; i < 256; i++) { - b2nPublicsHash.in[i] <== publicsHasher.out[255-i]; - } - - publicsHash <== b2nPublicsHash.out; -} - -component main = Main(); diff --git a/recursive/recursive1.circom b/recursive/recursive1.circom deleted file mode 100644 index 49ef4854..00000000 --- a/recursive/recursive1.circom +++ /dev/null @@ -1,78 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "c12a.verifier.circom"; - -template Main() { - - signal input publics[44]; - signal input rootC[4]; - - signal input root1[4]; - signal input root2[4]; - signal input root3[4]; - signal input root4[4]; - signal input evals[146][3]; // Evaluations of the set polynomials at a challenge value z and gz - - // Leaves values of the merkle tree used to check all the queries - signal input s0_vals1[64][18]; - signal input s0_vals3[64][78]; - signal input s0_vals4[64][12]; - signal input s0_valsC[64][52]; - - signal input s0_siblings1[64][22][4]; - signal input s0_siblings3[64][22][4]; - signal input s0_siblings4[64][22][4]; - signal input s0_siblingsC[64][22][4]; - - signal input s1_root[4]; - signal input s2_root[4]; - signal input s3_root[4]; - signal input s4_root[4]; - - signal input s1_vals[64][48]; - signal input s1_siblings[64][18][4]; - signal input s2_vals[64][48]; - signal input s2_siblings[64][14][4]; - signal input s3_vals[64][48]; - signal input s3_siblings[64][10][4]; - signal input s4_vals[64][48]; - signal input s4_siblings[64][6][4]; - - signal input finalPol[64][3]; - - - - component vA = StarkVerifier(); - - vA.publics 
<== publics; - vA.root1 <== root1; - vA.root2 <== root2; - vA.root3 <== root3; - vA.root4 <== root4; - vA.evals <== evals; - vA.s0_vals1 <== s0_vals1; - vA.s0_vals3 <== s0_vals3; - vA.s0_vals4 <== s0_vals4; - vA.s0_valsC <== s0_valsC; - vA.s0_siblings1 <== s0_siblings1; - vA.s0_siblings3 <== s0_siblings3; - vA.s0_siblings4 <== s0_siblings4; - vA.s0_siblingsC <== s0_siblingsC; - vA.s1_root <== s1_root; - vA.s2_root <== s2_root; - vA.s3_root <== s3_root; - vA.s4_root <== s4_root; - vA.s1_vals <== s1_vals; - vA.s1_siblings <== s1_siblings; - vA.s2_vals <== s2_vals; - vA.s2_siblings <== s2_siblings; - vA.s3_vals <== s3_vals; - vA.s3_siblings <== s3_siblings; - vA.s4_vals <== s4_vals; - vA.s4_siblings <== s4_siblings; - vA.finalPol <== finalPol; - -} - -component main {public [publics, rootC]}= Main(); \ No newline at end of file diff --git a/recursive/recursive2.circom.ejs b/recursive/recursive2.circom.ejs deleted file mode 100644 index f33e283f..00000000 --- a/recursive/recursive2.circom.ejs +++ /dev/null @@ -1,240 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "recursive1.verifier.circom"; -include "mux1.circom"; -include "iszero.circom"; - -template Main() { - - var rootCSingle[4]; - rootCSingle[0] = <%- constRoot[0] %>; - rootCSingle[1] = <%- constRoot[1] %>; - rootCSingle[2] = <%- constRoot[2] %>; - rootCSingle[3] = <%- constRoot[3] %>; - - signal input publics[44]; - signal input rootC[4]; - - signal input a_publics[44]; - - signal input a_root1[4]; - signal input a_root2[4]; - signal input a_root3[4]; - signal input a_root4[4]; - - signal input a_evals[118][3]; - - signal input a_s0_vals1[43][18]; - signal input a_s0_vals3[43][39]; - signal input a_s0_vals4[43][21]; - signal input a_s0_valsC[43][52]; - signal input a_s0_siblings1[43][20][4]; - signal input a_s0_siblings3[43][20][4]; - signal input a_s0_siblings4[43][20][4]; - signal input a_s0_siblingsC[43][20][4]; - - signal input a_s1_root[4]; - signal input a_s2_root[4]; - signal input a_s3_root[4]; - signal input a_s4_root[4]; - - - signal input a_s1_vals[43][48]; - signal input a_s1_siblings[43][16][4]; - signal input a_s2_vals[43][48]; - signal input a_s2_siblings[43][12][4]; - signal input a_s3_vals[43][24]; - signal input a_s3_siblings[43][9][4]; - signal input a_s4_vals[43][24]; - signal input a_s4_siblings[43][6][4]; - - - signal input a_finalPol[64][3]; - - - - signal input b_publics[44]; - - signal input b_root1[4]; - signal input b_root2[4]; - signal input b_root3[4]; - signal input b_root4[4]; - signal input b_evals[118][3]; - - signal input b_s0_vals1[43][18]; - signal input b_s0_vals3[43][39]; - signal input b_s0_vals4[43][21]; - signal input b_s0_valsC[43][52]; - signal input b_s0_siblings1[43][20][4]; - signal input b_s0_siblings3[43][20][4]; - signal input b_s0_siblings4[43][20][4]; - signal input b_s0_siblingsC[43][20][4]; - - signal input b_s1_root[4]; - signal input b_s2_root[4]; - signal input b_s3_root[4]; - signal input b_s4_root[4]; - - - signal input b_s1_vals[43][48]; - signal input b_s1_siblings[43][16][4]; - signal input b_s2_vals[43][48]; - signal input b_s2_siblings[43][12][4]; - signal input b_s3_vals[43][24]; - signal input b_s3_siblings[43][9][4]; - signal input b_s4_vals[43][24]; - signal input b_s4_siblings[43][6][4]; - - - signal input b_finalPol[64][3]; - - component vA = StarkVerifier(); - - for (var i=0; i<44; i++) { - vA.publics[i] <== a_publics[i]; - } - vA.root1 <== a_root1; - vA.root2 <== a_root2; - vA.root3 <== a_root3; - vA.root4 <== a_root4; - vA.evals <== a_evals; - 
vA.s0_vals1 <== a_s0_vals1; - vA.s0_vals3 <== a_s0_vals3; - vA.s0_vals4 <== a_s0_vals4; - vA.s0_valsC <== a_s0_valsC; - vA.s0_siblings1 <== a_s0_siblings1; - vA.s0_siblings3 <== a_s0_siblings3; - vA.s0_siblings4 <== a_s0_siblings4; - vA.s0_siblingsC <== a_s0_siblingsC; - vA.s1_root <== a_s1_root; - vA.s2_root <== a_s2_root; - vA.s3_root <== a_s3_root; - vA.s4_root <== a_s4_root; - vA.s1_vals <== a_s1_vals; - vA.s1_siblings <== a_s1_siblings; - vA.s2_vals <== a_s2_vals; - vA.s2_siblings <== a_s2_siblings; - vA.s3_vals <== a_s3_vals; - vA.s3_siblings <== a_s3_siblings; - vA.s4_vals <== a_s4_vals; - vA.s4_siblings <== a_s4_siblings; - vA.finalPol <== a_finalPol; - - component isOneBatchA = IsZero(); - isOneBatchA.in <== a_publics[43] - a_publics[16] - 1; - component a_muxRootC = MultiMux1(4); - a_muxRootC.c[0] <== rootC; - a_muxRootC.c[1] <== rootCSingle; - a_muxRootC.s <== isOneBatchA.out; - - for (var i=0; i<4; i++) { - vA.publics[44+i] <== rootC[i]; - } - vA.rootC <== a_muxRootC.out; - - component vB = StarkVerifier(); - - for (var i=0; i<44; i++) { - vB.publics[i] <== b_publics[i]; - } - vB.root1 <== b_root1; - vB.root2 <== b_root2; - vB.root3 <== b_root3; - vB.root4 <== b_root4; - vB.evals <== b_evals; - vB.s0_vals1 <== b_s0_vals1; - vB.s0_vals3 <== b_s0_vals3; - vB.s0_vals4 <== b_s0_vals4; - vB.s0_valsC <== b_s0_valsC; - vB.s0_siblings1 <== b_s0_siblings1; - vB.s0_siblings3 <== b_s0_siblings3; - vB.s0_siblings4 <== b_s0_siblings4; - vB.s0_siblingsC <== b_s0_siblingsC; - vB.s1_root <== b_s1_root; - vB.s2_root <== b_s2_root; - vB.s3_root <== b_s3_root; - vB.s4_root <== b_s4_root; - vB.s1_vals <== b_s1_vals; - vB.s1_siblings <== b_s1_siblings; - vB.s2_vals <== b_s2_vals; - vB.s2_siblings <== b_s2_siblings; - vB.s3_vals <== b_s3_vals; - vB.s3_siblings <== b_s3_siblings; - vB.s4_vals <== b_s4_vals; - vB.s4_siblings <== b_s4_siblings; - vB.finalPol <== b_finalPol; - - - component isOneBatchB = IsZero(); - isOneBatchB.in <== b_publics[43] - b_publics[16] - 1; - component b_muxRootC = MultiMux1(4); - b_muxRootC.c[0] <== rootC; - b_muxRootC.c[1] <== rootCSingle; - b_muxRootC.s <== isOneBatchB.out; - - for (var i=0; i<4; i++) { - vB.publics[44+i] <== rootC[i]; - } - vB.rootC <== b_muxRootC.out; - - // oldStateRoot - for (var i=0; i<8; i++) { - a_publics[i] === publics[i]; - } - - // oldAccInputHash - for (var i=8; i<16; i++) { - a_publics[i] === publics[i]; - } - - // oldBatchNum - a_publics[16] === publics[16]; - - // chainId - a_publics[17] === publics[17]; - - // forkId - a_publics[18] === publics[18]; - - // midStateRoot - for (var i=0; i<8; i++) { - b_publics[i] === a_publics[19+i]; - } - - // midAccInputHash - for (var i=8; i<16; i++) { - b_publics[i] === a_publics[19+i]; - } - - // midBatchNum - b_publics[16] === a_publics[19+24]; - - // chainId - b_publics[17] === publics[17]; - - // forkId - b_publics[18] === publics[18]; - - - // newStateRoot - for (var i=0; i<8; i++) { - publics[19+i] === b_publics[19+i]; - } - - // newAccInputHash - for (var i=8; i<16; i++) { - publics[19+i] === b_publics[19+i]; - } - - // localExitRoot - for (var i=16; i<24; i++) { - publics[19+i] === b_publics[19+i]; - } - - // newBatchNum - publics[19+24] === b_publics[19+24]; - -} - -component main {public [publics, rootC]}= Main(); diff --git a/recursive/recursivef.circom.ejs b/recursive/recursivef.circom.ejs deleted file mode 100644 index 93f1d6b2..00000000 --- a/recursive/recursivef.circom.ejs +++ /dev/null @@ -1,100 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include 
"recursive2.verifier.circom"; -include "mux1.circom"; -include "iszero.circom"; - -template Main() { - signal input publics[44]; - - signal input root1[4]; - signal input root2[4]; - signal input root3[4]; - signal input root4[4]; - - signal input evals[118][3]; - - signal input s0_vals1[43][18]; - signal input s0_vals3[43][39]; - signal input s0_vals4[43][21]; - signal input s0_valsC[43][52]; - signal input s0_siblings1[43][20][4]; - signal input s0_siblings3[43][20][4]; - signal input s0_siblings4[43][20][4]; - signal input s0_siblingsC[43][20][4]; - - signal input s1_root[4]; - signal input s2_root[4]; - signal input s3_root[4]; - signal input s4_root[4]; - - signal input s1_vals[43][48]; - signal input s1_siblings[43][16][4]; - signal input s2_vals[43][48]; - signal input s2_siblings[43][12][4]; - signal input s3_vals[43][24]; - signal input s3_siblings[43][9][4]; - signal input s4_vals[43][24]; - signal input s4_siblings[43][6][4]; - - signal input finalPol[64][3]; - - component sv = StarkVerifier(); - - for (var i=0; i<44; i++) { - sv.publics[i] <== publics[i]; - } - sv.root1 <== root1; - sv.root2 <== root2; - sv.root3 <== root3; - sv.root4 <== root4; - sv.evals <== evals; - sv.s0_vals1 <== s0_vals1; - sv.s0_vals3 <== s0_vals3; - sv.s0_vals4 <== s0_vals4; - sv.s0_valsC <== s0_valsC; - sv.s0_siblings1 <== s0_siblings1; - sv.s0_siblings3 <== s0_siblings3; - sv.s0_siblings4 <== s0_siblings4; - sv.s0_siblingsC <== s0_siblingsC; - sv.s1_root <== s1_root; - sv.s2_root <== s2_root; - sv.s3_root <== s3_root; - sv.s4_root <== s4_root; - sv.s1_vals <== s1_vals; - sv.s1_siblings <== s1_siblings; - sv.s2_vals <== s2_vals; - sv.s2_siblings <== s2_siblings; - sv.s3_vals <== s3_vals; - sv.s3_siblings <== s3_siblings; - sv.s4_vals <== s4_vals; - sv.s4_siblings <== s4_siblings; - sv.finalPol <== finalPol; - - component isOne = IsZero(); - isOne.in <== publics[43] -publics[16] -1; - component muxKey = MultiMux1(4); - muxKey.s <== isOne.out; - muxKey.c[0][0] <== <%- constRoot2[0] %>; - muxKey.c[0][1] <== <%- constRoot2[1] %>; - muxKey.c[0][2] <== <%- constRoot2[2] %>; - muxKey.c[0][3] <== <%- constRoot2[3] %>; - muxKey.c[1][0] <== <%- constRoot1[0] %>; - muxKey.c[1][1] <== <%- constRoot1[1] %>; - muxKey.c[1][2] <== <%- constRoot1[2] %>; - muxKey.c[1][3] <== <%- constRoot1[3] %>; - - sv.publics[44] <== <%- constRoot2[0] %>; - sv.publics[45] <== <%- constRoot2[1] %>; - sv.publics[46] <== <%- constRoot2[2] %>; - sv.publics[47] <== <%- constRoot2[3] %>; - - sv.rootC[0] <== muxKey.out[0]; - sv.rootC[1] <== muxKey.out[1]; - sv.rootC[2] <== muxKey.out[2]; - sv.rootC[3] <== muxKey.out[3]; - -} - -component main {public [publics]}= Main(); \ No newline at end of file diff --git a/src/main_gencircom.js b/src/main_gencircom.js new file mode 100644 index 00000000..634b4c6a --- /dev/null +++ b/src/main_gencircom.js @@ -0,0 +1,190 @@ +const version = require("../package").version; +const fs = require("fs"); +const path = require("path"); +const JSONbig = require('json-bigint')({ useNativeBigInt: true, alwaysParseAsBig: true }); +const { batchPublics, batchPublicsEip4844, blobInnerPublics, blobOuterPublics } = require("./templates/helpers/publics"); +const ejs = require("ejs"); +const argv = require("yargs") + .version(version) + .usage("node main_gencircom.js -v -s starkinfo.json -t starkstruct.json --cols=<12/18> --template=") + .array("v").alias("v", "verkey") + .array("s").alias("s", "starkinfo") + .alias("b", "builddir") + .alias("r", "recursivefile") + .string("template") + .string("verifiername") + 
.string("verifiername2") + .string("arity") + .argv; + + +async function run() { + const templateName = argv.template; + if(!templateName) throw new Error("A template name must be provided!"); + if(!["blob_outer", "compressor", "recursive1", "recursive2", "recursive2_batch", "recursive2_blob", "recursivef", "final", "final_blob"].includes(templateName)) throw new Error("Invalid template name provided!"); + + let template; + if(templateName.includes("final")) { + template = "final"; + } else if(templateName.includes("recursive2")) { + template = "recursive2"; + } else { + template = templateName; + } + + const recursiveFile = argv.recursivefile; + if(typeof (recursiveFile) !== "string") throw new Error("A recursive file must be provided!"); + + const starkInfoVerifiers = []; + + const starkInfos = argv.starkinfo; + if(typeof (starkInfos[0]) !== "string") throw new Error("A stark info file must be provided!"); + starkInfoVerifiers.push(JSON.parse(await fs.promises.readFile(starkInfos[0].trim(), "utf8"))); + + if(template === "blob_outer") { + if(typeof (starkInfos[1]) !== "string") throw new Error("A second stark info file must be provided!"); + starkInfoVerifiers.push(JSON.parse(await fs.promises.readFile(starkInfos[1].trim(), "utf8"))); + } + + const options = { + setEnableInput: argv.setenable || false, + setAggregatedKey: argv.aggkey || false, + isAggregatedInput: argv.isagg || false, + } + let vks = []; + + if(!argv.verkey) throw new Error("A verification key file must be provided!"); + + let verkeyArray = argv.verkey; + + if(typeof(verkeyArray[0]) !== "string") throw new Error("A second verification key file must be provided!"); + const verkey = JSONbig.parse(await fs.promises.readFile(verkeyArray[0].trim(), "utf8")); + vks.push(verkey.constRoot); + + let verifierNames = []; + + let verifierName = argv.verifiername; + if(!verifierName) throw new Error("A verifier name must be provided!") + verifierNames.push(verifierName); + + const optionsCircom = { + nStages: 3, + starkInfoVerifiers, + vks, + options, + verifierNames, + }; + + if(template === "recursive2") { + const buildDir = argv.builddir; + if(typeof (buildDir) !== "string") throw new Error("A build directory must be provided!"); + + let verifyRecursive2CircomTemplate; + let verifyRecursive2InputsCircom = { isTest: false }; + + if(templateName === "recursive2") { + verifyRecursive2CircomTemplate = await fs.promises.readFile(path.join(__dirname, "templates", "helpers", "recursive2", "recursive2_checks_batch.circom.ejs"), "utf8"); + verifyRecursive2InputsCircom.publics = batchPublics; + optionsCircom.publics = batchPublics; + } else if(templateName === "recursive2_batch") { + verifyRecursive2CircomTemplate = await fs.promises.readFile(path.join(__dirname, "templates", "helpers", "recursive2", "recursive2_checks_batch_eip4844.circom.ejs"), "utf8"); + verifyRecursive2InputsCircom.publics = batchPublicsEip4844; + optionsCircom.publics = batchPublicsEip4844; + } else if(templateName === "recursive2_blob") { + verifyRecursive2CircomTemplate = await fs.promises.readFile(path.join(__dirname, "templates", "helpers", "recursive2", "recursive2_checks_blob.circom.ejs"), "utf8"); + verifyRecursive2InputsCircom.publics = blobOuterPublics; + optionsCircom.publics = blobOuterPublics; + } else throw new Error("Invalid templateName" + templateName); + + const verifyRecursive2File = `${buildDir}/verify_recursive2.circom`; + if(typeof (verifyRecursive2File) !== "string") throw new Error("A verify recursive2 file must be provided!"); + + const 
verifyRecursive2CircomFile = ejs.render(verifyRecursive2CircomTemplate, verifyRecursive2InputsCircom); + await fs.promises.writeFile(verifyRecursive2File, verifyRecursive2CircomFile, "utf8"); + + } + + if(template === "blob_outer") { + const buildDir = argv.builddir; + if(typeof (buildDir) !== "string") throw new Error("A build directory must be provided!"); + + if(typeof(verkeyArray[1]) !== "string") throw new Error("A second verification key file must be provided!"); + const verkey2 = JSONbig.parse(await fs.promises.readFile(verkeyArray[1].trim(), "utf8")); + vks.push(verkey2.constRoot); + + if(typeof(verkeyArray[2]) !== "string") throw new Error("A third verification key file must be provided!"); + const verkey3 = JSONbig.parse(await fs.promises.readFile(verkeyArray[2].trim(), "utf8")); + vks.push(verkey3.constRoot); + + let verifierName2 = argv.verifiername2; + if(!verifierName2) throw new Error("A verifier name for blob inner must be provided!") + verifierNames.push(verifierName2); + + if(verifierNames.length < 2) throw new Error("Invalid number of verifier names provided!"); + if(starkInfoVerifiers.length < 2) throw new Error("Invalid number of stark infos provided!"); + + const verifyBlobOuterCircomTemplate = await fs.promises.readFile(path.join(__dirname, "templates", "helpers", "verify_blob_outer.circom.ejs"), "utf8"); + const optionsVerifyBlobOuterCircom = { + batchPublics: batchPublicsEip4844, + blobInnerPublics, + blobOuterPublics, + isTest: false, + }; + + optionsCircom.batchPublics = batchPublicsEip4844; + optionsCircom.blobInnerPublics = blobInnerPublics; + optionsCircom.blobOuterPublics = blobOuterPublics; + + const verifyBlobOuterFile = `${buildDir}/verify_blob_outer.circom`; + if(typeof (verifyBlobOuterFile) !== "string") throw new Error("A verify blob outer file must be provided!"); + + const verifyBlobOuterCircomFile = ejs.render(verifyBlobOuterCircomTemplate, optionsVerifyBlobOuterCircom); + await fs.promises.writeFile(verifyBlobOuterFile, verifyBlobOuterCircomFile, "utf8"); + } + + if(template === "recursivef") { + if(typeof(verkeyArray[1]) !== "string") throw new Error("A second verification key file must be provided!"); + const verkey2 = JSONbig.parse(await fs.promises.readFile(verkeyArray[1].trim(), "utf8")); + vks.push(verkey2.constRoot); + } + + if(template === "final") { + const buildDir = argv.builddir; + if(typeof (buildDir) !== "string") throw new Error("A build directory must be provided!"); + + let getSha256InputsTemplate; + let optionsGetSha256InputsCircom = { isTest: false }; + + if(templateName === "final_blob") { + getSha256InputsTemplate = await fs.promises.readFile(path.join(__dirname, "templates", "helpers", "final", "get_sha256_inputs_blob.circom.ejs"), "utf8"); + optionsGetSha256InputsCircom.publics = blobOuterPublics; + } else { + getSha256InputsTemplate = await fs.promises.readFile(path.join(__dirname, "templates", "helpers", "final", "get_sha256_inputs_batch.circom.ejs"), "utf8"); + optionsGetSha256InputsCircom.publics = blobOuterPublics; + } + + const getSha256InputsFile = `${buildDir}/get_sha256_inputs.circom`; + if(typeof (getSha256InputsFile) !== "string") throw new Error("A get sha256 inputs file must be provided!"); + + const getSha256InputsCircomFile = ejs.render(getSha256InputsTemplate, optionsGetSha256InputsCircom); + await fs.promises.writeFile(getSha256InputsFile, getSha256InputsCircomFile, "utf8"); + + options.arity = starkInfoVerifiers[0].merkleTreeArity; + } + + + const circomTemplate = await 
fs.promises.readFile(path.join(__dirname, "templates", `${template}.circom.ejs`), "utf8"); + + const circomVerifier = ejs.render(circomTemplate, optionsCircom); + await fs.promises.writeFile(recursiveFile, circomVerifier, "utf8"); + + console.log("file Generated Correctly"); + +} +run().then(()=> { + process.exit(0); +}, (err) => { + console.log(err.message); + console.log(err.stack); + process.exit(1); +}); diff --git a/src/main_genrecursive.js b/src/main_genrecursive.js deleted file mode 100644 index 166ed087..00000000 --- a/src/main_genrecursive.js +++ /dev/null @@ -1,42 +0,0 @@ -const JSONbig = require('json-bigint')({ useNativeBigInt: true, alwaysParseAsBig: true }); -const fs = require("fs"); -const path = require("path"); -const ejs = require("ejs"); -const version = require("../package").version; - -const argv = require("yargs") - .version(version) - .usage("node main_genrecursive.js -v -o ") - .alias("v", "verkey") - .alias("o", "output") - .argv; - -async function run() { - - const verKeyFile = typeof(argv.verkey) === "string" ? argv.verkey.trim() : "mycircuit.verkey.json"; - const outputFile = typeof(argv.output) === "string" ? argv.output.trim() : "mycircuit.verifier.circom"; - - const verKey = JSONbig.parse(await fs.promises.readFile(verKeyFile, "utf8")); - const constRoot = verKey.constRoot; - - const template = await fs.promises.readFile(path.join(__dirname, "..", "recursive", "recursive2.circom.ejs"), "utf8"); - - const obj = { - constRoot: constRoot, - }; - - const verifier = ejs.render(template , obj); - - await fs.promises.writeFile(outputFile, verifier, "utf8"); - - console.log("file Generated Correctly"); - -} - -run().then(()=> { - process.exit(0); -}, (err) => { - console.log(err.message); - console.log(err.stack); - process.exit(1); -}); diff --git a/src/main_genrecursivef.js b/src/main_genrecursivef.js deleted file mode 100644 index 0117b99f..00000000 --- a/src/main_genrecursivef.js +++ /dev/null @@ -1,49 +0,0 @@ -const JSONbig = require('json-bigint')({ useNativeBigInt: true, alwaysParseAsBig: true }); -const fs = require("fs"); -const path = require("path"); -const ejs = require("ejs"); -const version = require("../package").version; - -const argv = require("yargs") - .version(version) - .usage("node main_genrecursive.js --verkey1 --verkey2 -o ") - .alias("v1", "verkey1") - .alias("v2", "verkey2") - .alias("o", "output") - .argv; - -async function run() { - - const verKey1File = typeof(argv.verkey1) === "string" ? argv.verkey1.trim() : "recursive1.verkey.json"; - const verKey2File = typeof(argv.verkey2) === "string" ? argv.verkey2.trim() : "recursive2.verkey.json"; - const outputFile = typeof(argv.output) === "string" ? 
argv.output.trim() : "recursivef.circom"; - - const verKey1 = JSONbig.parse(await fs.promises.readFile(verKey1File, "utf8")); - const verKey2 = JSONbig.parse(await fs.promises.readFile(verKey2File, "utf8")); - const constRoot1 = verKey1.constRoot; - const constRoot2 = verKey2.constRoot; - - const template = await fs.promises.readFile(path.join(__dirname, "..", "recursive", "recursivef.circom.ejs"), "utf8"); - - const obj = { - constRoot1: constRoot1, - constRoot2: constRoot2, - }; - - console.log(obj) - - const verifier = ejs.render(template , obj); - - await fs.promises.writeFile(outputFile, verifier, "utf8"); - - console.log("file Generated Correctly"); - -} - -run().then(()=> { - process.exit(0); -}, (err) => { - console.log(err.message); - console.log(err.stack); - process.exit(1); -}); diff --git a/src/main_joinzkin.js b/src/main_joinzkin.js deleted file mode 100644 index a43bd58e..00000000 --- a/src/main_joinzkin.js +++ /dev/null @@ -1,132 +0,0 @@ -const fs = require("fs"); -const path = require("path"); -const { polMulAxi } = require("pil-stark/src/polutils"); -const version = require("../package").version; -const JSONbig = require("json-bigint"); - -const argv = require("yargs") - .version(version) - .usage("node -v --zkin1 --zkin2 --zkinout ") - .argv; - -async function run() { - - const zkin1File = typeof(argv.zkin1) === "string" ? argv.zkin1.trim() : "zkin1.json"; - const zkin2File = typeof(argv.zkin2) === "string" ? argv.zkin2.trim() : "zkin2.json"; - const zkinOutFile = typeof(argv.zkinout) === "string" ? argv.zkinout : "zkinOut.json"; - const verKeyFile = typeof(argv.verkey) === "string" ? argv.verkey.trim() : "recursive2.verkey.json"; - - - const zkin1 = JSON.parse(await fs.promises.readFile(zkin1File, "utf8")); - const zkin2 = JSON.parse(await fs.promises.readFile(zkin2File, "utf8")); - const verKey = JSONbig.parse(await fs.promises.readFile(verKeyFile, "utf8")); - const constRoot = verKey.constRoot; - - - const zkinOut = {}; - - zkinOut.publics = []; - - for (let i=0; i<8; i++) zkinOut.publics[0+i] = zkin1.publics[0+i]; // oldStateRoot - - for (let i=0; i<8; i++) zkinOut.publics[8+i] = zkin1.publics[8+i]; // oldAccInputHash0 - - zkinOut.publics[16] = zkin1.publics[16]; // oldBatchNum - - zkinOut.publics[17] = zkin1.publics[17]; // chainId - - zkinOut.publics[18] = zkin1.publics[18]; // forkID - - if (zkin1.publics[17] != (zkin2.publics[17])) throw new Error("chainID doesn't match"); - - if (zkin1.publics[18] != (zkin2.publics[18])) throw new Error("forkID doesn't match"); - // midStateRoot - for (let i=0; i<8; i++) { - if (zkin1.publics[19 + i] != (zkin2.publics[0 + i])) throw new Error("midStateRoot doesnt't match"); - } - // midAccInputHash0 - for (let i=0; i<8; i++) { - if (zkin1.publics[27 + i] != (zkin2.publics[8 + i])) throw new Error("midAccInputHash0 doesnt't match"); - } - if (zkin1.publics[43] != (zkin2.publics[16])) throw new Error("batchNum doesn't match"); - - for (let i=0; i<8; i++) zkinOut.publics[19+i] = zkin2.publics[19+i]; // newStateRoot - for (let i=0; i<8; i++) zkinOut.publics[27+i] = zkin2.publics[27+i]; // newAccInputHash0 - for (let i=0; i<8; i++) zkinOut.publics[35+i] = zkin2.publics[35+i]; // newLocalExitRoot - - zkinOut.publics[43] = zkin2.publics[43]; // oldBatchNum - - zkinOut.a_publics = zkin1.publics; - zkinOut.a_root1 = zkin1.root1; - zkinOut.a_root2 = zkin1.root2; - zkinOut.a_root3 = zkin1.root3; - zkinOut.a_root4 = zkin1.root4; - zkinOut.a_evals = zkin1.evals; - zkinOut.a_s0_vals1 = zkin1.s0_vals1; - zkinOut.a_s0_vals3 = zkin1.s0_vals3; - 
zkinOut.a_s0_vals4 = zkin1.s0_vals4; - zkinOut.a_s0_valsC = zkin1.s0_valsC; - zkinOut.a_s0_siblings1 = zkin1.s0_siblings1; - zkinOut.a_s0_siblings3 = zkin1.s0_siblings3; - zkinOut.a_s0_siblings4 = zkin1.s0_siblings4; - zkinOut.a_s0_siblingsC = zkin1.s0_siblingsC; - zkinOut.a_s1_root = zkin1.s1_root; - zkinOut.a_s2_root = zkin1.s2_root; - zkinOut.a_s3_root = zkin1.s3_root; - zkinOut.a_s4_root = zkin1.s4_root; - zkinOut.a_s1_siblings = zkin1.s1_siblings; - zkinOut.a_s2_siblings = zkin1.s2_siblings; - zkinOut.a_s3_siblings = zkin1.s3_siblings; - zkinOut.a_s4_siblings = zkin1.s4_siblings; - zkinOut.a_s1_vals = zkin1.s1_vals; - zkinOut.a_s2_vals = zkin1.s2_vals; - zkinOut.a_s3_vals = zkin1.s3_vals; - zkinOut.a_s4_vals = zkin1.s4_vals; - zkinOut.a_finalPol = zkin1.finalPol; - - zkinOut.b_publics = zkin2.publics; - zkinOut.b_root1 = zkin2.root1; - zkinOut.b_root2 = zkin2.root2; - zkinOut.b_root3 = zkin2.root3; - zkinOut.b_root4 = zkin2.root4; - zkinOut.b_evals = zkin2.evals; - zkinOut.b_s0_vals1 = zkin2.s0_vals1; - zkinOut.b_s0_vals3 = zkin2.s0_vals3; - zkinOut.b_s0_vals4 = zkin2.s0_vals4; - zkinOut.b_s0_valsC = zkin2.s0_valsC; - zkinOut.b_s0_siblings1 = zkin2.s0_siblings1; - zkinOut.b_s0_siblings3 = zkin2.s0_siblings3; - zkinOut.b_s0_siblings4 = zkin2.s0_siblings4; - zkinOut.b_s0_siblingsC = zkin2.s0_siblingsC; - zkinOut.b_s1_root = zkin2.s1_root; - zkinOut.b_s2_root = zkin2.s2_root; - zkinOut.b_s3_root = zkin2.s3_root; - zkinOut.b_s4_root = zkin2.s4_root; - zkinOut.b_s1_siblings = zkin2.s1_siblings; - zkinOut.b_s2_siblings = zkin2.s2_siblings; - zkinOut.b_s3_siblings = zkin2.s3_siblings; - zkinOut.b_s4_siblings = zkin2.s4_siblings; - zkinOut.b_s1_vals = zkin2.s1_vals; - zkinOut.b_s2_vals = zkin2.s2_vals; - zkinOut.b_s3_vals = zkin2.s3_vals; - zkinOut.b_s4_vals = zkin2.s4_vals; - zkinOut.b_finalPol = zkin2.finalPol; - - zkinOut.rootC = []; - for (let i=0; i<4; i++) { - zkinOut.rootC[i] = constRoot[i].toString(); - } - - await fs.promises.writeFile(zkinOutFile, JSON.stringify(zkinOut, null, 1), "utf8"); - - console.log("file Generated Correctly"); - -} - -run().then(()=> { - process.exit(0); -}, (err) => { - console.log(err.message); - console.log(err.stack); - process.exit(1); -}); diff --git a/src/zkevm.c12a.starkstruct.json b/src/stark_struct/batch/c12a.starkstruct.json similarity index 100% rename from src/zkevm.c12a.starkstruct.json rename to src/stark_struct/batch/c12a.starkstruct.json diff --git a/src/recursive.starkstruct.json b/src/stark_struct/batch/recursive.starkstruct.json similarity index 100% rename from src/recursive.starkstruct.json rename to src/stark_struct/batch/recursive.starkstruct.json diff --git a/src/zkevm.starkstruct.json b/src/stark_struct/batch/zkevm.starkstruct.json similarity index 100% rename from src/zkevm.starkstruct.json rename to src/stark_struct/batch/zkevm.starkstruct.json diff --git a/src/stark_struct/blob/blob_inner.starkstruct.json b/src/stark_struct/blob/blob_inner.starkstruct.json new file mode 100644 index 00000000..40a415da --- /dev/null +++ b/src/stark_struct/blob/blob_inner.starkstruct.json @@ -0,0 +1,14 @@ +{ + "nBits": 23, + "nBitsExt": 24, + "nQueries": 128, + "verificationHashType": "GL", + "steps": [ + {"nBits": 24}, + {"nBits": 19}, + {"nBits": 14}, + {"nBits": 10}, + {"nBits": 6} + ] +} + diff --git a/src/stark_struct/blob/blob_inner_compressor.starkstruct.json b/src/stark_struct/blob/blob_inner_compressor.starkstruct.json new file mode 100644 index 00000000..80507796 --- /dev/null +++ 
b/src/stark_struct/blob/blob_inner_compressor.starkstruct.json
@@ -0,0 +1,13 @@
+{
+ "nBits": 20,
+ "nBitsExt": 22,
+ "nQueries": 64,
+ "verificationHashType": "GL",
+ "steps": [
+ {"nBits": 22},
+ {"nBits": 18},
+ {"nBits": 14},
+ {"nBits": 10},
+ {"nBits": 6}
+ ]
+}
diff --git a/src/stark_struct/blob/blob_inner_recursive1.starkstruct.json b/src/stark_struct/blob/blob_inner_recursive1.starkstruct.json
new file mode 100644
index 00000000..ac3f52f5
--- /dev/null
+++ b/src/stark_struct/blob/blob_inner_recursive1.starkstruct.json
@@ -0,0 +1,13 @@
+{
+ "nBits": 17,
+ "nBitsExt": 20,
+ "nQueries": 43,
+ "verificationHashType": "GL",
+ "steps": [
+ {"nBits": 20},
+ {"nBits": 16},
+ {"nBits": 12},
+ {"nBits": 9},
+ {"nBits": 6}
+ ]
+}
diff --git a/src/stark_struct/blob/blob_outer.starkstruct.json b/src/stark_struct/blob/blob_outer.starkstruct.json
new file mode 100644
index 00000000..ac3f52f5
--- /dev/null
+++ b/src/stark_struct/blob/blob_outer.starkstruct.json
@@ -0,0 +1,13 @@
+{
+ "nBits": 17,
+ "nBitsExt": 20,
+ "nQueries": 43,
+ "verificationHashType": "GL",
+ "steps": [
+ {"nBits": 20},
+ {"nBits": 16},
+ {"nBits": 12},
+ {"nBits": 9},
+ {"nBits": 6}
+ ]
+}
diff --git a/src/recursivef.starkstruct.json b/src/stark_struct/recursivef.starkstruct.json
similarity index 100%
rename from src/recursivef.starkstruct.json
rename to src/stark_struct/recursivef.starkstruct.json
diff --git a/src/templates/blob_outer.circom.ejs b/src/templates/blob_outer.circom.ejs
new file mode 100644
index 00000000..9e774042
--- /dev/null
+++ b/src/templates/blob_outer.circom.ejs
@@ -0,0 +1,167 @@
+pragma circom 2.1.0;
+pragma custom_templates;
+
+include "<%- verifierNames[0] %>.verifier.circom";
+include "<%- verifierNames[1] %>.verifier.circom";
+include "verify_blob_outer.circom";
+
+<% let starkInfo = starkInfoVerifiers[0]; %>
+<% let starkInfo2 = starkInfoVerifiers[1]; %>
+
+<% let batchPublics = starkInfo.nPublics - 4; -%>
+<% let blobInnerPublics = starkInfo2.nPublics; -%>
+
+template Main() {
+
+ var rootCRecursive1Batch[4] = [<%- vks[0].join(",") %>];
+ var rootCRecursive2Batch[4] = [<%- vks[1].join(",") %>];
+
+ signal input publics[<%- blobOuterPublics.nPublics %>];
+ signal input rootC[4];
+
+ signal input chainId;
+
+ // Batch Proof Inputs
+ signal input batch_publics[<%- batchPublics %>];
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+ signal input batch_root<%- s %>[4];
+<% } -%>
+
+ signal input batch_evals[<%- starkInfo.evMap.length %>][3];
+
+ signal input batch_s0_valsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.nConstants %>];
+ signal input batch_s0_siblingsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4];
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%>
+ signal input batch_s0_vals<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.mapSectionsN[`cm${s}_n`] %>];
+ signal input batch_s0_siblings<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4];
+<% } -%>
+<% } -%>
+
+<% for (let s=1; s<starkInfo.starkStruct.steps.length; s++) { -%>
+ signal input batch_s<%- s %>_root[4];
+<% } -%>
+
+<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%>
+ signal input batch_s<%- s %>_vals[<%- starkInfo.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo.starkStruct.steps[s-1].nBits - starkInfo.starkStruct.steps[s].nBits))*3 %>];
+ signal input batch_s<%- s %>_siblings[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[s].nBits %>][4];
+<% } -%>
+
+ signal input batch_finalPol[<%- 1 << parseInt(starkInfo.starkStruct.steps[starkInfo.starkStruct.steps.length-1].nBits) %>][3];
+
+ signal input batch_isAggregatedCircuit;
+
+ // Blob Inner Proof Inputs
+ signal input blob_inner_publics[<%- blobInnerPublics %>];
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+ signal input blob_inner_root<%- s %>[4];
+<% } -%>
+
+ signal input blob_inner_evals[<%- starkInfo2.evMap.length %>][3];
+
+ signal input blob_inner_s0_valsC[<%- starkInfo2.starkStruct.nQueries %>][<%- starkInfo2.nConstants %>];
+ signal input blob_inner_s0_siblingsC[<%- starkInfo2.starkStruct.nQueries %>][<%- starkInfo2.starkStruct.steps[0].nBits %>][4];
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+<% if (starkInfo2.mapSectionsN[`cm${s}_2ns`] > 0) { -%>
+ signal input blob_inner_s0_vals<%- s %>[<%- starkInfo2.starkStruct.nQueries %>][<%- starkInfo2.mapSectionsN[`cm${s}_n`] %>];
+ signal input blob_inner_s0_siblings<%- s %>[<%- starkInfo2.starkStruct.nQueries %>][<%- starkInfo2.starkStruct.steps[0].nBits %>][4];
+<% } -%>
+<% } -%>
+
+<% for (let s=1; s<starkInfo2.starkStruct.steps.length; s++) { -%>
+ signal input blob_inner_s<%- s %>_root[4];
+<% } -%>
+
+<% for (let s=1; s< starkInfo2.starkStruct.steps.length; s++) { -%>
+ signal input blob_inner_s<%- s %>_vals[<%- starkInfo2.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo2.starkStruct.steps[s-1].nBits - starkInfo2.starkStruct.steps[s].nBits))*3 %>];
+ signal input blob_inner_s<%- s %>_siblings[<%- starkInfo2.starkStruct.nQueries %>][<%- starkInfo2.starkStruct.steps[s].nBits %>][4];
+<% } -%>
+
+ signal input blob_inner_finalPol[<%- 1 << parseInt(starkInfo2.starkStruct.steps[starkInfo2.starkStruct.steps.length-1].nBits) %>][3];
+
+ signal isValidBlob;
+ signal publicsBlobOuter[<%- blobOuterPublics.nPublics %>];
+ (isValidBlob, publicsBlobOuter) <== VerifyBlobOuter()(batch_publics, blob_inner_publics, chainId);
+
+ for(var i=0; i<<%- blobOuterPublics.nPublics %>; i++) {
+ publics[i] === publicsBlobOuter[i];
+ }
+
+ component vBatch = StarkVerifier();
+
+ for (var i=0; i< <%- batchPublics %>; i++) {
+ vBatch.publics[i] <== batch_publics[i];
+ }
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+ vBatch.root<%- s %> <== batch_root<%- s %>;
+<% } -%>
+
+ vBatch.evals <== batch_evals;
+
+ vBatch.s0_valsC <== batch_s0_valsC;
+ vBatch.s0_siblingsC <== batch_s0_siblingsC;
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%>
+ vBatch.s0_vals<%- s %> <== batch_s0_vals<%- s %>;
+ vBatch.s0_siblings<%- s %> <== batch_s0_siblings<%- s %>;
+<% } -%>
+<% } -%>
+
+<% for (let s=1; s<starkInfo.starkStruct.steps.length; s++) { -%>
+ vBatch.s<%- s %>_root <== batch_s<%- s %>_root;
+<% } -%>
+<% for (let s=1; s<starkInfo.starkStruct.steps.length; s++) { -%>
+ vBatch.s<%- s %>_vals <== batch_s<%- s %>_vals;
+ vBatch.s<%- s %>_siblings <== batch_s<%- s %>_siblings;
+<% } -%>
+ vBatch.finalPol <== batch_finalPol;
+
+ signal isOneBatch <== 1 - batch_isAggregatedCircuit;
+ vBatch.rootC <== MultiMux1(4)([rootCRecursive2Batch, rootCRecursive1Batch], isOneBatch);
+
+ vBatch.enable <== isValidBlob; // Check isValid public
+
+ for (var i=0; i<4; i++) {
+ vBatch.publics[<%- batchPublics %> + i] <== rootCRecursive2Batch[i];
+ }
+
+ component vBlob = StarkVerifier2();
+
+ for (var i=0; i< <%- blobInnerPublics %>; i++) {
+ vBlob.publics[i] <== blob_inner_publics[i];
+ }
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+ vBlob.root<%- s %> <== blob_inner_root<%- s %>;
+<% } -%>
+
+ vBlob.evals <== blob_inner_evals;
+
+ vBlob.s0_valsC <== blob_inner_s0_valsC;
+ vBlob.s0_siblingsC <== blob_inner_s0_siblingsC;
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+<% if (starkInfo2.mapSectionsN[`cm${s}_2ns`] > 0) { -%>
+ vBlob.s0_vals<%- s %> <== blob_inner_s0_vals<%- s %>;
+ vBlob.s0_siblings<%- s %> <== blob_inner_s0_siblings<%- s %>;
+<% } -%>
+<% } -%>
+
+<% for (let s=1; s<starkInfo2.starkStruct.steps.length; s++) { -%>
+ vBlob.s<%- s %>_root <== blob_inner_s<%- s %>_root;
+<% } -%>
+<% for (let s=1; s<starkInfo2.starkStruct.steps.length; s++) { -%>
+ vBlob.s<%- s %>_vals <== blob_inner_s<%- s %>_vals;
+ vBlob.s<%- s %>_siblings <== blob_inner_s<%- s %>_siblings;
+<% } -%>
+ vBlob.finalPol <== blob_inner_finalPol;
+
+}
+
+component main {public [publics, rootC]}= Main();
\ No newline at end of file
diff --git a/src/templates/final.circom.ejs b/src/templates/final.circom.ejs
new file mode 100644
index 00000000..92698d52
--- /dev/null
+++ b/src/templates/final.circom.ejs
@@ -0,0 +1,75 @@
+pragma circom 2.1.0;
+
+include "<%- verifierNames[0] %>.verifier.circom";
+include "get_sha256_inputs.circom";
+
+<% let starkInfo = starkInfoVerifiers[0]; -%>
+<% let arity = starkInfo.merkleTreeArity; -%>
+template Main() {
+ signal output publicsHash;
+
+ signal input aggregatorAddr;
+
+ signal input publics[<%- starkInfo.nPublics %>];
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+ signal input root<%- s %>;
+<% } -%>
+
+ signal input evals[<%- starkInfo.evMap.length %>][3]; // Evaluations of the set polynomials at a challenge value z and gz
+
+ signal input s0_valsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.nConstants %>];
+ signal input s0_siblingsC[<%- starkInfo.starkStruct.nQueries %>][<%- Math.floor((parseInt(starkInfo.starkStruct.steps[0].nBits) - 1)/Math.log2(arity))+1 %>][<%- arity %>];
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%>
+ signal input s0_vals<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.mapSectionsN[`cm${s}_n`] %>];
+ signal input s0_siblings<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- Math.floor((parseInt(starkInfo.starkStruct.steps[0].nBits) - 1)/Math.log2(arity))+1 %>][<%- arity %>];
+<% } -%>
+<% } -%>
+
+<% for (let s=1; s<starkInfo.starkStruct.steps.length; s++) { -%>
+ signal input s<%- s %>_root;
+<% } -%>
+
+<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%>
+ signal input s<%- s %>_vals[<%- starkInfo.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo.starkStruct.steps[s-1].nBits - starkInfo.starkStruct.steps[s].nBits))*3 %>];
+ signal input s<%- s %>_siblings[<%- starkInfo.starkStruct.nQueries %>][<%- Math.floor((parseInt(starkInfo.starkStruct.steps[s].nBits) -1) /Math.log2(arity)) +1 %>][<%- arity %>];
+<% } -%>
+
+ signal input finalPol[<%- 1 << parseInt(starkInfo.starkStruct.steps[starkInfo.starkStruct.steps.length-1].nBits) %>][3];
+
+
+ component sv = StarkVerifier();
+ sv.publics <== publics;
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+ sv.root<%- s %> <== root<%- s %>;
+<% } -%>
+
+ sv.evals <== evals;
+
+ sv.s0_valsC <== s0_valsC;
+ sv.s0_siblingsC <== s0_siblingsC;
+
+<% for (let s=1; s<=nStages + 1; ++s) { -%>
+<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%>
+ sv.s0_vals<%- s %> <== s0_vals<%- s %>;
+ sv.s0_siblings<%- s %> <== s0_siblings<%- s %>;
+<% } -%>
+<% } -%>
+
+<% for (let s=1; s<starkInfo.starkStruct.steps.length; s++) { -%>
+ sv.s<%- s %>_root <== s<%- s %>_root;
+<% } -%>
+<% for (let s=1; s<starkInfo.starkStruct.steps.length; s++) { -%>
+ sv.s<%- s %>_vals <== s<%- s %>_vals;
+ sv.s<%- s %>_siblings <== s<%- s %>_siblings;
+<% } -%>
+
+ sv.finalPol <== finalPol;
+
+ publicsHash <== getSha256Inputs()(aggregatorAddr, publics);
+
+}
+
+component main = Main();
\ No newline at end of file
diff --git a/src/templates/helpers/final/get_sha256_inputs_batch.circom.ejs b/src/templates/helpers/final/get_sha256_inputs_batch.circom.ejs
new file mode 100644 index 00000000..396f9564 --- /dev/null +++ b/src/templates/helpers/final/get_sha256_inputs_batch.circom.ejs @@ -0,0 +1,172 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "lessthangl.circom"; +include "sha256/sha256.circom"; +include "bitify.circom"; + +<% function getPositionBits(publics, currentPublic) { + const publicsNames = Object.keys(publics).filter(p => p !== "nPublics").sort((a, b) => publics[a] - publics[b]); + if(!publicsNames.includes(currentPublic) && currentPublic !== "aggregatorAddr") { + throw new Error("Something went wrong!"); + } + + let publicIndex = currentPublic === "aggregatorAddr" + ? publicsNames.length + : publicsNames.findIndex((publicName) => publicName === currentPublic); + let initialBits = 0; + for(let i = 0; i < publicIndex; i++) { + const public = publicsNames[i]; + if(public.includes("NumPos") || public === "forkIdPos" || public === "chainIdPos") { + initialBits += 64; + } else { + initialBits += 256; + } + } + return initialBits; +} -%> + +template getSha256Inputs() { + signal input aggregatorAddr; + signal input publics[<%- publics.nPublics %>]; + + signal output publicsHash; + + signal oldStateRoot[8]; + signal n2bOldStateRoot[8][32]; + var oldStateRootPos = <%- publics.oldStateRootPos %>; + var oldStateRootBitsOffset = <%- getPositionBits(publics, "oldStateRootPos") %>; + + signal oldBatchAccInputHash[8]; + signal n2bOldBatchAccInputHash[8][32]; + var oldBatchAccInputHashPos = <%- publics.oldBatchAccInputHashPos %>; + var oldBatchAccInputHashBitsOffset = <%- getPositionBits(publics, "oldBatchAccInputHashPos") %>; + + signal oldBatchNum; + signal n2bOldBatchNum[63]; + var oldBatchNumPos = <%- publics.oldBatchNumPos %>; + var oldBatchNumBitsOffset = <%- getPositionBits(publics, "oldBatchNumPos") %>; + + signal chainId; + signal n2bChainId[63]; + var chainIdPos = <%- publics.chainIdPos %>; + var chainIdBitsOffset = <%- getPositionBits(publics, "chainIdPos") %>; + + signal forkId; + signal n2bForkId[63]; + var forkIdPos = <%- publics.forkIdPos %>; + var forkIdBitsOffset = <%- getPositionBits(publics, "forkIdPos") %>; + + signal newStateRoot[8]; + signal n2bNewStateRoot[8][32]; + var newStateRootPos = <%- publics.newStateRootPos %>; + var newStateRootBitsOffset = <%- getPositionBits(publics, "newStateRootPos") %>; + + signal newBatchAccInputHash[8]; + signal n2bNewBatchAccInputHash[8][32]; + var newBatchAccInputHashPos = <%- publics.newBatchAccInputHashPos %>; + var newBatchAccInputHashBitsOffset = <%- getPositionBits(publics, "newBatchAccInputHashPos") %>; + + signal newBatchNum; + signal n2bNewBatchNum[63]; + var newBatchNumPos = <%- publics.newBatchNumPos %>; + var newBatchNumBitsOffset = <%- getPositionBits(publics, "newBatchNumPos") %>; + + signal newLocalExitRoot[8]; + signal n2bNewLocalExitRoot[8][32]; + var newLocalExitRootPos = <%- publics.newLocalExitRootPos %>; + var newLocalExitRootBitsOffset = <%- getPositionBits(publics, "newLocalExitRootPos") %>; + + +<% let aggregatorAddressOffset = getPositionBits(publics, "aggregatorAddr"); -%> + var aggregatorAddrBitsOffset = <%- aggregatorAddressOffset %>; + + var totalBits = <%- aggregatorAddressOffset + 160 %>; + + for(var i=0; i<8; i++) { + // State Root + oldStateRoot[i] <== publics[oldStateRootPos + i]; + n2bOldStateRoot[i] <== Num2Bits(32)(oldStateRoot[i]); + + newStateRoot[i] <== publics[newStateRootPos + i]; + n2bNewStateRoot[i] <== Num2Bits(32)(newStateRoot[i]); + + // Batch Acc Input Hash + oldBatchAccInputHash[i] <== publics[oldBatchAccInputHashPos + i]; + 
n2bOldBatchAccInputHash[i] <== Num2Bits(32)(oldBatchAccInputHash[i]); + + newBatchAccInputHash[i] <== publics[newBatchAccInputHashPos + i]; + n2bNewBatchAccInputHash[i] <== Num2Bits(32)(newBatchAccInputHash[i]); + + // Local Exit Root + newLocalExitRoot[i] <== publics[newLocalExitRootPos + i]; + n2bNewLocalExitRoot[i] <== Num2Bits(32)(newLocalExitRoot[i]); + } + + // Batch Num + oldBatchNum <== publics[oldBatchNumPos]; + n2bOldBatchNum <== Num2Bits(63)(oldBatchNum); + + newBatchNum <== publics[newBatchNumPos]; + n2bNewBatchNum <== Num2Bits(63)(newBatchNum); + + chainId <== publics[chainIdPos]; + n2bChainId <== Num2Bits(63)(chainId); + + forkId <== publics[forkIdPos]; + n2bForkId <== Num2Bits(63)(forkId); + + signal n2bAggregatorAddr[160] <== Num2Bits(160)(aggregatorAddr); + + // Check that state roots are lower than GL + for (var i = 0; i < 4; i++) { + _<== LessThanGoldilocks()(oldStateRoot[2*i] + (1 << 32) * oldStateRoot[2*i + 1]); + _<== LessThanGoldilocks()(newStateRoot[2*i] + (1 << 32) * newStateRoot[2*i + 1]); + } + + component publicsHasher = Sha256(totalBits); + + for (var i=0; i<8; i++) { + for (var j=0; j<32; j++) { + publicsHasher.in[oldStateRootBitsOffset + 32*(8-i) - 1 -j] <== n2bOldStateRoot[i][j]; + publicsHasher.in[newStateRootBitsOffset + 32*(8-i) - 1 -j] <== n2bNewStateRoot[i][j]; + + publicsHasher.in[oldBatchAccInputHashBitsOffset + 32*(8-i) - 1 -j] <== n2bOldBatchAccInputHash[i][j]; + publicsHasher.in[newBatchAccInputHashBitsOffset + 32*(8-i) - 1 -j] <== n2bNewBatchAccInputHash[i][j]; + + publicsHasher.in[newLocalExitRootBitsOffset + 32*(8-i) - 1 -j] <== n2bNewLocalExitRoot[i][j]; + } + } + + // Do 63 bits to avoid aliasing + for (var i=0; i<63; i++) { + publicsHasher.in[oldBatchNumBitsOffset + 64 - 1 -i] <== n2bOldBatchNum[i]; + publicsHasher.in[newBatchNumBitsOffset + 64 - 1 -i] <== n2bNewBatchNum[i]; + + publicsHasher.in[chainIdBitsOffset + 64 - 1 -i] <== n2bChainId[i]; + + publicsHasher.in[forkIdBitsOffset + 64 - 1 -i] <== n2bForkId[i]; + } + + publicsHasher.in[oldBatchNumBitsOffset] <== 0; + publicsHasher.in[newBatchNumBitsOffset] <== 0; + + publicsHasher.in[chainIdBitsOffset] <== 0; + + publicsHasher.in[forkIdBitsOffset] <== 0; + + for (var i=0; i<160; i++) { + publicsHasher.in[aggregatorAddrBitsOffset + 160 - 1 -i] <== n2bAggregatorAddr[i]; + } + + component b2nPublicsHash = Bits2Num(256); + for (var i = 0; i < 256; i++) { + b2nPublicsHash.in[i] <== publicsHasher.out[255-i]; + } + + publicsHash <== b2nPublicsHash.out; +} + +<% if(isTest) { -%> +component main = getSha256Inputs(); +<% } -%> \ No newline at end of file diff --git a/src/templates/helpers/final/get_sha256_inputs_blob.circom.ejs b/src/templates/helpers/final/get_sha256_inputs_blob.circom.ejs new file mode 100644 index 00000000..bbcf3900 --- /dev/null +++ b/src/templates/helpers/final/get_sha256_inputs_blob.circom.ejs @@ -0,0 +1,194 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "lessthangl.circom"; +include "sha256/sha256.circom"; +include "bitify.circom"; + +<% function getPositionBits(publics, currentPublic) { + const publicsNames = Object.keys(publics).filter(p => p !== "nPublics").sort((a, b) => publics[a] - publics[b]); + if(!publicsNames.includes(currentPublic) && currentPublic !== "aggregatorAddr") { + throw new Error("Something went wrong!"); + } + + let publicIndex = currentPublic === "aggregatorAddr" + ? 
publicsNames.length + : publicsNames.findIndex((publicName) => publicName === currentPublic); + let initialBits = 0; + for(let i = 0; i < publicIndex; i++) { + const public = publicsNames[i]; + if(public.includes("NumPos") || public === "forkIdPos" || public === "chainIdPos") { + initialBits += 64; + } else { + initialBits += 256; + } + } + return initialBits; +} -%> + +template getSha256Inputs() { + signal input aggregatorAddr; + signal input publics[<%- publics.nPublics %>]; + + signal output publicsHash; + + signal oldStateRoot[8]; + signal n2bOldStateRoot[8][32]; + var oldStateRootPos = <%- publics.oldStateRootPos %>; + var oldStateRootBitsOffset = <%- getPositionBits(publics, "oldStateRootPos") %>; + + signal oldBlobStateRoot[8]; + signal n2bOldBlobStateRoot[8][32]; + var oldBlobStateRootPos = <%- publics.oldBlobStateRootPos %>; + var oldBlobStateRootBitsOffset = <%- getPositionBits(publics, "oldBlobStateRootPos") %>; + + signal oldBlobAccInputHash[8]; + signal n2bOldBlobAccInputHash[8][32]; + var oldBlobAccInputHashPos = <%- publics.oldBlobAccInputHashPos %>; + var oldBlobAccInputHashBitsOffset = <%- getPositionBits(publics, "oldBlobAccInputHashPos") %>; + + signal oldBlobNum; + signal n2bOldBlobNum[63]; + var oldBlobNumPos = <%- publics.oldBlobNumPos %>; + var oldBlobNumBitsOffset = <%- getPositionBits(publics, "oldBlobNumPos") %>; + + signal chainId; + signal n2bChainId[63]; + var chainIdPos = <%- publics.chainIdPos %>; + var chainIdBitsOffset = <%- getPositionBits(publics, "chainIdPos") %>; + + signal forkId; + signal n2bForkId[63]; + var forkIdPos = <%- publics.forkIdPos %>; + var forkIdBitsOffset = <%- getPositionBits(publics, "forkIdPos") %>; + + signal newStateRoot[8]; + signal n2bNewStateRoot[8][32]; + var newStateRootPos = <%- publics.newStateRootPos %>; + var newStateRootBitsOffset = <%- getPositionBits(publics, "newStateRootPos") %>; + + signal newBlobStateRoot[8]; + signal n2bNewBlobStateRoot[8][32]; + var newBlobStateRootPos = <%- publics.newBlobStateRootPos %>; + var newBlobStateRootBitsOffset = <%- getPositionBits(publics, "newBlobStateRootPos") %>; + + signal newBlobAccInputHash[8]; + signal n2bNewBlobAccInputHash[8][32]; + var newBlobAccInputHashPos = <%- publics.newBlobAccInputHashPos %>; + var newBlobAccInputHashBitsOffset = <%- getPositionBits(publics, "newBlobAccInputHashPos") %>; + + signal newBlobNum; + signal n2bNewBlobNum[63]; + var newBlobNumPos = <%- publics.newBlobNumPos %>; + var newBlobNumBitsOffset = <%- getPositionBits(publics, "newBlobNumPos") %>; + + signal newLocalExitRoot[8]; + signal n2bNewLocalExitRoot[8][32]; + var newLocalExitRootPos = <%- publics.newLocalExitRootPos %>; + var newLocalExitRootBitsOffset = <%- getPositionBits(publics, "newLocalExitRootPos") %>; + + +<% let aggregatorAddressOffset = getPositionBits(publics, "aggregatorAddr"); -%> + var aggregatorAddrBitsOffset = <%- aggregatorAddressOffset %>; + + var totalBits = <%- aggregatorAddressOffset + 160 %>; + + for(var i=0; i<8; i++) { + // State Root + oldStateRoot[i] <== publics[oldStateRootPos + i]; + n2bOldStateRoot[i] <== Num2Bits(32)(oldStateRoot[i]); + + newStateRoot[i] <== publics[newStateRootPos + i]; + n2bNewStateRoot[i] <== Num2Bits(32)(newStateRoot[i]); + + // Blob Acc Input Hash + oldBlobAccInputHash[i] <== publics[oldBlobAccInputHashPos + i]; + n2bOldBlobAccInputHash[i] <== Num2Bits(32)(oldBlobAccInputHash[i]); + + newBlobAccInputHash[i] <== publics[newBlobAccInputHashPos + i]; + n2bNewBlobAccInputHash[i] <== Num2Bits(32)(newBlobAccInputHash[i]); + + // Blob State Root + 
oldBlobStateRoot[i] <== publics[oldBlobStateRootPos + i]; + n2bOldBlobStateRoot[i] <== Num2Bits(32)(oldBlobStateRoot[i]); + + newBlobStateRoot[i] <== publics[newBlobStateRootPos + i]; + n2bNewBlobStateRoot[i] <== Num2Bits(32)(newBlobStateRoot[i]); + + // Local Exit Root + newLocalExitRoot[i] <== publics[newLocalExitRootPos + i]; + n2bNewLocalExitRoot[i] <== Num2Bits(32)(newLocalExitRoot[i]); + } + + // Blob Num + oldBlobNum <== publics[oldBlobNumPos]; + n2bOldBlobNum <== Num2Bits(63)(oldBlobNum); + + newBlobNum <== publics[newBlobNumPos]; + n2bNewBlobNum <== Num2Bits(63)(newBlobNum); + + chainId <== publics[chainIdPos]; + n2bChainId <== Num2Bits(63)(chainId); + + forkId <== publics[forkIdPos]; + n2bForkId <== Num2Bits(63)(forkId); + + signal n2bAggregatorAddr[160] <== Num2Bits(160)(aggregatorAddr); + + // Check that state roots are lower than GL + for (var i = 0; i < 4; i++) { + _<== LessThanGoldilocks()(oldStateRoot[2*i] + (1 << 32) * oldStateRoot[2*i + 1]); + _<== LessThanGoldilocks()(newStateRoot[2*i] + (1 << 32) * newStateRoot[2*i + 1]); + _<== LessThanGoldilocks()(oldBlobStateRoot[2*i] + (1 << 32) * oldBlobStateRoot[2*i + 1]); + _<== LessThanGoldilocks()(newBlobStateRoot[2*i] + (1 << 32) * newBlobStateRoot[2*i + 1]); + } + + component publicsHasher = Sha256(totalBits); + + for (var i=0; i<8; i++) { + for (var j=0; j<32; j++) { + publicsHasher.in[oldStateRootBitsOffset + 32*(8-i) - 1 -j] <== n2bOldStateRoot[i][j]; + publicsHasher.in[newStateRootBitsOffset + 32*(8-i) - 1 -j] <== n2bNewStateRoot[i][j]; + + publicsHasher.in[oldBlobStateRootBitsOffset + 32*(8-i) - 1 -j] <== n2bOldBlobStateRoot[i][j]; + publicsHasher.in[newBlobStateRootBitsOffset + 32*(8-i) - 1 -j] <== n2bNewBlobStateRoot[i][j]; + + publicsHasher.in[oldBlobAccInputHashBitsOffset + 32*(8-i) - 1 -j] <== n2bOldBlobAccInputHash[i][j]; + publicsHasher.in[newBlobAccInputHashBitsOffset + 32*(8-i) - 1 -j] <== n2bNewBlobAccInputHash[i][j]; + + publicsHasher.in[newLocalExitRootBitsOffset + 32*(8-i) - 1 -j] <== n2bNewLocalExitRoot[i][j]; + } + } + + // Do 63 bits to avoid aliasing + for (var i=0; i<63; i++) { + publicsHasher.in[oldBlobNumBitsOffset + 64 - 1 -i] <== n2bOldBlobNum[i]; + publicsHasher.in[newBlobNumBitsOffset + 64 - 1 -i] <== n2bNewBlobNum[i]; + + publicsHasher.in[chainIdBitsOffset + 64 - 1 -i] <== n2bChainId[i]; + + publicsHasher.in[forkIdBitsOffset + 64 - 1 -i] <== n2bForkId[i]; + } + + publicsHasher.in[oldBlobNumBitsOffset] <== 0; + publicsHasher.in[newBlobNumBitsOffset] <== 0; + + publicsHasher.in[chainIdBitsOffset] <== 0; + + publicsHasher.in[forkIdBitsOffset] <== 0; + + for (var i=0; i<160; i++) { + publicsHasher.in[aggregatorAddrBitsOffset + 160 - 1 -i] <== n2bAggregatorAddr[i]; + } + + component b2nPublicsHash = Bits2Num(256); + for (var i = 0; i < 256; i++) { + b2nPublicsHash.in[i] <== publicsHasher.out[255-i]; + } + + publicsHash <== b2nPublicsHash.out; +} + +<% if(isTest) { -%> +component main = getSha256Inputs(); +<% } -%> \ No newline at end of file diff --git a/src/templates/helpers/publics.js b/src/templates/helpers/publics.js new file mode 100644 index 00000000..d8fdea51 --- /dev/null +++ b/src/templates/helpers/publics.js @@ -0,0 +1,63 @@ +module.exports.batchPublics = { + oldStateRootPos: 0, + oldBatchAccInputHashPos: 8, + oldBatchNumPos: 16, + chainIdPos: 17, + forkIdPos: 18, + newStateRootPos: 19, + newBatchAccInputHashPos: 27, + newLocalExitRootPos: 35, + newBatchNumPos: 43, + nPublics: 44, +}; + +// ####################### PUBLICS FEIJOA ####################### + +module.exports.batchPublicsEip4844 = 
{ + oldStateRootPos: 0, + oldBatchAccInputHashPos: 8, + previousL1InfoTreeRootPos: 16, + previousL1InfoTreeIndexPos: 24, + chainIdPos: 25, + forkIdPos: 26, + newStateRootPos: 27, + newBatchAccInputHashPos: 35, + currentL1InfoTreeRootPos: 43, + currentL1InfoTreeIndexPos: 51, + newLocalExitRootPos: 52, + newLastTimestampPos: 60, + nPublics: 61, +}; + +module.exports.blobInnerPublics = { + oldBlobStateRootPos: 0, + oldBlobAccInputHashPos: 8, + oldBlobNumPos: 16, + oldStateRootPos: 17, + forkIdPos: 25, + newBlobStateRootPos: 26, + newBlobAccInputHashPos: 34, + newBlobNumPos: 42, + finalAccBatchHashDataPos: 43, + localExitRootFromBlobPos: 51, + isInvalidPos: 59, + timestampLimitPos: 60, + lastL1InfoTreeRootPos: 61, + lastL1InfoTreeIndexPos: 69, + nPublics: 70, +}; + +module.exports.blobOuterPublics = { + oldStateRootPos: 0, + oldBlobStateRootPos: 8, + oldBlobAccInputHashPos: 16, + oldBlobNumPos: 24, + chainIdPos: 25, + forkIdPos: 26, + newStateRootPos: 27, + newBlobStateRootPos: 35, + newBlobAccInputHashPos: 43, + newBlobNumPos: 51, + newLocalExitRootPos: 52, + nPublics: 60, +}; \ No newline at end of file diff --git a/src/templates/helpers/recursive2/recursive2_checks_batch.circom.ejs b/src/templates/helpers/recursive2/recursive2_checks_batch.circom.ejs new file mode 100644 index 00000000..2aa14e22 --- /dev/null +++ b/src/templates/helpers/recursive2/recursive2_checks_batch.circom.ejs @@ -0,0 +1,55 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "iszero.circom"; + +template VerifyRecursive2() { + var oldStateRootPos = <%- publics.oldStateRootPos %>; + var oldBatchAccInputHashPos = <%- publics.oldBatchAccInputHashPos %>; + var oldBatchNumPos = <%- publics.oldBatchNumPos %>; + var chainIdPos = <%- publics.chainIdPos %>; + var forkIdPos = <%- publics.forkIdPos %>; + var newStateRootPos = <%- publics.newStateRootPos %>; + var newBatchAccInputHashPos = <%- publics.newBatchAccInputHashPos %>; + var newLocalExitRootPos = <%- publics.newLocalExitRootPos %>; + var newBatchNumPos = <%- publics.newBatchNumPos %>; + + signal input a_publics[<%- publics.nPublics %>]; + signal input b_publics[<%- publics.nPublics %>]; + + signal output publics[<%- publics.nPublics %>]; + signal output a_isOneBatch; + signal output b_isOneBatch; + + + // Check recursive 2 publics + + for (var i=0; i<8; i++) { + a_publics[newStateRootPos + i] === b_publics[oldStateRootPos + i]; // Check State Root + publics[oldStateRootPos + i] <== a_publics[oldStateRootPos + i]; // Old State Root + publics[newStateRootPos + i] <== b_publics[newStateRootPos + i]; // New State Root + + a_publics[newBatchAccInputHashPos + i] === b_publics[oldBatchAccInputHashPos + i]; // Check accumulated input hash + publics[oldBatchAccInputHashPos + i] <== a_publics[oldBatchAccInputHashPos + i]; // Old accumulated input hash + publics[newBatchAccInputHashPos + i] <== b_publics[newBatchAccInputHashPos + i]; // New accumulated input hash + + publics[newLocalExitRootPos+i] <== b_publics[newLocalExitRootPos+i]; // Local exit root + } + + a_publics[chainIdPos] === b_publics[chainIdPos]; // Check that chain ID matches between chains + publics[chainIdPos] <== a_publics[chainIdPos]; // Chain ID + + a_publics[forkIdPos] === b_publics[forkIdPos]; // Check that fork ID matches between chains + publics[forkIdPos] <== a_publics[forkIdPos]; // Fork ID + + a_publics[newBatchNumPos] === b_publics[oldBatchNumPos]; // Check batch number + publics[oldBatchNumPos] <== a_publics[oldBatchNumPos]; // Old batch number + publics[newBatchNumPos] <== 
b_publics[newBatchNumPos]; // New Batch number + + a_isOneBatch <== IsZero()(a_publics[newBatchNumPos] - a_publics[oldBatchNumPos] - 1); + b_isOneBatch <== IsZero()(b_publics[newBatchNumPos] - b_publics[oldBatchNumPos] - 1); +} + +<% if(isTest) { -%> + component main = VerifyRecursive2(); +<% } -%> diff --git a/src/templates/helpers/recursive2/recursive2_checks_batch_eip4844.circom.ejs b/src/templates/helpers/recursive2/recursive2_checks_batch_eip4844.circom.ejs new file mode 100644 index 00000000..b1c4dbcc --- /dev/null +++ b/src/templates/helpers/recursive2/recursive2_checks_batch_eip4844.circom.ejs @@ -0,0 +1,66 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "iszero.circom"; + +template VerifyRecursive2() { + var oldStateRootPos = <%- publics.oldStateRootPos %>; + var oldBatchAccInputHashPos = <%- publics.oldBatchAccInputHashPos %>; + var previousL1InfoTreeRootPos = <%- publics.previousL1InfoTreeRootPos %>; + var previousL1InfoTreeIndexPos = <%- publics.previousL1InfoTreeIndexPos %>; + var chainIdPos = <%- publics.chainIdPos %>; + var forkIdPos = <%- publics.forkIdPos %>; + var newStateRootPos = <%- publics.newStateRootPos %>; + var newBatchAccInputHashPos = <%- publics.newBatchAccInputHashPos %>; + var currentL1InfoTreeRootPos = <%- publics.currentL1InfoTreeRootPos %>; + var currentL1InfoTreeIndexPos = <%- publics.currentL1InfoTreeIndexPos %>; + var newLocalExitRootPos = <%- publics.newLocalExitRootPos %>; + var newLastTimestampPos = <%- publics.newLastTimestampPos %>; + + signal input a_publics[<%- publics.nPublics %>]; + signal input b_publics[<%- publics.nPublics %>]; + + signal input a_isAggregatedCircuit; + signal input b_isAggregatedCircuit; + + signal output publics[<%- publics.nPublics %>]; + signal output a_isOneBatch; + signal output b_isOneBatch; + + // Check recursive 2 publics + + for (var i=0; i<8; i++) { + a_publics[newStateRootPos + i] === b_publics[oldStateRootPos + i]; // Check State Root + publics[oldStateRootPos + i] <== a_publics[oldStateRootPos + i]; // Old State Root + publics[newStateRootPos + i] <== b_publics[newStateRootPos + i]; // New State Root + + a_publics[newBatchAccInputHashPos + i] === b_publics[oldBatchAccInputHashPos + i]; // Check accumulated input hash + publics[oldBatchAccInputHashPos + i] <== a_publics[oldBatchAccInputHashPos + i]; // Old accumulated input hash + publics[newBatchAccInputHashPos + i] <== b_publics[newBatchAccInputHashPos + i]; // New accumulated input hash + + a_publics[currentL1InfoTreeRootPos + i] === b_publics[previousL1InfoTreeRootPos + i]; // Check L1 Info Tree Root + publics[previousL1InfoTreeRootPos + i] <== a_publics[previousL1InfoTreeRootPos + i]; // Old L1 Info Tree Root + publics[currentL1InfoTreeRootPos + i] <== b_publics[currentL1InfoTreeRootPos + i]; // New L1 Info Tree Root + + publics[newLocalExitRootPos+i] <== b_publics[newLocalExitRootPos+i]; // Local exit root + } + + a_publics[chainIdPos] === b_publics[chainIdPos]; // Check that chain ID matches between chains + publics[chainIdPos] <== a_publics[chainIdPos]; // Chain ID + + a_publics[forkIdPos] === b_publics[forkIdPos]; // Check that fork ID matches between chains + publics[forkIdPos] <== a_publics[forkIdPos]; // Fork ID + + a_publics[currentL1InfoTreeIndexPos] === b_publics[previousL1InfoTreeIndexPos]; // Check L1 Info Tree Index + publics[previousL1InfoTreeIndexPos] <== a_publics[previousL1InfoTreeIndexPos]; // Old L1 Info Tree Index + publics[currentL1InfoTreeIndexPos] <== b_publics[currentL1InfoTreeIndexPos]; // New L1 Info Tree Index + + 
publics[newLastTimestampPos] <== b_publics[newLastTimestampPos]; // Last timestamp + + a_isOneBatch <== IsZero()(a_isAggregatedCircuit); + b_isOneBatch <== IsZero()(b_isAggregatedCircuit); +} + +<% if(isTest) { -%> + component main = VerifyRecursive2(); +<% } -%> diff --git a/src/templates/helpers/recursive2/recursive2_checks_blob.circom.ejs b/src/templates/helpers/recursive2/recursive2_checks_blob.circom.ejs new file mode 100644 index 00000000..161e12c9 --- /dev/null +++ b/src/templates/helpers/recursive2/recursive2_checks_blob.circom.ejs @@ -0,0 +1,61 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "iszero.circom"; + +template VerifyRecursive2() { + var oldStateRootPos = <%- publics.oldStateRootPos %>; + var oldBlobStateRootPos = <%- publics.oldBlobStateRootPos %>; + var oldBlobAccInputHashPos = <%- publics.oldBlobAccInputHashPos %>; + var oldBlobNumPos = <%- publics.oldBlobNumPos %>; + var chainIdPos = <%- publics.chainIdPos %>; + var forkIdPos = <%- publics.forkIdPos %>; + var newStateRootPos = <%- publics.newStateRootPos %>; + var newBlobStateRootPos = <%- publics.newBlobStateRootPos %>; + var newBlobAccInputHashPos = <%- publics.newBlobAccInputHashPos %>; + var newBlobNumPos = <%- publics.newBlobNumPos %>; + var newLocalExitRootPos = <%- publics.newLocalExitRootPos %>; + + signal input a_publics[<%- publics.nPublics %>]; + signal input b_publics[<%- publics.nPublics %>]; + + signal output publics[<%- publics.nPublics %>]; + signal output a_isOneBatch; + signal output b_isOneBatch; + + // Check recursive 2 publics + + for (var i=0; i<8; i++) { + a_publics[newStateRootPos + i] === b_publics[oldStateRootPos + i]; // Check State Root + publics[oldStateRootPos + i] <== a_publics[oldStateRootPos + i]; // Old State Root + publics[newStateRootPos + i] <== b_publics[newStateRootPos + i]; // New State Root + + a_publics[newBlobStateRootPos + i] === b_publics[oldBlobStateRootPos + i]; // Check Blob State Root + publics[oldBlobStateRootPos + i] <== a_publics[oldBlobStateRootPos + i]; // Old Blob State Root + publics[newBlobStateRootPos + i] <== b_publics[newBlobStateRootPos + i]; // New Blob State Root + + a_publics[newBlobAccInputHashPos + i] === b_publics[oldBlobAccInputHashPos + i]; // Check accumulated input hash + publics[oldBlobAccInputHashPos + i] <== a_publics[oldBlobAccInputHashPos + i]; // Old accumulated input hash + publics[newBlobAccInputHashPos + i] <== b_publics[newBlobAccInputHashPos + i]; // New accumulated input hash + + publics[newLocalExitRootPos+i] <== b_publics[newLocalExitRootPos+i]; // Local exit root + } + + a_publics[chainIdPos] === b_publics[chainIdPos]; // Check that chain ID matches between chains + publics[chainIdPos] <== a_publics[chainIdPos]; // Chain ID + + a_publics[forkIdPos] === b_publics[forkIdPos]; // Check that fork ID matches between chains + publics[forkIdPos] <== a_publics[forkIdPos]; // Fork ID + + a_publics[newBlobNumPos] === b_publics[oldBlobNumPos]; // Check blob number + publics[oldBlobNumPos] <== a_publics[oldBlobNumPos]; // Old blob number + publics[newBlobNumPos] <== b_publics[newBlobNumPos]; // New blob number + + a_isOneBatch <== IsZero()(a_publics[newBlobNumPos] - a_publics[oldBlobNumPos] - 1); + b_isOneBatch <== IsZero()(b_publics[newBlobNumPos] - b_publics[oldBlobNumPos] - 1); + +} + +<% if(isTest) { -%> + component main = VerifyRecursive2(); +<% } -%> diff --git a/src/templates/helpers/verify_blob_outer.circom.ejs b/src/templates/helpers/verify_blob_outer.circom.ejs new file mode 100644 index 00000000..b36014d6 --- /dev/null 
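Note: the verify_blob_outer helper added next joins a batch proof and a blob-inner proof. It decides whether the blob is usable (isInvalid must be zero and finalAccBatchHashData non-zero), enforces that the batch chain is consistent with the blob, and builds the blob-outer publics, falling back to the blob-inner values when the blob is not usable. A plain-JavaScript sketch of that selection logic follows; it is illustrative only, the function name is hypothetical, publics are treated as whole bigint scalars rather than 32-bit limbs, and the "must match" cases are shown as thrown errors where the circuit uses constraints.

// Sketch of the selection performed by VerifyBlobOuter (assumes bigint fields).
function buildBlobOuterPublics(batch, blobInner, chainId) {
    const isValidBlob = blobInner.isInvalid === 0n && blobInner.finalAccBatchHashData !== 0n;

    // Hard constraints in the circuit (witness generation fails if violated)
    if (blobInner.oldStateRoot !== batch.oldStateRoot) throw new Error("old state root mismatch");
    if (blobInner.forkId !== batch.forkId) throw new Error("fork id mismatch");
    if (isValidBlob && blobInner.finalAccBatchHashData !== batch.newBatchAccInputHash)
        throw new Error("batch chain not fully accumulated into the blob");

    const sameL1InfoTreeIndex = batch.currentL1InfoTreeIndex === blobInner.lastL1InfoTreeIndex;
    if (isValidBlob && sameL1InfoTreeIndex && batch.currentL1InfoTreeRoot !== blobInner.lastL1InfoTreeRoot)
        throw new Error("L1 info tree root mismatch");

    const inTime = batch.newLastTimestamp <= blobInner.timestampLimit;
    const isValid = isValidBlob && sameL1InfoTreeIndex && inTime;

    return {
        oldStateRoot: blobInner.oldStateRoot,
        oldBlobStateRoot: blobInner.oldBlobStateRoot,
        oldBlobAccInputHash: blobInner.oldBlobAccInputHash,
        oldBlobNum: blobInner.oldBlobNum,
        chainId: isValid ? batch.chainId : chainId,
        forkId: blobInner.forkId,
        newStateRoot: isValid ? batch.newStateRoot : blobInner.oldStateRoot,
        newBlobStateRoot: blobInner.newBlobStateRoot,
        newBlobAccInputHash: blobInner.newBlobAccInputHash,
        newBlobNum: blobInner.newBlobNum,
        newLocalExitRoot: isValid ? batch.newLocalExitRoot : blobInner.localExitRootFromBlob,
    };
}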
+++ b/src/templates/helpers/verify_blob_outer.circom.ejs @@ -0,0 +1,145 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "mux1.circom"; +include "iszero.circom"; +include "bitify.circom"; + +template LessEqThan(n) { + signal input in[2]; + signal output out; + + assert(n <= 62); + + _ <== Num2Bits(n)(in[0]); + _ <== Num2Bits(n)(in[1]); + + component n2b = Num2Bits(n + 1); + n2b.in <== in[0] + (1 << n) - in[1] - 1; + + out <== 1 - n2b.out[n]; +} + +template VerifyBlobOuter() { + signal input publicsBatch[<%- batchPublics.nPublics %>]; + signal input publicsBlobInner[<%- blobInnerPublics.nPublics %>]; + signal input chainId; + + signal output isValidBlob; + signal output publicsBlobOuter[<%- blobOuterPublics.nPublics %>]; + + // Indexes Batch publics + var oldBatchStatePos = <%- batchPublics.oldStateRootPos %>; + var chainIdPos = <%- batchPublics.chainIdPos %>; + var forkIdPos = <%- batchPublics.forkIdPos %>; + var newBatchStateRootPos = <%- batchPublics.newStateRootPos %>; + var newBatchAccInputHashPos = <%- batchPublics.newBatchAccInputHashPos %>; + var currentL1InfoTreeRootPos = <%- batchPublics.currentL1InfoTreeRootPos %>; + var currentL1InfoTreeIndexPos = <%- batchPublics.currentL1InfoTreeIndexPos %>; + var newLocalExitRootPos = <%- batchPublics.newLocalExitRootPos %>; + var newLastTimestampPos = <%- batchPublics.newLastTimestampPos %>; + + // Indexes Blob Inner publics + var blobInner_oldBlobStateRootPos = <%- blobInnerPublics.oldBlobStateRootPos %>; + var blobInner_oldBlobAccInputHashPos = <%- blobInnerPublics.oldBlobAccInputHashPos %>; + var blobInner_oldBlobNumPos = <%- blobInnerPublics.oldBlobNumPos %>; + var blobInner_oldStateRootPos = <%- blobInnerPublics.oldStateRootPos %>; + var blobInner_forkIdPos = <%- blobInnerPublics.forkIdPos %>; + var blobInner_newBlobStateRootPos = <%- blobInnerPublics.newBlobStateRootPos %>; + var blobInner_newBlobAccInputHashPos = <%- blobInnerPublics.newBlobAccInputHashPos %>; + var blobInner_newBlobNumPos = <%- blobInnerPublics.newBlobNumPos %>; + var blobInner_finalAccBatchHashDataPos = <%- blobInnerPublics.finalAccBatchHashDataPos %>; + var blobInner_localExitRootFromBlobPos = <%- blobInnerPublics.localExitRootFromBlobPos %>; + var blobInner_isInvalidPos = <%- blobInnerPublics.isInvalidPos %>; + var blobInner_timestampLimitPos = <%- blobInnerPublics.timestampLimitPos %>; + var blobInner_lastL1InfoTreeRootPos = <%- blobInnerPublics.lastL1InfoTreeRootPos %>; + var blobInner_lastL1InfoTreeIndexPos = <%- blobInnerPublics.lastL1InfoTreeIndexPos %>; + + // Indexes Blob Outer publics + var blobOuter_oldStateRootPos = <%- blobOuterPublics.oldStateRootPos %>; + var blobOuter_oldBlobStateRootPos = <%- blobOuterPublics.oldBlobStateRootPos %>; + var blobOuter_oldBlobAccInputHashPos = <%- blobOuterPublics.oldBlobAccInputHashPos %>; + var blobOuter_oldBlobNumPos = <%- blobOuterPublics.oldBlobNumPos %>; + var blobOuter_chainIdPos = <%- blobOuterPublics.chainIdPos %>; + var blobOuter_forkIdPos = <%- blobOuterPublics.forkIdPos %>; + var blobOuter_newStateRootPos = <%- blobOuterPublics.newStateRootPos %>; + var blobOuter_newBlobStateRootPos = <%- blobOuterPublics.newBlobStateRootPos %>; + var blobOuter_newBlobAccInputHashPos = <%- blobOuterPublics.newBlobAccInputHashPos %>; + var blobOuter_newBlobNumPos = <%- blobOuterPublics.newBlobNumPos %>; + var blobOuter_newLocalExitRootPos = <%- blobOuterPublics.newLocalExitRootPos -%>; + + // Check if the final acc batch hash data is invalid (i.e.
is zero) + signal isFinalAccBatchDataZero[8]; + for (var i=0; i<8; i++) { + isFinalAccBatchDataZero[i] <== IsZero()(publicsBlobInner[blobInner_finalAccBatchHashDataPos + i]); + } + + signal isInvalidFinalAccBatchHashData <== IsZero()(isFinalAccBatchDataZero[0] + isFinalAccBatchDataZero[1] + isFinalAccBatchDataZero[2] + isFinalAccBatchDataZero[3] + isFinalAccBatchDataZero[4] + isFinalAccBatchDataZero[5] + isFinalAccBatchDataZero[6] + isFinalAccBatchDataZero[7] - 8); + + // Check if the blob is valid (which means that isInvalid is zero and finalAccBatchData is different than zero) + isValidBlob <== IsZero()(publicsBlobInner[blobInner_isInvalidPos] + isInvalidFinalAccBatchHashData); + + // Check that Blob and Batch are properly connected + + // Check that final acc batch data is the same as the new batch acc input hash + for(var i=0; i<8; i++) { + isValidBlob * (publicsBlobInner[blobInner_finalAccBatchHashDataPos + i] - publicsBatch[newBatchAccInputHashPos + i]) === 0; + } + + // Check that the L1 info tree index is correct + signal isValidL1InfoTreeIndex <== IsZero()(publicsBatch[currentL1InfoTreeIndexPos] - publicsBlobInner[blobInner_lastL1InfoTreeIndexPos]); + + // Check that the L1 info tree root is correct + signal checkL1InfoTreeRoot <== isValidBlob * isValidL1InfoTreeIndex; + for(var i=0; i<8; i++) { + checkL1InfoTreeRoot * (publicsBatch[currentL1InfoTreeRootPos + i] - publicsBlobInner[blobInner_lastL1InfoTreeRootPos + i]) === 0; + } + + // Check that batch new last timestamp is less than or equal than the blob timestamp limit + signal isValidTimestamp <== LessEqThan(62)([publicsBatch[newLastTimestampPos], publicsBlobInner[blobInner_timestampLimitPos]]); + + signal isValid <== IsZero()(isValidBlob + isValidL1InfoTreeIndex + isValidTimestamp - 3); + + // Build blob outer publics + + // Old roots + for (var i=0; i<8; i++) { + publicsBlobOuter[blobOuter_oldStateRootPos + i] <== publicsBlobInner[blobInner_oldStateRootPos + i]; + publicsBlobInner[blobInner_oldStateRootPos + i] === publicsBatch[oldBatchStatePos + i]; + + publicsBlobOuter[blobOuter_oldBlobStateRootPos + i] <== publicsBlobInner[blobInner_oldBlobStateRootPos + i]; + + publicsBlobOuter[blobOuter_oldBlobAccInputHashPos + i] <== publicsBlobInner[blobInner_oldBlobAccInputHashPos + i]; + } + + // Old Blob Num + publicsBlobOuter[blobOuter_oldBlobNumPos] <== publicsBlobInner[blobInner_oldBlobNumPos]; + + // chainId + publicsBlobOuter[blobOuter_chainIdPos] <== Mux1()([chainId, publicsBatch[chainIdPos]], isValid); + + // forkId + publicsBlobOuter[blobOuter_forkIdPos] <== publicsBlobInner[blobInner_forkIdPos]; + publicsBlobInner[blobInner_forkIdPos] === publicsBatch[forkIdPos]; + + // New roots + for (var i=0; i<8; i++) { + publicsBlobOuter[blobOuter_newStateRootPos + i] <== Mux1()([publicsBlobInner[blobInner_oldStateRootPos + i], publicsBatch[newBatchStateRootPos + i]], isValid); + + publicsBlobOuter[blobOuter_newBlobStateRootPos + i] <== publicsBlobInner[blobInner_newBlobStateRootPos + i]; + + publicsBlobOuter[blobOuter_newBlobAccInputHashPos + i] <== publicsBlobInner[blobInner_newBlobAccInputHashPos + i]; + + publicsBlobOuter[blobOuter_newLocalExitRootPos + i] <== Mux1()([publicsBlobInner[blobInner_localExitRootFromBlobPos + i], publicsBatch[newLocalExitRootPos + i]], isValid); + + } + + // New Blob num + publicsBlobOuter[blobOuter_newBlobNumPos] <== publicsBlobInner[blobInner_newBlobNumPos]; +} + +<% if(isTest) { -%> +component main = VerifyBlobOuter(); +<% } -%> \ No newline at end of file diff --git 
a/src/templates/recursive1.circom.ejs b/src/templates/recursive1.circom.ejs new file mode 100644 index 00000000..a738e9a8 --- /dev/null +++ b/src/templates/recursive1.circom.ejs @@ -0,0 +1,76 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "<%- verifierNames[0] %>.verifier.circom"; + +<% let starkInfo = starkInfoVerifiers[0]; %> + +template Main() { + signal input publics[<%- starkInfo.nPublics %>]; +<% if(options.setAggregatedKey) { -%> + signal input rootC[4]; +<% } -%> + +<% for (let s=1; s<=nStages + 1; ++s) { -%> + signal input root<%- s %>[4]; +<% } -%> + + signal input evals[<%- starkInfo.evMap.length %>][3]; // Evaluations of the set polynomials at a challenge value z and gz + + signal input s0_valsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.nConstants %>]; + signal input s0_siblingsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + signal input s0_vals<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.mapSectionsN[`cm${s}_n`] %>]; + signal input s0_siblings<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input s<%- s %>_root[4]; +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input s<%- s %>_vals[<%- starkInfo.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo.starkStruct.steps[s-1].nBits - starkInfo.starkStruct.steps[s].nBits))*3 %>]; + signal input s<%- s %>_siblings[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[s].nBits %>][4]; +<% } -%> + + signal input finalPol[<%- 1 << parseInt(starkInfo.starkStruct.steps[starkInfo.starkStruct.steps.length-1].nBits) %>][3]; + + component vA = StarkVerifier(); + + vA.publics <== publics; +<% for (let s=1; s<=nStages + 1; ++s) { -%> + vA.root<%- s %> <== root<%- s %>; +<% } -%> + + vA.evals <== evals; + + vA.s0_valsC <== s0_valsC; + vA.s0_siblingsC <== s0_siblingsC; +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + vA.s0_vals<%- s %> <== s0_vals<%- s %>; + vA.s0_siblings<%- s %> <== s0_siblings<%- s %>; +<% } -%> +<% } -%> +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + vA.s<%- s %>_root <== s<%- s %>_root; +<% } -%> +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + vA.s<%- s %>_vals <== s<%- s %>_vals; + vA.s<%- s %>_siblings <== s<%- s %>_siblings; +<% } -%> + vA.finalPol <== finalPol; +<% if(options.setEnableInput) { -%> + vA.enable <== 1; +<% } -%> + +} + +<% if(options.setAggregatedKey) { -%> +component main {public [publics, rootC]}= Main(); +<% } else { -%> +component main {public [publics]}= Main(); +<% } -%> \ No newline at end of file diff --git a/src/templates/recursive2.circom.ejs b/src/templates/recursive2.circom.ejs new file mode 100644 index 00000000..ec31ae3f --- /dev/null +++ b/src/templates/recursive2.circom.ejs @@ -0,0 +1,174 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "<%- verifierNames[0] %>.verifier.circom"; +include "mux1.circom"; +include "iszero.circom"; +include "verify_recursive2.circom"; + +<% let starkInfo = starkInfoVerifiers[0]; %> + +template Main() { + var rootCSingle[4] = [<%- vks[0].join(",") %>]; + + signal input publics[<%- publics.nPublics %>]; + signal input rootC[4]; + + signal input a_publics[<%- publics.nPublics %>]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> + signal input a_root<%- s %>[4]; +<% } -%> + + signal input a_evals[<%-
starkInfo.evMap.length %>][3]; + + signal input a_s0_valsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.nConstants %>]; + signal input a_s0_siblingsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + signal input a_s0_vals<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.mapSectionsN[`cm${s}_n`] %>]; + signal input a_s0_siblings<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input a_s<%- s %>_root[4]; +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input a_s<%- s %>_vals[<%- starkInfo.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo.starkStruct.steps[s-1].nBits - starkInfo.starkStruct.steps[s].nBits))*3 %>]; + signal input a_s<%- s %>_siblings[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[s].nBits %>][4]; +<% } -%> + + signal input a_finalPol[<%- 1 << parseInt(starkInfo.starkStruct.steps[starkInfo.starkStruct.steps.length-1].nBits) %>][3]; + + signal input b_publics[<%- publics.nPublics %>]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> + signal input b_root<%- s %>[4]; +<% } -%> + + signal input b_evals[<%- starkInfo.evMap.length %>][3]; + + signal input b_s0_valsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.nConstants %>]; + signal input b_s0_siblingsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + signal input b_s0_vals<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.mapSectionsN[`cm${s}_n`] %>]; + signal input b_s0_siblings<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input b_s<%- s %>_root[4]; +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input b_s<%- s %>_vals[<%- starkInfo.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo.starkStruct.steps[s-1].nBits - starkInfo.starkStruct.steps[s].nBits))*3 %>]; + signal input b_s<%- s %>_siblings[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[s].nBits %>][4]; +<% } -%> + + signal input b_finalPol[<%- 1 << parseInt(starkInfo.starkStruct.steps[starkInfo.starkStruct.steps.length-1].nBits) %>][3]; + + signal publicsAggregated[<%- publics.nPublics %>]; + signal a_isOneBatch; + signal b_isOneBatch; + +<% if(options.isAggregatedInput) { -%> + signal input a_isAggregatedCircuit; + signal input b_isAggregatedCircuit; + + (publicsAggregated, a_isOneBatch, b_isOneBatch) <== VerifyRecursive2()(a_publics, b_publics, a_isAggregatedCircuit, b_isAggregatedCircuit); +<% } else { -%> + (publicsAggregated, a_isOneBatch, b_isOneBatch) <== VerifyRecursive2()(a_publics, b_publics); +<% } -%> + + + for(var i=0; i<<%- publics.nPublics %>; i++) { + publics[i] === publicsAggregated[i]; + } + + component vA = StarkVerifier(); + + for (var i=0; i< <%- publics.nPublics %>; i++) { + vA.publics[i] <== a_publics[i]; + } + +<% for (let s=1; s<=nStages + 1; ++s) { -%> + vA.root<%- s %> <== a_root<%- s %>; +<% } -%> + + vA.evals <== a_evals; + + vA.s0_valsC <== a_s0_valsC; + vA.s0_siblingsC <== a_s0_siblingsC; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> +
vA.s0_vals<%- s %> <== a_s0_vals<%- s %>; + vA.s0_siblings<%- s %> <== a_s0_siblings<%- s %>; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + vA.s<%- s %>_root <== a_s<%- s %>_root; +<% } -%> +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + vA.s<%- s %>_vals <== a_s<%- s %>_vals; + vA.s<%- s %>_siblings <== a_s<%- s %>_siblings; +<% } -%> + vA.finalPol <== a_finalPol; +<% if(options.setEnableInput) { -%> + vA.enable <== 1; +<% } -%> + + vA.rootC <== MultiMux1(4)([rootC, rootCSingle], a_isOneBatch); + + for (var i=0; i<4; i++) { + vA.publics[<%- publics.nPublics %> + i] <== rootC[i]; + } + + component vB = StarkVerifier(); + + for (var i=0; i< <%- publics.nPublics %>; i++) { + vB.publics[i] <== b_publics[i]; + } + +<% for (let s=1; s<=nStages + 1; ++s) { -%> + vB.root<%- s %> <== b_root<%- s %>; +<% } -%> + + vB.evals <== b_evals; + + vB.s0_valsC <== b_s0_valsC; + vB.s0_siblingsC <== b_s0_siblingsC; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + vB.s0_vals<%- s %> <== b_s0_vals<%- s %>; + vB.s0_siblings<%- s %> <== b_s0_siblings<%- s %>; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + vB.s<%- s %>_root <== b_s<%- s %>_root; +<% } -%> +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + vB.s<%- s %>_vals <== b_s<%- s %>_vals; + vB.s<%- s %>_siblings <== b_s<%- s %>_siblings; +<% } -%> + vB.finalPol <== b_finalPol; +<% if(options.setEnableInput) { -%> + vB.enable <== 1; +<% } -%> + + vB.rootC <== MultiMux1(4)([rootC, rootCSingle], b_isOneBatch); + + for (var i=0; i<4; i++) { + vB.publics[<%- publics.nPublics %> + i] <== rootC[i]; + } +} + +component main {public [publics, rootC]}= Main(); diff --git a/src/templates/recursivef.circom.ejs b/src/templates/recursivef.circom.ejs new file mode 100644 index 00000000..a3a3d25f --- /dev/null +++ b/src/templates/recursivef.circom.ejs @@ -0,0 +1,87 @@ +pragma circom 2.1.0; +pragma custom_templates; + +include "<%- verifierNames[0] %>.verifier.circom"; +include "mux1.circom"; +include "iszero.circom"; + +<% let starkInfo = starkInfoVerifiers[0]; %> + +template Main() { +<% let nPublics = starkInfo.nPublics - 4; -%> + signal input publics[<%- nPublics %>]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> + signal input root<%- s %>[4]; +<% } -%> + + signal input evals[<%- starkInfo.evMap.length %>][3]; + + signal input s0_valsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.nConstants %>]; + signal input s0_siblingsC[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; + +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + signal input s0_vals<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.mapSectionsN[`cm${s}_n`] %>]; + signal input s0_siblings<%- s %>[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[0].nBits %>][4]; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input s<%- s %>_root[4]; +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + signal input s<%- s %>_vals[<%- starkInfo.starkStruct.nQueries %>][<%- (1 << parseInt(starkInfo.starkStruct.steps[s-1].nBits - starkInfo.starkStruct.steps[s].nBits))*3 %>]; + signal input s<%- s %>_siblings[<%- starkInfo.starkStruct.nQueries %>][<%- starkInfo.starkStruct.steps[s].nBits %>][4]; +<% } -%> + + signal input finalPol[<%- 1 << parseInt(starkInfo.starkStruct.steps[starkInfo.starkStruct.steps.length-1].nBits) %>][3]; + + component sv = StarkVerifier(); + + for (var i=0; i< <%- nPublics %>; i++) { + sv.publics[i] <== publics[i]; + } + +<% for (let s=1; s<=nStages +
1; ++s) { -%> + sv.root<%- s %> <== root<%- s %>; +<% } -%> + + sv.evals <== evals; + + sv.s0_valsC <== s0_valsC; + sv.s0_siblingsC <== s0_siblingsC; +<% for (let s=1; s<=nStages + 1; ++s) { -%> +<% if (starkInfo.mapSectionsN[`cm${s}_2ns`] > 0) { -%> + sv.s0_vals<%- s %> <== s0_vals<%- s %>; + sv.s0_siblings<%- s %> <== s0_siblings<%- s %>; +<% } -%> +<% } -%> + +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + sv.s<%- s %>_root <== s<%- s %>_root; +<% } -%> +<% for (let s=1; s< starkInfo.starkStruct.steps.length; s++) { -%> + sv.s<%- s %>_vals <== s<%- s %>_vals; + sv.s<%- s %>_siblings <== s<%- s %>_siblings; +<% } -%> + sv.finalPol <== finalPol; + +<% if(options.setEnableInput) { -%> + sv.enable <== 1; +<% } -%> + + var rootCBasic[4] = [<%- vks[0].join(",") %>]; + var rootCAgg[4] = [<%- vks[1].join(",") %>]; + + signal isOne <== IsZero()(publics[43] -publics[16] -1); + sv.rootC <== MultiMux1(4)([rootCAgg, rootCBasic], isOne); + + sv.publics[<%- nPublics %>] <== <%- vks[1][0] %>; + sv.publics[<%- nPublics + 1 %>] <== <%- vks[1][1] %>; + sv.publics[<%- nPublics + 2 %>] <== <%- vks[1][2] %>; + sv.publics[<%- nPublics + 3 %>] <== <%- vks[1][3] %>; +} + +component main {public [publics]}= Main(); \ No newline at end of file diff --git a/src/zkevm.c12b.starkstruct.json b/src/zkevm.c12b.starkstruct.json deleted file mode 100644 index f1e58315..00000000 --- a/src/zkevm.c12b.starkstruct.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "nBits": 22, - "nBitsExt": 24, - "nQueries": 64, - "verificationHashType": "BN128", - "steps": [ - {"nBits": 24}, - {"nBits": 20}, - {"nBits": 16}, - {"nBits": 11}, - {"nBits": 6} - ] -} diff --git a/test/circuits/blob_outer.circuit.test.js b/test/circuits/blob_outer.circuit.test.js new file mode 100644 index 00000000..cdee413d --- /dev/null +++ b/test/circuits/blob_outer.circuit.test.js @@ -0,0 +1,189 @@ +const path = require("path"); +const tmp = require('temporary'); +const fs = require("fs"); +const ejs = require("ejs"); +const { batchPublicsEip4844, blobInnerPublics, blobOuterPublics } = require("../../src/templates/helpers/publics"); +const { assert } = require("chai"); +const { preparePublics, generateRandomHex, generateRandomValue } = require("./helpers"); + +const wasm_tester = require("circom_tester").wasm; + +function generatePublicsBlobOuterProof(isInvalid_ = false, chainId_) { + const oldBatchStateRoot = generateRandomHex(63); + const oldBatchAccInputHash = generateRandomHex(256); + const previousL1InfoTreeRoot = generateRandomHex(256); + const previousL1InfoTreeIndex = generateRandomValue(32); + const chainId = generateRandomValue(10, chainId_); + const forkId = generateRandomValue(10); + const newBatchStateRoot = generateRandomHex(63, oldBatchStateRoot); + const newBatchAccInputHash = generateRandomHex(256, oldBatchAccInputHash); + const currentL1InfoTreeRoot = generateRandomHex(256, previousL1InfoTreeRoot); + const currentL1InfoTreeIndex = generateRandomValue(32, previousL1InfoTreeIndex); + const newLocalExitRoot = generateRandomHex(256); + const newLastTimestamp = generateRandomValue(32); + + const publicsBatch = { oldStateRoot: oldBatchStateRoot, oldBatchAccInputHash, previousL1InfoTreeRoot, previousL1InfoTreeIndex, chainId, forkId, newStateRoot: newBatchStateRoot, newBatchAccInputHash, currentL1InfoTreeRoot, currentL1InfoTreeIndex, newLocalExitRoot, newLastTimestamp }; + + const oldBlobStateRoot = generateRandomHex(63); + const oldBlobAccInputHash = generateRandomHex(256); + const oldBlobNum = generateRandomValue(10); + const oldStateRoot = oldBatchStateRoot; + const forkIdBlobInner = forkId; + const newBlobStateRoot =
generateRandomHex(63); + const newBlobAccInputHash = generateRandomHex(256); + const newBlobNum = oldBlobNum + 1; + const finalAccBatchHashData = newBatchAccInputHash; + const localExitRootFromBlob = generateRandomHex(256); + const isInvalid = 0; + const lastL1InfoTreeRoot = currentL1InfoTreeRoot; + const lastL1InfoTreeIndex = currentL1InfoTreeIndex; + const timestampLimit = newLastTimestamp + 100; + + const publicsBlobInner = { oldBlobStateRoot, oldBlobAccInputHash, oldBlobNum, oldStateRoot, newBlobStateRoot, forkId: forkIdBlobInner, newBlobAccInputHash, newBlobNum, finalAccBatchHashData, localExitRootFromBlob, isInvalid, timestampLimit, lastL1InfoTreeRoot, lastL1InfoTreeIndex }; + + const publicsBlobOuter = { + oldStateRoot: isInvalid_ ? oldStateRoot : oldBatchStateRoot, + oldBlobStateRoot, + oldBlobAccInputHash, + oldBlobNum, + chainId: isInvalid_ ? chainId_ : chainId, + forkId, + newStateRoot: isInvalid_ ? oldStateRoot : newBatchStateRoot, + newBlobStateRoot, + newBlobAccInputHash, + newBlobNum, + newLocalExitRoot: isInvalid_ ? localExitRootFromBlob : newLocalExitRoot, + }; + + return { publicsBatch, publicsBlobInner, publicsBlobOuter }; +} + + +describe("Verify Blob Outer Circuit Test", function () { + let circuit; + + let chainId_ = generateRandomValue(10); + this.timeout(10000000); + + before( async() => { + const template = await fs.promises.readFile(path.join(__dirname, "../../src/templates", "helpers", "verify_blob_outer.circom.ejs"), "utf8"); + const options = { batchPublics: batchPublicsEip4844, blobInnerPublics, blobOuterPublics, isTest: true} + const content = ejs.render(template, options); + const circuitFile = path.join(new tmp.Dir().path, "circuit.circom"); + await fs.promises.writeFile(circuitFile, content); + circuit = await wasm_tester(circuitFile, {O:1, prime: "goldilocks", include: "node_modules/pil-stark/circuits.gl"}); + }); + + it("Check that correct blob outer publics are generated in the happy path", async () => { + const { publicsBatch, publicsBlobInner, publicsBlobOuter } = generatePublicsBlobOuterProof(); + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + const blobOuterPublicsCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + const witness = await circuit.calculateWitness(input, true); + await circuit.checkConstraints(witness); + + + await circuit.assertOut(witness, { isValidBlob: 1, publicsBlobOuter: blobOuterPublicsCircom }); + + }); + + it("Check that if isInvalid = true in blob inner, blob outer inputs are selected from blob inner", async () => { + const { publicsBatch, publicsBlobInner, publicsBlobOuter } = generatePublicsBlobOuterProof(true, chainId_); + publicsBlobInner.isInvalid = 1; + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + const blobOuterPublicsCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + const witness = await circuit.calculateWitness(input, true); + await circuit.checkConstraints(witness); + + + await circuit.assertOut(witness, { isValidBlob: 0, publicsBlobOuter: blobOuterPublicsCircom }); + }); + + it("Check that if finalAccBatchHashData = 0, blob 
outer inputs are selected from blob inner", async () => { + const { publicsBatch, publicsBlobInner, publicsBlobOuter } = generatePublicsBlobOuterProof(true, chainId_); + publicsBlobInner.finalAccBatchHashData = 0; + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + const blobOuterPublicsCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + const witness = await circuit.calculateWitness(input, true); + await circuit.checkConstraints(witness); + + await circuit.assertOut(witness, { isValidBlob: 0, publicsBlobOuter: blobOuterPublicsCircom }); + + }); + + it("Check that if blob is valid and newBatchAccInputHash (batch) is not equal to finalAccBatchHashData (blobInner), verification fails", async () => { + const { publicsBatch, publicsBlobInner } = generatePublicsBlobOuterProof(false, chainId_); + publicsBlobInner.finalAccBatchHashData = generateRandomHex(256, publicsBatch.newBatchAccInputHash); + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + try { + await circuit.calculateWitness(input, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyBlobOuter_5 line: 88")); + } + }); + + it("Check that if blob is valid and newLastTimestampPos (batch) > timestampLimitPos (blobInner), blob outer inputs are selected from blob inner", async () => { + const { publicsBatch, publicsBlobInner, publicsBlobOuter } = generatePublicsBlobOuterProof(true, chainId_); + publicsBatch.newLastTimestamp = publicsBlobInner.timestampLimit + 500; + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + const blobOuterPublicsCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + const witness = await circuit.calculateWitness(input, true); + await circuit.checkConstraints(witness); + + + await circuit.assertOut(witness, { isValidBlob: 1, publicsBlobOuter: blobOuterPublicsCircom }); + }); + + it("Check that if blob is valid and currentL1InfoTreeIndex (batch) != lastL1InfoTreeIndex (blobInner), blob outer inputs are selected from blob inner", async () => { + const { publicsBatch, publicsBlobInner, publicsBlobOuter } = generatePublicsBlobOuterProof(true, chainId_); + publicsBlobInner.lastL1InfoTreeIndex = publicsBatch.currentL1InfoTreeIndex + 1; + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + const blobOuterPublicsCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + const witness = await circuit.calculateWitness(input, true); + await circuit.checkConstraints(witness); + + + await circuit.assertOut(witness, { isValidBlob: 1, publicsBlobOuter: blobOuterPublicsCircom }); + }); + + it("Check that if blob is valid and currentL1InfoTreeIndex 
(batch) == lastL1InfoTreeIndex (blobInner) and currentL1InfoTreeRoot (batch) != lastL1InfoTreeRoot (blobInner), verification fails", async () => { + + const { publicsBatch, publicsBlobInner } = generatePublicsBlobOuterProof(false, chainId_); + publicsBlobInner.lastL1InfoTreeRoot = generateRandomHex(256, publicsBatch.currentL1InfoTreeRoot); + + const batchPublicsCircom = preparePublics(publicsBatch, batchPublicsEip4844); + const blobInnerPublicsCircom = preparePublics(publicsBlobInner, blobInnerPublics); + + const input = { publicsBatch: batchPublicsCircom, publicsBlobInner: blobInnerPublicsCircom, chainId: chainId_ }; + try { + await circuit.calculateWitness(input, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyBlobOuter_5 line: 97")); + } + }); +}); diff --git a/test/circuits/final.circuit.test.js b/test/circuits/final.circuit.test.js new file mode 100644 index 00000000..833d4af6 --- /dev/null +++ b/test/circuits/final.circuit.test.js @@ -0,0 +1,189 @@ +const path = require("path"); +const tmp = require('temporary'); +const fs = require("fs"); +const ejs = require("ejs"); +const { batchPublics, blobOuterPublics } = require("../../src/templates/helpers/publics"); +const { FrSNARK } = require("@0xpolygonhermez/zkevm-commonjs/src/constants"); +const { Scalar } = require("ffjavascript"); +const { solidityPackedSha256 } = require("ethers"); +const { preparePublics, generateRandomHex, generateRandomValue } = require("./helpers"); +const { assert } = require("console"); + + +const wasm_tester = require("circom_tester").wasm; + +function generatePublicsBatch(aggregatorAddress) { + + const oldStateRoot = generateRandomHex(63); + const oldBatchAccInputHash = generateRandomHex(256); + const oldBatchNum = generateRandomValue(10); + const chainId = generateRandomValue(10); + const forkId = generateRandomValue(10); + const newStateRoot = generateRandomHex(63); + const newBatchAccInputHash = generateRandomHex(256); + const newLocalExitRoot = generateRandomHex(256); + const newBatchNum = oldBatchNum + 10; + + const publicsBatch = { oldStateRoot, oldBatchAccInputHash, oldBatchNum, chainId, forkId, newStateRoot, newBatchAccInputHash, newLocalExitRoot, newBatchNum }; + + const publicsBatchSolidity = [oldStateRoot, oldBatchAccInputHash, oldBatchNum, chainId, forkId, newStateRoot, newBatchAccInputHash, newLocalExitRoot, newBatchNum, aggregatorAddress]; + const publicsBatchHashTypesSolidity = ["uint256", "uint256", "uint64", "uint64", "uint64", "uint256", "uint256", "uint256", "uint64", "address"]; + + return { publicsBatch, publicsBatchSolidity, publicsBatchHashTypesSolidity }; +} + +function generatePublicsBlob(aggregatorAddress) { + + const oldStateRoot = generateRandomHex(63); + const oldBlobStateRoot = generateRandomHex(63); + const oldBlobAccInputHash = generateRandomHex(256); + const oldBlobNum = generateRandomValue(10); + const chainId = generateRandomValue(10); + const forkId = generateRandomValue(10); + const newStateRoot = generateRandomHex(63); + const newBlobStateRoot = generateRandomHex(63); + const newBlobAccInputHash = generateRandomHex(256); + const newBlobNum = oldBlobNum + 1; + const newLocalExitRoot = generateRandomHex(256); + + const publicsBlobOuter = { oldStateRoot, oldBlobStateRoot, oldBlobAccInputHash, oldBlobNum, chainId, forkId, newStateRoot, newBlobStateRoot, newBlobAccInputHash, newBlobNum, newLocalExitRoot }; + + const publicsBlobOuterSolidity = [oldStateRoot, oldBlobStateRoot, oldBlobAccInputHash, oldBlobNum, chainId, forkId, 
newStateRoot, newBlobStateRoot, newBlobAccInputHash, newBlobNum, newLocalExitRoot, aggregatorAddress]; + const publicsBlobOuterHashTypesSolidity = ["uint256", "uint256", "uint256", "uint64", "uint64", "uint64", "uint256", "uint256", "uint256", "uint64", "uint256", "address"]; + + return { publicsBlobOuter, publicsBlobOuterSolidity, publicsBlobOuterHashTypesSolidity }; +} + +describe("Get Sha256 Inputs Circuit Test", function () { + let aggregatorAddress = "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266"; + + this.timeout(10000000); + + describe("Forks previous to Feijoa", async() => { + let circuit; + + beforeEach( async() => { + const template = await fs.promises.readFile(path.join(__dirname, "../../src/templates", "helpers", "final", "get_sha256_inputs_batch.circom.ejs"), "utf8"); + + const options = { publics: batchPublics, isTest: true} + const content = ejs.render(template, options); + const circuitFile = path.join(new tmp.Dir().path, "circuit.circom"); + await fs.promises.writeFile(circuitFile, content); + circuit = await wasm_tester(circuitFile, {O:1, include: ["node_modules/circomlib/circuits", "node_modules/pil-stark/circuits.bn128"]}); + }); + + it("Test that solidity hash matches circom hash", async () => { + const { publicsBatch, publicsBatchSolidity, publicsBatchHashTypesSolidity } = generatePublicsBatch(aggregatorAddress); + const publicsBatchCircom = preparePublics(publicsBatch, batchPublics); + + const sha256Solidity = Scalar.mod(Scalar.fromString(solidityPackedSha256(publicsBatchHashTypesSolidity, publicsBatchSolidity), 16), FrSNARK); + + const witness = await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBatchCircom}, true); + + await circuit.assertOut(witness, { publicsHash: sha256Solidity }); + }); + + it("Fails if old state root is higher than GL", async () => { + const { publicsBatch } = generatePublicsBatch(aggregatorAddress); + publicsBatch.oldStateRoot = 0xFFFFFFFF00000001n + 1n; + + const publicsBatchCircom = preparePublics(publicsBatch, batchPublics); + try { + await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBatchCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template LessThanGoldilocks")); + } + }); + + it("Fails if new state root is higher than GL", async () => { + const { publicsBatch } = generatePublicsBatch(aggregatorAddress); + publicsBatch.newStateRoot = 0xFFFFFFFF00000001n + 1n; + + const publicsBatchCircom = preparePublics(publicsBatch, batchPublics); + try { + await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBatchCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template LessThanGoldilocks")); + } + }); + }); + + describe("Forks after Feijoa", async() => { + let circuit; + + beforeEach( async() => { + const templateEip4844 = await fs.promises.readFile(path.join(__dirname, "../../src/templates", "helpers", "final", "get_sha256_inputs_blob.circom.ejs"), "utf8"); + + const optionsEip4844 = { publics: blobOuterPublics, isTest: true } + const content4844 = ejs.render(templateEip4844, optionsEip4844); + const circuitEip4844File = path.join(new tmp.Dir().path, "circuitEip4844.circom"); + await fs.promises.writeFile(circuitEip4844File, content4844); + circuit = await wasm_tester(circuitEip4844File, {O:1, include: ["node_modules/circomlib/circuits", "node_modules/pil-stark/circuits.bn128"]}); + }); + + it("Test that solidity hash matches circom hash", async () => { + const { 
publicsBlobOuter, publicsBlobOuterSolidity, publicsBlobOuterHashTypesSolidity } = generatePublicsBlob(aggregatorAddress); + const publicsBlobOuterCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + + const sha256Solidity = Scalar.mod(Scalar.fromString(solidityPackedSha256(publicsBlobOuterHashTypesSolidity, publicsBlobOuterSolidity), 16), FrSNARK); + + const witness = await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBlobOuterCircom}, true); + + await circuit.assertOut(witness, { publicsHash: sha256Solidity }); + }); + + it("Fails if old blob state root is higher than GL", async () => { + const { publicsBlobOuter } = generatePublicsBlob(aggregatorAddress); + publicsBlobOuter.oldBlobStateRoot = 0xFFFFFFFF00000001n + 1n; + + const publicsBlobOuterCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + try { + await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBlobOuterCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template LessThanGoldilocks")); + } + }); + + it("Fails if new blob state root is higher than GL", async () => { + const { publicsBlobOuter } = generatePublicsBlob(aggregatorAddress); + publicsBlobOuter.newBlobStateRoot = 0xFFFFFFFF00000001n + 1n; + + const publicsBlobOuterCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + try { + await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBlobOuterCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template LessThanGoldilocks")); + } + }); + + it("Fails if old state root is higher than GL", async () => { + const { publicsBlobOuter } = generatePublicsBlob(aggregatorAddress); + publicsBlobOuter.oldStateRoot = 0xFFFFFFFF00000001n + 1n; + + const publicsBlobOuterCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + try { + await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBlobOuterCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template LessThanGoldilocks")); + } + }); + + it("Fails if new state root is higher than GL", async () => { + const { publicsBlobOuter } = generatePublicsBlob(aggregatorAddress); + publicsBlobOuter.newStateRoot = 0xFFFFFFFF00000001n + 1n; + + const publicsBlobOuterCircom = preparePublics(publicsBlobOuter, blobOuterPublics); + try { + await circuit.calculateWitness({aggregatorAddr: aggregatorAddress, publics: publicsBlobOuterCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template LessThanGoldilocks")); + } + }); + }); +}); diff --git a/test/circuits/helpers.js b/test/circuits/helpers.js new file mode 100644 index 00000000..56973db2 --- /dev/null +++ b/test/circuits/helpers.js @@ -0,0 +1,52 @@ +const { scalar2fea } = require("@0xpolygonhermez/zkevm-commonjs/src/smt-utils"); +const { F1Field, Scalar } = require("ffjavascript"); + +module.exports.preparePublics = function preparePublics(publics, publicsIndexes) { + const Fr = new F1Field(0xffffffff00000001n); + + const publicsCircom = new Array(publicsIndexes.nPublics); + + const publicsNames = Object.keys(publicsIndexes); + for(let i = 0; i < publicsNames.length; i++) { + const name = publicsNames[i]; + if(name === "nPublics") continue; + + const nameIndex = publicsIndexes[name]; + const nextNameIndex = publicsIndexes[publicsNames[i + 1]]; + const length = nextNameIndex - nameIndex; + const value = publics[name.slice(0, -3)]; + if(length === 1) 
{ + publicsCircom[nameIndex] = Fr.e(value); + } else if(length === 8) { + const circomInputs = scalar2fea(Fr, Scalar.e(value)); + for(let j = 0; j < circomInputs.length; j++) { + publicsCircom[nameIndex + j] = circomInputs[j]; + } + } else throw new Error("Unsupported length: ", + length); + + } + + return publicsCircom; +} + +module.exports.generateRandomHex = function generateRandomHex(maxBits = 32, forbiddenHex = null) { + let maxValue = Math.pow(2, maxBits); + let hexValue = '0x' + Math.floor(Math.random() * maxValue).toString(16); + if(forbiddenHex) { + while(hexValue === forbiddenHex) { + hexValue = '0x' + Math.floor(Math.random() * maxValue).toString(16); + } + } + return hexValue; +} + +module.exports.generateRandomValue = function generateRandomValue(maxBits, forbiddenValue = null) { + let maxValue = Math.pow(2, maxBits); + let value = Math.floor(Math.random() * maxValue); + if(forbiddenValue) { + while(value === forbiddenValue) { + value = Math.floor(Math.random() * maxValue); + } + } + return value; +} \ No newline at end of file diff --git a/test/circuits/recursive2.circuit.test.js b/test/circuits/recursive2.circuit.test.js new file mode 100644 index 00000000..3a30d1ea --- /dev/null +++ b/test/circuits/recursive2.circuit.test.js @@ -0,0 +1,531 @@ +const path = require("path"); +const tmp = require('temporary'); +const fs = require("fs"); +const ejs = require("ejs"); +const { batchPublicsEip4844, blobOuterPublics, batchPublics } = require("../../src/templates/helpers/publics"); +const { assert } = require("chai"); +const { preparePublics, generateRandomHex, generateRandomValue } = require("./helpers"); + +const wasm_tester = require("circom_tester").wasm; + +describe("Verify Recursive 2", function () { + this.timeout(10000000); + + describe("Forks previous to feijoa batch recursion", async () => { + + let circuit; + + function generatePublics() { + const chainId = generateRandomValue(10); + const forkId = generateRandomValue(10); + + const oldBatchStateRootA = generateRandomHex(63); + const oldBatchAccInputHashA = generateRandomHex(256); + const oldBatchNumA = generateRandomValue(10); + const newBatchStateRootA = generateRandomHex(63); + const newBatchAccInputHashA = generateRandomHex(256); + const newBatchNumA = oldBatchNumA + 1; + const newLocalExitRootA = generateRandomHex(256); + + const publicsBatchA = { + oldStateRoot: oldBatchStateRootA, + oldBatchAccInputHash: oldBatchAccInputHashA, + oldBatchNum: oldBatchNumA, + chainId, + forkId, + newStateRoot: newBatchStateRootA, + newBatchAccInputHash: newBatchAccInputHashA, + newLocalExitRoot: newLocalExitRootA, + newBatchNum: newBatchNumA + }; + + const oldBatchStateRootB = newBatchStateRootA; + const oldBatchAccInputHashB = newBatchAccInputHashA; + const oldBatchNumB = newBatchNumA; + const newBatchStateRootB = generateRandomHex(63); + const newBatchAccInputHashB = generateRandomHex(256); + const newBatchNumB = oldBatchNumB + 10; + const newLocalExitRootB = generateRandomHex(256); + + const publicsBatchB = { + oldStateRoot: oldBatchStateRootB, + oldBatchAccInputHash: oldBatchAccInputHashB, + oldBatchNum: oldBatchNumB, + chainId, + forkId, + newStateRoot: newBatchStateRootB, + newBatchAccInputHash: newBatchAccInputHashB, + newLocalExitRoot: newLocalExitRootB, + newBatchNum: newBatchNumB + }; + + + const publicsAggregated = { + oldStateRoot: oldBatchStateRootA, + oldBatchAccInputHash: oldBatchAccInputHashA, + oldBatchNum: oldBatchNumA, + chainId, + forkId, + newStateRoot: newBatchStateRootB, + newBatchAccInputHash: 
newBatchAccInputHashB, + newLocalExitRoot: newLocalExitRootB, + newBatchNum: newBatchNumB + } + + return { publicsBatchA, publicsBatchB, publicsAggregated }; + } + + beforeEach( async() => { + const template = await fs.promises.readFile(path.join(__dirname, "../../src/templates", "helpers", "recursive2", "recursive2_checks_batch.circom.ejs"), "utf8"); + const options = { publics: batchPublics, isTest: true} + const content = ejs.render(template, options); + const circuitFile = path.join(new tmp.Dir().path, "circuit.circom"); + await fs.promises.writeFile(circuitFile, content); + circuit = await wasm_tester(circuitFile, {O:1, prime: "goldilocks", include: "node_modules/pil-stark/circuits.gl"}); + }); + + it("Check that correct recursive2 publics are generated in the happy path", async () => { + const { publicsBatchA, publicsBatchB, publicsAggregated } = generatePublics(); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublics); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublics); + const publicsAggregatedCircom = preparePublics(publicsAggregated, batchPublics); + + const witness = await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom}, true); + + await circuit.assertOut(witness, { publics: publicsAggregatedCircom, a_isOneBatch: 1, b_isOneBatch: 0 }); + }); + + it("Fails if old state root doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.oldStateRoot = generateRandomHex(63, publicsBatchA.newStateRoot); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublics); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 28")); + } + }); + + it("Fails if new batch acc input hash root doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.oldBatchAccInputHash = generateRandomHex(256, publicsBatchA.newBatchAccInputHash); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublics); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 32")); + } + }); + + it("Fails if chain Id doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.chainId = generateRandomValue(10, publicsBatchA.chainId); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublics); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 39")); + } + }); + + it("Fails if fork Id doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.forkId = generateRandomValue(10, publicsBatchA.forkId); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublics); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublics); + + try { + await circuit.calculateWitness({ 
a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 42")); + } + }); + + it("Fails if batch number doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.oldBatchNum = publicsBatchA.newBatchNum + 500; + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublics); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 45")); + } + }); + + }); + + describe("Forks from feijoa batch recursion", async () => { + let circuit; + + function generatePublics() { + const chainId = generateRandomValue(10); + const forkId = generateRandomValue(10); + + const oldBatchStateRootA = generateRandomHex(63); + const oldBatchAccInputHashA = generateRandomHex(256); + const previousL1InfoTreeRootA = generateRandomHex(256); + const previousL1InfoTreeIndexA = generateRandomValue(10); + const newBatchStateRootA = generateRandomHex(63); + const newBatchAccInputHashA = generateRandomHex(256); + const currentL1InfoTreeRootA = generateRandomHex(256); + const currentL1InfoTreeIndexA = previousL1InfoTreeIndexA + 1; + const newLastTimestampA = generateRandomValue(32); + const newLocalExitRootA = generateRandomHex(256); + + const publicsBatchA = { + oldStateRoot: oldBatchStateRootA, + oldBatchAccInputHash: oldBatchAccInputHashA, + previousL1InfoTreeRoot: previousL1InfoTreeRootA, + previousL1InfoTreeIndex: previousL1InfoTreeIndexA, + chainId, + forkId, + newStateRoot: newBatchStateRootA, + newBatchAccInputHash: newBatchAccInputHashA, + currentL1InfoTreeRoot: currentL1InfoTreeRootA, + currentL1InfoTreeIndex: currentL1InfoTreeIndexA, + newLocalExitRoot: newLocalExitRootA, + newLastTimestamp: newLastTimestampA + }; + + const oldBatchStateRootB = newBatchStateRootA; + const oldBatchAccInputHashB = newBatchAccInputHashA; + const previousL1InfoTreeRootB = currentL1InfoTreeRootA; + const previousL1InfoTreeIndexB = currentL1InfoTreeIndexA; + const newBatchStateRootB = generateRandomHex(63); + const newBatchAccInputHashB = generateRandomHex(256); + const currentL1InfoTreeRootB = generateRandomHex(256); + const currentL1InfoTreeIndexB = previousL1InfoTreeIndexB + 10; + const newLastTimestampB = generateRandomValue(32); + const newLocalExitRootB = generateRandomHex(256); + + + const publicsBatchB = { + oldStateRoot: oldBatchStateRootB, + oldBatchAccInputHash: oldBatchAccInputHashB, + previousL1InfoTreeRoot: previousL1InfoTreeRootB, + previousL1InfoTreeIndex: previousL1InfoTreeIndexB, + chainId, + forkId, + newStateRoot: newBatchStateRootB, + newBatchAccInputHash: newBatchAccInputHashB, + currentL1InfoTreeRoot: currentL1InfoTreeRootB, + currentL1InfoTreeIndex: currentL1InfoTreeIndexB, + newLocalExitRoot: newLocalExitRootB, + newLastTimestamp: newLastTimestampB + }; + + const publicsAggregated = { + oldStateRoot: oldBatchStateRootA, + oldBatchAccInputHash: oldBatchAccInputHashA, + previousL1InfoTreeRoot: previousL1InfoTreeRootA, + previousL1InfoTreeIndex: previousL1InfoTreeIndexA, + chainId, + forkId, + newStateRoot: newBatchStateRootB, + newBatchAccInputHash: newBatchAccInputHashB, + currentL1InfoTreeRoot: currentL1InfoTreeRootB, + currentL1InfoTreeIndex: currentL1InfoTreeIndexB, + newLocalExitRoot: 
newLocalExitRootB, + newLastTimestamp: newLastTimestampB + } + + return { publicsBatchA, publicsBatchB, publicsAggregated }; + } + + beforeEach( async() => { + const template = await fs.promises.readFile(path.join(__dirname, "../../src/templates", "helpers", "recursive2", "recursive2_checks_batch_eip4844.circom.ejs"), "utf8"); + const options = { publics: batchPublicsEip4844, isTest: true} + const content = ejs.render(template, options); + const circuitFile = path.join(new tmp.Dir().path, "circuit.circom"); + await fs.promises.writeFile(circuitFile, content); + circuit = await wasm_tester(circuitFile, {O:1, prime: "goldilocks", include: "node_modules/pil-stark/circuits.gl"}); + }); + + it("Check that correct recursive2 publics are generated in the happy path", async () => { + const { publicsBatchA, publicsBatchB, publicsAggregated } = generatePublics(); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + const publicsAggregatedCircom = preparePublics(publicsAggregated, batchPublicsEip4844); + + const witness = await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + + await circuit.assertOut(witness, { publics: publicsAggregatedCircom, a_isOneBatch: 0, b_isOneBatch: 1 }); + }); + + it("Fails if old state root doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchA.newStateRoot = generateRandomHex(63, publicsBatchB.oldStateRoot); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 33")); + } + }); + + it("Fails if new batch acc input hash root doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchA.newBatchAccInputHash = generateRandomHex(256, publicsBatchB.oldBatchAccInputHash); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 37")); + } + }); + + it("Fails if chain Id doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.chainId = generateRandomValue(10, publicsBatchA.chainId); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 48")); + } + }); + + it("Fails if fork Id doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + 
publicsBatchB.forkId = generateRandomValue(10, publicsBatchA.forkId); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 51")); + } + }); + + it("Fails if L1 Info Tree Root doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.previousL1InfoTreeRoot = generateRandomHex(256, publicsBatchA.currentL1InfoTreeRoot); + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 41")); + } + }); + + it("Fails if L1 Info Tree Index doesn't match", async () => { + const { publicsBatchA, publicsBatchB } = generatePublics(); + publicsBatchB.previousL1InfoTreeIndex = publicsBatchA.currentL1InfoTreeIndex - 354; + const publicsBatchACircom = preparePublics(publicsBatchA, batchPublicsEip4844); + const publicsBatchBCircom = preparePublics(publicsBatchB, batchPublicsEip4844); + + try { + await circuit.calculateWitness({ a_publics: publicsBatchACircom, b_publics: publicsBatchBCircom, a_isAggregatedCircuit: 1, b_isAggregatedCircuit: 0}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 54")); + } + }); + }); + + describe("Forks from feijoa blob recursion", async () => { + let circuit; + + function generatePublics() { + const chainId = generateRandomValue(10); + const forkId = generateRandomValue(10); + + const oldStateRootA = generateRandomHex(63); + const oldBlobStateRootA = generateRandomHex(63); + const oldBlobAccInputHashA = generateRandomHex(256); + const oldBlobNumA = generateRandomValue(10); + const newStateRootA = generateRandomHex(63); + const newBlobStateRootA = generateRandomHex(63); + const newBlobAccInputHashA = generateRandomHex(256); + const newBlobNumA = oldBlobNumA + 1; + const newLocalExitRootA = generateRandomHex(256); + + const publicsBlobA = { + oldStateRoot: oldStateRootA, + oldBlobStateRoot: oldBlobStateRootA, + oldBlobAccInputHash: oldBlobAccInputHashA, + oldBlobNum: oldBlobNumA, + chainId, + forkId, + newStateRoot: newStateRootA, + newBlobStateRoot: newBlobStateRootA, + newBlobAccInputHash: newBlobAccInputHashA, + newBlobNum: newBlobNumA, + newLocalExitRoot: newLocalExitRootA + } + + const oldStateRootB = newStateRootA; + const oldBlobStateRootB = newBlobStateRootA; + const oldBlobAccInputHashB = newBlobAccInputHashA; + const oldBlobNumB = newBlobNumA; + const newStateRootB = generateRandomHex(63); + const newBlobStateRootB = generateRandomHex(63); + const newBlobAccInputHashB = generateRandomHex(256); + const newBlobNumB = oldBlobNumB + 10; + const newLocalExitRootB = generateRandomHex(256); + + const publicsBlobB = { + oldStateRoot: oldStateRootB, + oldBlobStateRoot: oldBlobStateRootB, + oldBlobAccInputHash: oldBlobAccInputHashB, + oldBlobNum: oldBlobNumB, + chainId, + forkId, + newStateRoot: newStateRootB, + 
newBlobStateRoot: newBlobStateRootB, + newBlobAccInputHash: newBlobAccInputHashB, + newBlobNum: newBlobNumB, + newLocalExitRoot: newLocalExitRootB + }; + + const publicsAggregated = { + oldStateRoot: oldStateRootA, + oldBlobStateRoot: oldBlobStateRootA, + oldBlobAccInputHash: oldBlobAccInputHashA, + oldBlobNum: oldBlobNumA, + chainId, + forkId, + newStateRoot: newStateRootB, + newBlobStateRoot: newBlobStateRootB, + newBlobAccInputHash: newBlobAccInputHashB, + newBlobNum: newBlobNumB, + newLocalExitRoot: newLocalExitRootB + } + + return { publicsBlobA, publicsBlobB, publicsAggregated }; + } + + beforeEach( async() => { + const template = await fs.promises.readFile(path.join(__dirname, "../../src/templates", "helpers", "recursive2", "recursive2_checks_blob.circom.ejs"), "utf8"); + const options = { publics: blobOuterPublics, isTest: true} + const content = ejs.render(template, options); + const circuitFile = path.join(new tmp.Dir().path, "circuit.circom"); + await fs.promises.writeFile(circuitFile, content); + circuit = await wasm_tester(circuitFile, {O:1, prime: "goldilocks", include: "node_modules/pil-stark/circuits.gl"}); + }); + + it("Check that correct recursive2 publics are generated in the happy path", async () => { + const { publicsBlobA, publicsBlobB, publicsAggregated } = generatePublics(); + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + const publicsAggregatedCircom = preparePublics(publicsAggregated, blobOuterPublics); + + const witness = await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + + await circuit.assertOut(witness, { publics: publicsAggregatedCircom, a_isOneBatch: 1, b_isOneBatch: 0 }); + }); + + it("Fails if old state root doesn't match", async () => { + const { publicsBlobA, publicsBlobB } = generatePublics(); + publicsBlobA.newStateRoot = generateRandomHex(63, publicsBlobB.oldStateRoot); + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 29")); + } + }); + + it("Fails if old blob state root doesn't match", async () => { + const { publicsBlobA, publicsBlobB } = generatePublics(); + publicsBlobA.newBlobStateRoot = generateRandomHex(63, publicsBlobB.oldBlobStateRoot); + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 33")); + } + }); + + it("Fails if new blob acc input hash root doesn't match", async () => { + const { publicsBlobA, publicsBlobB } = generatePublics(); + publicsBlobA.newBlobAccInputHash = generateRandomHex(256, publicsBlobB.oldBlobAccInputHash); + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + assert(false); + } catch(err) { + 
assert(err.message.includes("Error in template VerifyRecursive2_1 line: 37")); + } + }); + + it("Fails if chain Id doesn't match", async () => { + const { publicsBlobA, publicsBlobB } = generatePublics(); + publicsBlobA.chainId = generateRandomValue(10, publicsBlobB.chainId); + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 44")); + } + }); + + it("Fails if fork Id doesn't match", async () => { + const { publicsBlobA, publicsBlobB } = generatePublics(); + publicsBlobA.forkId = generateRandomValue(10, publicsBlobB.forkId); + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + assert(false); + } catch(err) { + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 47")); + } + }); + + it("Fails if blob number doesn't match", async () => { + const { publicsBlobA, publicsBlobB } = generatePublics(); + publicsBlobB.oldBlobNum = publicsBlobA.newBlobNum - 500; + const publicsBlobACircom = preparePublics(publicsBlobA, blobOuterPublics); + const publicsBlobBCircom = preparePublics(publicsBlobB, blobOuterPublics); + + try { + await circuit.calculateWitness({ a_publics: publicsBlobACircom, b_publics: publicsBlobBCircom}, true); + assert(false); + } catch(err) { + if(err.message.includes("Unspecified AssertiorError")) throw err; + assert(err.message.includes("Error in template VerifyRecursive2_1 line: 50")); + } + }); + + }); +}); diff --git a/tools/build_all.sh b/tools/build_all.sh index 4ffea609..cdfc9d0a 100755 --- a/tools/build_all.sh +++ b/tools/build_all.sh @@ -4,8 +4,11 @@ SKIP=0 STEP_BY_STEP=0 START_OPTIONS=`env|grep -wE 'npm_config_(pil|pilconfig|continue|from|build|starkstruct)'|sed 's/npm_config_//g'|xargs` -echo "####### START $(date +'%Y-%m-%d %H:%M:%S') $START_OPTIONS ########" >> $BDIR/steps.log -LAST_STEP_FILE=$BDIR/last_step.txt +mkdir -p $BDIR/build +cp -f $PWD/package.json $BDIR/build +cp -f $PWD/package-lock.json $BDIR/build +echo "####### START $(date +'%Y-%m-%d %H:%M:%S') $START_OPTIONS ########" >> $BDIR/build/steps.log +LAST_STEP_FILE=$BDIR/build/last_step.txt [ ! -z $npm_config_from ] && SKIP=1 [ ! -z $npm_config_step ] && npm_config_from=$npm_config_step && npm_config_to=$npm_config_step && SKIP=1 [ ! -z $npm_config_step_by_step ] && STEP_BY_STEP=1 @@ -18,12 +21,12 @@ while [ $# -gt 0 ]; do [ "$npm_config_from" = "$STEP" ] && SKIP=0 [ ! 
-z "$PREV_STEP" ] && [ "$PREV_STEP" = "$LAST_STEP" ] && SKIP=0 [ $SKIP -eq 1 ] && continue - mkdir -p $BDIR/steps + mkdir -p $BDIR/build/steps COMMIT=`git log --pretty=format:'%H' -n 1` CIRCOM_BIN=`whereis -b circom | tr ' ' "\n" | tail -1` CIRCOM_HASH=`sha256sum $CIRCOM_BIN|cut -d' ' -f1` CIRCOM_VERSION=`circom --version` - (echo "COMMIT: $COMMIT" && echo "$CIRCOM_VERSION ($CIRCOM_HASH)" && npm ls) | tee $BDIR/dependencies.txt > $BDIR/steps/$STEP + (echo "COMMIT: $COMMIT" && echo "$CIRCOM_VERSION ($CIRCOM_HASH)" && npm ls) | tee $BDIR/build/dependencies.txt > $BDIR/build/steps/$STEP echo "\e[35;1m####### $STEP #######\e[0m" START_STEP_TIME=$(date +%s) npm run $STEP @@ -34,15 +37,15 @@ while [ $# -gt 0 ]; do TOT_ELAPSED_SECONDS=$((END_STEP_TIME - START_TIME)) TOT_ELAPSED=$(date -ud "@$TOT_ELAPSED_SECONDS" +"$((TOT_ELAPSED_SECONDS/3600)):%M:%S") if [ $RES -ne 0 ]; then - echo "$STEP FAIL $ELAPSED / $TOT_ELAPSED" >> $BDIR/steps.log + echo "$STEP FAIL $ELAPSED / $TOT_ELAPSED" >> $BDIR/build/steps.log echo "$STEP ...[\e[31;1mFAIL\e[0m] $ELAPSED / $TOT_ELAPSED\n" break fi - echo "$STEP OK $ELAPSED / $TOT_ELAPSED" >> $BDIR/steps.log + echo "$STEP OK $ELAPSED / $TOT_ELAPSED" >> $BDIR/build/steps.log echo "$STEP ...[\e[32;1mOK\e[0m] $ELAPSED / $TOT_ELAPSED\n" echo $STEP > $LAST_STEP_FILE [ "$npm_config_to" = "$STEP" ] && break [ $STEP_BY_STEP -eq 1 ] && break sleep 1 done -echo "####### END $(date +'%Y-%m-%d %H:%M:%S') ########" >> $BDIR/steps.log +echo "####### END $(date +'%Y-%m-%d %H:%M:%S') ########" >> $BDIR/build/steps.log diff --git a/tools/copy_files.sh b/tools/copy_files.sh deleted file mode 100755 index db892a42..00000000 --- a/tools/copy_files.sh +++ /dev/null @@ -1,310 +0,0 @@ -#!/bin/bash -usage() -{ - echo "copy_files " - exit 1 -} - -makedir() -{ - if [ $REMOTE -eq 1 ]; then - HOST=`echo $1|sed 's/\([^:]*\):.*/\1/'` - DIR=`echo $1|sed 's/[^:]*://'` - echo "creating directory $DIR => ssh $HOST mkdir -p $DIR" - ssh $HOST mkdir -p $DIR - else - [ ! -d $1 ] && mkdir -p $1 - fi -} -cpfile() -{ - if [ $WAIT -eq 1 ]; then - lsof $1 >/dev/null 2>&1 - if [ $? -eq 0 ]; then - echo -n "waiting file $1 ..." - while true; do lsof $1 >/dev/null 2>&1; [ $? -ne 0 ] && break; echo -n "."; sleep 30; done - echo " (ready)" - fi - fi - if [ ! -z $CP_FLAGS ]; then - $CP_CMD -$CP_FLAGS $1 $2 - else - $CP_CMD $1 $2 - fi -} - -cpdir() -{ - if [ $WAIT -eq 1 ]; then - PF=`find $1 -mmin -1 | wc -l 2>/dev/null` - if [ $PF -gt 0 ]; then - echo -n "waiting directory $1 ..." - while true; do PF=`find $1 -mmin -1 | wc -l 2>/dev/null`; [ $PF -eq 0 ] && break; echo -n "."; sleep 30; done - echo " (ready)" - fi - fi - $CP_CMD -r$CP_FLAGS $1 $2 -} -CP_CMD=cp -CP_FLAGS=v -WAIT=0 -FINAL_PHASE_1=1 -FINAL_PHASE_2=1 -ONLY_HASH=0 -ONLY_CONFIG=0 -REMOTE=0 -while [ $# -gt 0 ]; do - if [ ${1:0:1} = '-' ]; then - case $1 in - --final1) - FINAL_PHASE_1=1 - FINAL_PHASE_2=0 - ;; - --final2) - FINAL_PHASE_1=0 - FINAL_PHASE_2=1 - ;; - --link) - CP_FLAGS="vl" - ;; - --hash) - ONLY_HASH=1 - ;; - --config) - /ONLY_CONFIG=1 - ;; - --scp) - REMOTE=1 - ;; - -w) - WAIT=1 - ;; - *) - echo "Unknow option $1" - usage - ;; - esac - else - if [ -z $BDIR ]; then - BDIR=$1 - elif [ -z $DST ]; then - DST=$1 - else - echo "Unknown option $1" - usage - fi - fi - shift -done - -[ -z $BDIR ] && usage - -BASEDIR=. 
-if [ $REMOTE -eq 1 ]; then - CP_CMD=scp - CP_FLAGS= -fi -if [ $ONLY_HASH -eq 0 ]; then - [ -z $DST ] && usage - makedir $DST/c_files - makedir $DST/pil - echo "`date +'%Y%m%d_%H%M%S'` DST=$DST FINAL_PHASE_1=$FINAL_PHASE_1 FINAL_PHASE_2=$FINAL_PHASE_2" >> $BDIR/copy.log -fi -if [ $FINAL_PHASE_1 -eq 0 -o $ONLY_HASH -eq 1 ]; then - CP_DEFAULT=0 -else - CP_DEFAULT=1 -fi -CP_SCRIPTS=$CP_DEFAULT -CP_ZKEVM=$CP_DEFAULT -CP_C12A=$CP_DEFAULT -CP_RECURSIVE1=$CP_DEFAULT -CP_RECURSIVE2=$CP_DEFAULT -CP_RECURSIVEF=$CP_DEFAULT -CP_FINAL=1 -CP_CIRCOM=$CP_DEFAULT -CP_BUILDS=1 -GENERATE_HASH=1 - -if [ $ONLY_HASH -eq 1 ]; then - CP_FINAL=0 - CP_BUILDS=0 -fi -if [ $ONLY_HASH -eq 1 ]; then - CP_FINAL=0 - CP_BUILDS=0 -fi - -if [ $CP_SCRIPTS -eq 1 ]; then - # scripts - FULLDST=$DST/config/scripts - makedir $FULLDST - cpfile $BDIR/rom.json $FULLDST - cpfile $BDIR/metadata-rom.txt $FULLDST - cpfile $BDIR/storage_sm_rom.json $FULLDST - cpfile $BASEDIR/src/sm/sm_keccakf/keccak_script.json $FULLDST/keccak_script.json - cpfile $BASEDIR/src/sm/sm_keccakf/keccak_connections.json $FULLDST/keccak_connections.json - cpfile $BASEDIR/src/sm/sm_sha256f/sha256_script.json $FULLDST/sha256_script.json - cpfile $BASEDIR/src/sm/sm_sha256f/sha256_gates.json $FULLDST/sha256_gates.json -fi - -if [ $CP_ZKEVM -eq 1 ]; then - # zkevm - FULLDST=$DST/config/zkevm - makedir $FULLDST - cpfile $BDIR/zkevm.const $FULLDST - cpfile $BDIR/zkevm.verifier_cpp/zkevm.verifier.dat $FULLDST/zkevm.verifier.dat - cpfile $BDIR/zkevm.consttree $FULLDST - cpfile $BDIR/zkevm.starkinfo.json $FULLDST - cpfile $BDIR/zkevm.verkey.json $FULLDST - if [ $ONLY_CONFIG -eq 0 ]; then - cpdir $BDIR/zkevm.verifier_cpp $DST/c_files - cpdir $BDIR/zkevm.chelpers $DST/c_files - cpdir $BDIR/pil/zkevm $DST/pil/ - fi -fi - -if [ $CP_C12A -eq 1 ]; then - # c12a - FULLDST=$DST/config/c12a - makedir $FULLDST - cpfile $BDIR/c12a.const $FULLDST - cpfile $BDIR/c12a.exec $FULLDST - cpfile $BDIR/c12a.consttree $FULLDST - cpfile $BDIR/c12a.verkey.json $FULLDST - cpfile $BDIR/c12a.starkinfo.json $FULLDST - if [ $ONLY_CONFIG -eq 0 ]; then - cpfile $BDIR/c12a.pil $DST/pil - cpdir $BDIR/c12a.chelpers $DST/c_files - fi -fi - -if [ $CP_RECURSIVE1 -eq 1 ]; then - # recursive1 - FULLDST=$DST/config/recursive1 - makedir $FULLDST - cpfile $BDIR/recursive1.const $FULLDST - cpfile $BDIR/recursive1_cpp/recursive1.dat $FULLDST/recursive1.verifier.dat - cpfile $BDIR/recursive1.consttree $FULLDST - cpfile $BDIR/recursive1.exec $FULLDST - cpfile $BDIR/recursive.starkstruct.json $FULLDST/recursive1.starkstruct.json - cpfile $BDIR/recursive1.starkinfo.json $FULLDST - cpfile $BDIR/recursive1.verkey.json $FULLDST - if [ $ONLY_CONFIG -eq 0 ]; then - cpfile $BDIR/recursive1.pil $DST/pil - cpdir $BDIR/recursive1_cpp $DST/c_files - cpdir $BDIR/recursive1.chelpers $DST/c_files - fi -fi - -if [ $CP_RECURSIVE2 -eq 1 ]; then - # recursive 2 - FULLDST=$DST/config/recursive2 - makedir $FULLDST - cpfile $BDIR/recursive2.starkinfo.json $FULLDST - cpfile $BDIR/recursive.starkstruct.json $FULLDST/recursive2.starkstruct.json - cpfile $BDIR/recursive2.exec $FULLDST - cpfile $BDIR/recursive2_cpp/recursive2.dat $FULLDST/recursive2.verifier.dat - cpfile $BDIR/recursive2.verkey.json $FULLDST - cpfile $BDIR/recursive2.consttree $FULLDST - cpfile $BDIR/recursive2.const $FULLDST - if [ $ONLY_CONFIG -eq 0 ]; then - cpfile $BDIR/recursive2.pil $DST/pil - cpdir $BDIR/recursive2_cpp $DST/c_files - cpdir $BDIR/recursive2.chelpers $DST/c_files - fi -fi - -if [ $CP_RECURSIVEF -eq 1 ]; then - # recursive f - 
FULLDST=$DST/config/recursivef - makedir $FULLDST - cpfile $BDIR/recursivef.verkey.json $FULLDST - cpfile $BDIR/recursivef.consttree $FULLDST - cpfile $BDIR/recursivef.starkinfo.json $FULLDST - cpfile $BDIR/recursivef.exec $FULLDST - cpfile $BDIR/recursivef.const $FULLDST - cpfile $BDIR/recursivef_cpp/recursivef.dat $FULLDST/recursivef.verifier.dat - if [ $ONLY_CONFIG -eq 0 ]; then - cpfile $BDIR/recursivef.pil $DST/pil - cpdir $BDIR/recursivef_cpp $DST/c_files - cpdir $BDIR/recursivef.chelpers $DST/c_files - fi -fi - -if [ $CP_FINAL -eq 1 ]; then - # final - FULLDST=$DST/config/final - makedir $FULLDST - if [ $FINAL_PHASE_2 -eq 1 ]; then - cpfile $BDIR/final.fflonk.zkey $FULLDST - cpfile $BDIR/final.fflonk.verkey.json $FULLDST - fi - if [ $FINAL_PHASE_1 -eq 1 ]; then - cpfile $BDIR/final_cpp/final.dat $FULLDST/final.verifier.dat - if [ $ONLY_CONFIG -eq 0 ]; then - cpdir $BDIR/final_cpp $DST/c_files - fi - fi -fi - -if [ $ONLY_CONFIG -eq 0 ]; then - if [ $CP_CIRCOM -eq 1 ]; then - # circom - FULLDST=$DST/circom - makedir $FULLDST - for F in $BDIR/*.circom; do - cpfile $F $FULLDST - done - fi - - if [ $GENERATE_HASH -eq 1 ]; then - FIND_EXTRA_ARG="" - HASHFILE=$BDIR/sha256.txt - TMPHASHFILE=$HASHFILE".tmp" - if [ -f $HASHFILE -a $FINAL_PHASE_1 -eq 0 -a $FINAL_PHASE_2 -eq 1 ]; then - echo "calculating sha256 of newer files ...." - FIND_EXTRA_ARG=" -newer steps/fflonk_setup" - cp $HASHFILE $TMPHASHFILE - mv $HASHFILE $HASHFILE"."`date +'%Y%m%d_%H%M%S'` - else - [ -f $TMPHASHFILE ] && rm -f $TMPHASHFILE - fi - if [ ! -f $HASHFILE ]; then - echo "calculating sha256 ...." - for F in `LC_ALL=C; cd $BDIR; find * -type f ! -name "steps.log" ! -path "steps*" ! -name "sha256.txt*" ! -name "last_step.txt"$FIND_EXTRA_ARG|sort`; do - echo " sha256($F) ...." - (cd $BDIR; sha256sum $F) >> $TMPHASHFILE - done - mv $TMPHASHFILE $HASHFILE - fi - fi - - if [ $CP_BUILDS -eq 1 ]; then - # builds - FULLDST=$DST/build - makedir $FULLDST - - cpfile package.json $FULLDST - cpfile package-lock.json $FULLDST - - DEPENDENCIES=$BDIR/dependencies.txt - cpfile $DEPENDENCIES $FULLDST - cpdir $BDIR/steps $FULLDST - - BUILDS="" - if [ -f $HASHFILE ]; then - BUILDS=`basename $HASHFILE`" " - fi - cpfile $BDIR/steps.log $FULLDST - if [ $FINAL_PHASE_2 -eq 1 ]; then - cpfile $BDIR/final.fflonk.verifier.sol $FULLDST - fi - if [ $FINAL_PHASE_1 -eq 1 ]; then - BUILDS="c12a.starkstruct.json final.r1cs final.sym recursive.starkstruct.json recursive1.r1cs recursive1.sym recursive2.r1cs recursive2.sym recursivef.r1cs recursivef.starkstruct.json recursivef.sym zkevm.starkstruct.json zkevm.verifier.r1cs zkevm.verifier.sym" - for F in $BUILDS; do - cpfile $BDIR/$F $FULLDST - done - fi - fi -fi
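
The three new test suites share the same boilerplate for turning an EJS circuit template into a compiled circom_tester instance: read the template, render it with { publics, isTest: true }, write the result into a throw-away directory, and compile it with wasm_tester. Below is a minimal sketch of that shared step, assuming the same circom_tester, temporary and ejs packages the suites already use; compileTemplate is an illustrative name, not a helper that exists in the repo.

const path = require("path");
const fs = require("fs");
const ejs = require("ejs");
const tmp = require("temporary");
const wasm_tester = require("circom_tester").wasm;

// Renders one of the .circom.ejs templates with its publics layout and compiles it
// in a temporary directory, mirroring the beforeEach blocks of the test suites.
// `testerOptions` carries whatever the suite needs (O level, prime, include paths).
async function compileTemplate(templatePath, publics, testerOptions) {
    const template = await fs.promises.readFile(templatePath, "utf8");
    const content = ejs.render(template, { publics, isTest: true });
    const circuitFile = path.join(new tmp.Dir().path, "circuit.circom");
    await fs.promises.writeFile(circuitFile, content);
    return wasm_tester(circuitFile, testerOptions);
}

// Illustrative usage, matching the recursive2 batch suite:
// circuit = await compileTemplate(
//     path.join(__dirname, "../../src/templates/helpers/recursive2/recursive2_checks_batch.circom.ejs"),
//     batchPublics,
//     { O: 1, prime: "goldilocks", include: "node_modules/pil-stark/circuits.gl" }
// );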
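
The "solidity hash matches circom hash" cases compare the circuit's publicsHash output against a value computed off-circuit with ethers v6: sha256 over the tightly packed publics plus the aggregator address, reduced modulo the SNARK scalar field so it fits in a single field element. A standalone sketch of that expected-value computation follows, assuming the ethers, ffjavascript and zkevm-commonjs versions pinned in package-lock.json; expectedPublicsHash and the usage lines are illustrative only.

const { solidityPackedSha256 } = require("ethers"); // ethers v6 API
const { Scalar } = require("ffjavascript");
const { FrSNARK } = require("@0xpolygonhermez/zkevm-commonjs/src/constants");

// sha256(abi.encodePacked(...)) of the public inputs, reduced into the SNARK
// scalar field, exactly as the final-circuit tests build their expected value.
function expectedPublicsHash(solidityTypes, solidityValues) {
    const packedHash = solidityPackedSha256(solidityTypes, solidityValues);
    return Scalar.mod(Scalar.fromString(packedHash, 16), FrSNARK);
}

// Illustrative usage with the batch layout from generatePublicsBatch():
// const expected = expectedPublicsHash(publicsBatchHashTypesSolidity, publicsBatchSolidity);
// await circuit.assertOut(witness, { publicsHash: expected });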
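
The recursive2 checks only accept a pair of proofs A and B that chain: B's old state root, old accumulated input hash and old batch number must equal A's new ones, chainId and forkId must match, and the aggregated publics keep A's "old" side together with B's "new" side (each negative test above flips exactly one of these links). A compact sketch of that chaining rule for the pre-Feijoa batch layout, where aggregateBatchPublics is an illustrative name and the thrown errors only mirror, in plain JavaScript, the mismatches the circuit rejects.

// Builds the aggregated publics the happy-path test expects, and throws on the
// same mismatches the negative tests provoke (old/new chaining, chainId, forkId).
function aggregateBatchPublics(a, b) {
    if (b.oldStateRoot !== a.newStateRoot) throw new Error("state roots do not chain");
    if (b.oldBatchAccInputHash !== a.newBatchAccInputHash) throw new Error("acc input hashes do not chain");
    if (b.oldBatchNum !== a.newBatchNum) throw new Error("batch numbers do not chain");
    if (b.chainId !== a.chainId || b.forkId !== a.forkId) throw new Error("chainId/forkId mismatch");

    return {
        oldStateRoot: a.oldStateRoot,
        oldBatchAccInputHash: a.oldBatchAccInputHash,
        oldBatchNum: a.oldBatchNum,
        chainId: a.chainId,
        forkId: a.forkId,
        newStateRoot: b.newStateRoot,
        newBatchAccInputHash: b.newBatchAccInputHash,
        newLocalExitRoot: b.newLocalExitRoot,
        newBatchNum: b.newBatchNum,
    };
}

// Illustrative usage with the generators from recursive2.circuit.test.js:
// const { publicsBatchA, publicsBatchB, publicsAggregated } = generatePublics();
// aggregateBatchPublics(publicsBatchA, publicsBatchB) should deep-equal publicsAggregated.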