diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml
index 6b2de798b2..b4a012237b 100644
--- a/.github/actions/setup/action.yml
+++ b/.github/actions/setup/action.yml
@@ -37,11 +37,11 @@ runs:
           ~/.cargo/git/db/
           target/
           ~/.rustup/
-        key: rust-nightly-2024-04-17-${{ hashFiles('**/Cargo.toml') }}
-        restore-keys: rust-nightly-2024-04-17-
+        key: rust-1.79.0-${{ hashFiles('**/Cargo.toml') }}
+        restore-keys: rust-1.79.0-

     - name: Setup toolchain
       id: rustc-toolchain
       shell: bash
       run: |
-        curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain nightly-2024-04-17 -y
+        curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain 1.79.0 -y
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index d9cc194302..09e7cbaded 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -17,3 +17,4 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
+    target-branch: "dev"
diff --git a/.github/workflows/book.yml b/.github/workflows/book.yml
index ba45d2b522..82449ca963 100644
--- a/.github/workflows/book.yml
+++ b/.github/workflows/book.yml
@@ -108,7 +108,7 @@ jobs:
           .

       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: github-pages
           path: ${{ runner.temp }}/artifact.tar
@@ -135,4 +135,4 @@ jobs:
     steps:
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v2
\ No newline at end of file
+        uses: actions/deploy-pages@v4
\ No newline at end of file
diff --git a/.github/workflows/docker-gnark.yml b/.github/workflows/docker-gnark.yml
index 8f9ca2b854..faeda798a1 100644
--- a/.github/workflows/docker-gnark.yml
+++ b/.github/workflows/docker-gnark.yml
@@ -15,7 +15,13 @@ on:
 jobs:
   test-docker:
     name: Test
-    runs-on: runs-on,runner=64cpu-linux-arm64,spot=false
+    runs-on:
+      [
+        runs-on,
+        runner=64cpu-linux-arm64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -35,5 +41,5 @@ jobs:
           SP1_GNARK_IMAGE: sp1-gnark
         with:
           command: test
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --release -p sp1-prover -- --exact tests::test_e2e
diff --git a/.github/workflows/docker-publish-gnark.yml b/.github/workflows/docker-publish-gnark.yml
index c36a28d302..34be51d005 100644
--- a/.github/workflows/docker-publish-gnark.yml
+++ b/.github/workflows/docker-publish-gnark.yml
@@ -34,7 +34,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Install Docker BuildX
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
         id: buildx
         with:
           install: true
@@ -44,7 +44,7 @@ jobs:
       - name: Log into registry ${{ env.REGISTRY }}
         # Ensure this doesn't trigger on PR's
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
@@ -54,7 +54,7 @@ jobs:
       # https://github.com/docker/metadata-action
       - name: Extract Docker metadata
         id: meta
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

@@ -86,7 +86,7 @@ jobs:
       # https://github.com/docker/build-push-action
       # https://github.com/docker/build-push-action/blob/master/docs/advanced/cache.md
       - name: Build and push Docker image
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
         with:
           context: .
           file: ./Dockerfile.gnark-ffi
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index ebedfcea29..b490ff2d2e 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -31,7 +31,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Install Docker BuildX
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
         id: buildx
         with:
           install: true
@@ -41,7 +41,7 @@ jobs:
       - name: Log into registry ${{ env.REGISTRY }}
         # Ensure this doesn't trigger on PR's
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
@@ -51,7 +51,7 @@ jobs:
       # https://github.com/docker/metadata-action
       - name: Extract Docker metadata
         id: meta
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

@@ -83,7 +83,7 @@ jobs:
       # https://github.com/docker/build-push-action
       # https://github.com/docker/build-push-action/blob/master/docs/advanced/cache.md
       - name: Build and push Docker image
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
         with:
           context: ./cli/docker
           file: ./cli/docker/Dockerfile
diff --git a/.github/workflows/eval.yml b/.github/workflows/eval.yml
index 77bead45df..02e4e3c2cf 100644
--- a/.github/workflows/eval.yml
+++ b/.github/workflows/eval.yml
@@ -41,7 +41,7 @@ jobs:
           ./eval.sh

       - name: Upload Benchmark as Artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
           name: benchmark-results-${{ matrix.arch }}
           path: benchmark.csv
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 075774d3e9..02e6535f4f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -20,7 +20,17 @@ concurrency:
 jobs:
   plonk:
     name: Plonk Native
-    runs-on: runs-on,cpu=64,ram=256,family=m7i+m7a,hdd=80,image=ubuntu22-full-x64,spot=false
+    runs-on:
+      [
+        runs-on,
+        cpu=64,
+        ram=256,
+        family=m7i+m7a,
+        hdd=80,
+        image=ubuntu22-full-x64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -34,7 +44,7 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: test
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --release -p sp1-sdk --features native-gnark -- test_e2e_prove_plonk --nocapture
         env:
           RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 -C target-cpu=native
@@ -42,7 +52,17 @@ jobs:

   plonk-docker:
     name: Plonk Docker
-    runs-on: runs-on,cpu=64,ram=256,family=m7i+m7a,hdd=80,image=ubuntu22-full-x64,spot=false
+    runs-on:
+      [
+        runs-on,
+        cpu=64,
+        ram=256,
+        family=m7i+m7a,
+        hdd=80,
+        image=ubuntu22-full-x64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -56,7 +76,7 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: test
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --release -p sp1-sdk -- test_e2e_prove_plonk --nocapture
         env:
           RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 -C target-cpu=native
@@ -64,17 +84,23 @@ jobs:

   check-branch:
     name: Check branch
-    runs-on: ubuntu-latest
+    runs-on: [ubuntu-latest, "run-id=${{ github.run_id }}"]
     steps:
       - name: Check branch
-        if: github.head_ref != 'dev' && !startsWith(github.ref, 'release/') && !startsWith(github.ref, 'hotfix/')
+        if: github.head_ref != 'dev' && !startsWith(github.head_ref, 'release/') && !startsWith(github.head_ref, 'hotfix/')
         run: |
           echo "ERROR: You can only merge to main from dev, release/*, or hotfix/*."
           exit 1

   ssz-withdrawals:
     name: Example (SSZ Withdrawals)
-    runs-on: runs-on,runner=64cpu-linux-arm64,spot=false
+    runs-on:
+      [
+        runs-on,
+        runner=64cpu-linux-arm64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -108,7 +134,13 @@ jobs:

   tendermint:
     name: Example (Tendermint)
-    runs-on: runs-on,runner=64cpu-linux-arm64,spot=false
+    runs-on:
+      [
+        runs-on,
+        runner=64cpu-linux-arm64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml
index 3fe34c62a8..ef4831f9a6 100644
--- a/.github/workflows/pr.yml
+++ b/.github/workflows/pr.yml
@@ -41,14 +41,14 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: check
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --all-targets --all-features

       - name: Run cargo test core-v2
         uses: actions-rs/cargo@v1
         with:
           command: test
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --release --package sp1-recursion-core-v2 --package sp1-recursion-circuit-v2 --features native-gnark
         env:
           RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 -C target-cpu=native
@@ -60,7 +60,13 @@ jobs:

   test-x86:
     name: Test (x86-64)
-    runs-on: runs-on,runner=64cpu-linux-x64,spot=false
+    runs-on:
+      [
+        runs-on,
+        runner=64cpu-linux-x64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -74,14 +80,14 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: check
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --all-targets --all-features

       - name: Run cargo test
         uses: actions-rs/cargo@v1
         with:
           command: test
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --release --features native-gnark
         env:
           RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 -C target-cpu=native
@@ -91,7 +97,13 @@ jobs:

   test-arm:
     name: Test (ARM)
-    runs-on: runs-on,runner=64cpu-linux-arm64,spot=false
+    runs-on:
+      [
+        runs-on,
+        runner=64cpu-linux-arm64,
+        spot=false,
+        "run-id=${{ github.run_id }}",
+      ]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -105,14 +117,14 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: check
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --all-targets --all-features

       - name: Run cargo test
         uses: actions-rs/cargo@v1
         with:
           command: test
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           args: --release --features native-gnark
         env:
           RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 -C target-cpu=native
@@ -122,7 +134,7 @@ jobs:

   lint:
     name: Formatting & Clippy
-    runs-on: runs-on,runner=8cpu-linux-x64
+    runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -150,7 +162,7 @@ jobs:

   examples:
     name: Examples
-    runs-on: runs-on,runner=8cpu-linux-x64
+    runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
@@ -180,7 +192,7 @@ jobs:

   cli:
     name: CLI
-    runs-on: runs-on,runner=8cpu-linux-x64
+    runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
     env:
       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
     steps:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 507d599a7f..ca4b6bf786 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -16,7 +16,7 @@ env:
 jobs:
   prepare:
     name: Prepare release
-    runs-on: runs-on,runner=8cpu-linux-x64
+    runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
     timeout-minutes: 30
     outputs:
       tag_name: ${{ steps.release_info.outputs.tag_name }}
@@ -32,7 +32,7 @@ jobs:
         with:
           pull_token: ${{ secrets.PULL_TOKEN }}

-      # If it's a nightly release, tag with the release time. If the tag is `main`, we want to use 
+      # If it's a nightly release, tag with the release time. If the tag is `main`, we want to use
       # `latest` as the tag name. Else, use the tag name as is.
       - name: Compute release name and tag
         id: release_info
@@ -117,7 +117,7 @@ jobs:
         id: rustc-toolchain
         uses: actions-rs/toolchain@v1
         with:
-          toolchain: nightly-2024-04-17
+          toolchain: 1.79.0
           profile: minimal
           override: true
           targets: ${{ matrix.target }}
@@ -126,12 +126,12 @@ jobs:
         with:
           key: ${{ matrix.target }}
           cache-on-failure: true
-      
+
       - name: Install go
         uses: actions/setup-go@v5
         with:
-          go-version: '^1.22.1'
-      
+          go-version: "^1.22.1"
+
       - name: Check go installation
         run: |
           go version
@@ -201,7 +201,7 @@ jobs:

       # Creates the release for this specific version
       - name: Create release
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         with:
           name: ${{ needs.prepare.outputs.release_name }}
           tag_name: ${{ needs.prepare.outputs.tag_name }}
@@ -215,7 +215,7 @@ jobs:
       # tagged `nightly` for compatibility with `sp1up`
       - name: Update nightly release
         if: ${{ env.IS_NIGHTLY }}
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         with:
           name: "Nightly"
           tag_name: "nightly"
@@ -265,4 +265,4 @@ jobs:
           ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
         with:
           update_existing: true
-          filename: .github/RELEASE_FAILURE_ISSUE_TEMPLATE.md
\ No newline at end of file
+          filename: .github/RELEASE_FAILURE_ISSUE_TEMPLATE.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2dfd143b04..85557b6352 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -10,7 +10,7 @@ or an expert, we can use your help. This document will help you get started. **Do
 not let the document intimidate you**.
 It should be considered as a guide to help you navigate the process.

-The [Telegram](https://t.me/succinct_sp1) is available for any concerns you may have that are not covered in this guide.
+The [Telegram](https://t.me/+AzG4ws-kD24yMGYx) is available for any concerns you may have that are not covered in this guide.

 If you contribute to this project, your contributions will be made to the project under both Apache 2.0 and the MIT license.
diff --git a/Cargo.lock b/Cargo.lock index 94f481b3d9..439931343f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -58,7 +58,7 @@ dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy", + "zerocopy 0.7.35", ] [[package]] @@ -78,9 +78,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-primitives" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f783611babedbbe90db3478c120fb5f5daacceffc210b39adc0af4fe0da70bad" +checksum = "ccb3ead547f4532bc8af961649942f0b9c16ee9226e26caa3f38420651cc0bf4" dependencies = [ "alloy-rlp", "bytes", @@ -100,9 +100,9 @@ dependencies = [ [[package]] name = "alloy-rlp" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b155716bab55763c95ba212806cf43d05bcc70e5f35b02bad20cf5ec7fe11fed" +checksum = "a43b18702501396fa9bcdeecd533bc85fac75150d308fc0f6800a01e6234a003" dependencies = [ "arrayvec", "bytes", @@ -110,56 +110,56 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bad41a7c19498e3f6079f7744656328699f8ea3e783bdd10d85788cd439f572" +checksum = "2b40397ddcdcc266f59f959770f601ce1280e699a91fc1862f29cef91707cd09" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9899da7d011b4fe4c406a524ed3e3f963797dbc93b45479d60341d3a27b252" +checksum = "867a5469d61480fea08c7333ffeca52d5b621f5ca2e44f271b117ec1fc9a0525" dependencies = [ "alloy-sol-macro-input", "const-hex", "heck", - "indexmap 2.2.6", + "indexmap 2.3.0", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32d595768fdc61331a132b6f65db41afae41b9b97d36c21eb1b955c422a7e60" +checksum = "2e482dc33a32b6fadbc0f599adea520bd3aaa585c141a80b404d0a3e3fa72528" dependencies = [ "const-hex", "dunce", "heck", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", "syn-solidity", ] [[package]] name = "alloy-sol-types" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a49042c6d3b66a9fe6b2b5a8bf0d39fc2ae1ee0310a2a26ffedd79fb097878dd" +checksum = "a91ca40fa20793ae9c3841b83e74569d1cc9af29a2f5237314fd3452d51e38c7" dependencies = [ "alloy-primitives", "alloy-sol-macro", @@ -199,9 +199,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", @@ -214,33 +214,33 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" 
-version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -381,9 +381,9 @@ dependencies = [ [[package]] name = "arrayref" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" +checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" [[package]] name = "arrayvec" @@ -399,7 +399,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -427,7 +427,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -447,9 +447,9 @@ dependencies = [ "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.3.1", + "hyper 1.4.1", "hyper-util", "itoa", "matchit", @@ -480,7 +480,7 @@ dependencies = [ "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -558,7 +558,7 @@ version = "0.69.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cexpr", "clang-sys", "itertools 0.12.1", @@ -571,7 +571,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.67", + "syn 2.0.72", "which", ] @@ -598,9 +598,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "bitvec" @@ -636,16 +636,16 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52" +checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", - "rayon", + "rayon-core", ] [[package]] @@ -694,9 +694,9 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.16.1" +version = "1.16.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b236fc92302c97ed75b38da1f4917b5cdda4984745740f153a5d3059e48d725e" +checksum = "102087e286b4677862ea56cf8fc58bb2cdfa8725c40ffb80fe3a008eb7f2fc83" [[package]] name = "byteorder" @@ -706,9 +706,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +checksum = "fca2be1d5c43812bae364ee3f30b3afcb7877cf59f4aeb94c66f313a41d2fac9" dependencies = [ "serde", ] @@ -753,9 +753,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "324c74f2155653c90b04f25b2a47a8a631360cb908f92a772695f430c7e31052" +checksum = "26a5c3fd7bfa1ce3897a3a3501d362b2d87b7f2583ebcb4a949ec25911025cbc" dependencies = [ "jobserver", "libc", @@ -776,6 +776,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + [[package]] name = "chrono" version = "0.4.38" @@ -786,7 +792,7 @@ dependencies = [ "iana-time-zone", "num-traits", "serde", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -839,9 +845,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.9" +version = "4.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462" +checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc" dependencies = [ "clap_builder", "clap_derive", @@ -849,9 +855,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.9" +version = "4.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942" +checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99" dependencies = [ "anstream", "anstyle", @@ -861,21 +867,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.8" +version = "4.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" +checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" [[package]] name = "cobs" @@ -937,9 +943,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "console" @@ -1129,6 +1135,16 @@ dependencies = [ "cipher", ] 
+[[package]] +name = "ctrlc" +version = "3.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" +dependencies = [ + "nix", + "windows-sys 0.52.0", +] + [[package]] name = "curve25519-dalek" version = "4.1.3" @@ -1152,14 +1168,14 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] name = "darling" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -1167,27 +1183,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] name = "darling_macro" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -1231,7 +1247,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.0", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -1317,9 +1333,9 @@ dependencies = [ [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "elf" @@ -1534,7 +1550,7 @@ dependencies = [ "regex", "serde", "serde_json", - "syn 2.0.67", + "syn 2.0.72", "toml", "walkdir", ] @@ -1552,7 +1568,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -1570,7 +1586,7 @@ dependencies = [ "ethabi", "generic-array 0.14.7", "k256", - "num_enum 0.7.2", + "num_enum 0.7.3", "once_cell", "open-fastrlp", "rand", @@ -1578,7 +1594,7 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.67", + "syn 2.0.72", "tempfile", "thiserror", "tiny-keccak", @@ -1751,6 +1767,12 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "fnv" version = "1.0.7" @@ -1853,7 +1875,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -1951,11 +1973,11 @@ checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "git2" -version = "0.18.3" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" +checksum = 
"b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", "libgit2-sys", "log", @@ -2015,7 +2037,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.2.6", + "indexmap 2.3.0", "slab", "tokio", "tokio-util", @@ -2034,7 +2056,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.1.0", - "indexmap 2.2.6", + "indexmap 2.3.0", "slab", "tokio", "tokio-util", @@ -2177,9 +2199,9 @@ dependencies = [ [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http 1.1.0", @@ -2194,7 +2216,7 @@ dependencies = [ "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -2212,9 +2234,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.29" +version = "0.14.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" dependencies = [ "bytes", "futures-channel", @@ -2236,16 +2258,16 @@ dependencies = [ [[package]] name = "hyper" -version = "1.3.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ "bytes", "futures-channel", "futures-util", "h2 0.4.5", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -2263,7 +2285,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.29", + "hyper 0.14.30", "rustls 0.21.12", "tokio", "tokio-rustls 0.24.1", @@ -2277,9 +2299,9 @@ checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" dependencies = [ "futures-util", "http 1.1.0", - "hyper 1.3.1", + "hyper 1.4.1", "hyper-util", - "rustls 0.23.10", + "rustls 0.23.12", "rustls-pki-types", "tokio", "tokio-rustls 0.26.0", @@ -2295,7 +2317,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.3.1", + "hyper 1.4.1", "hyper-util", "native-tls", "tokio", @@ -2305,16 +2327,16 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" +checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", - "http-body 1.0.0", - "hyper 1.3.1", + "http-body 1.0.1", + "hyper 1.4.1", "pin-project-lite", "socket2", "tokio", @@ -2419,9 +2441,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" dependencies = [ "equivalent", "hashbrown 0.14.5", @@ 
-2478,9 +2500,9 @@ dependencies = [ [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" @@ -2517,9 +2539,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] @@ -2617,9 +2639,9 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libgit2-sys" -version = "0.16.2+1.7.2" +version = "0.17.0+1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8" +checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" dependencies = [ "cc", "libc", @@ -2629,12 +2651,12 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2649,7 +2671,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", ] @@ -2737,15 +2759,22 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" dependencies = [ + "hermit-abi", "libc", "wasi", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + [[package]] name = "native-tls" version = "0.2.12" @@ -2763,6 +2792,18 @@ dependencies = [ "tempfile", ] +[[package]] +name = "nix" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "cfg_aliases", + "libc", +] + [[package]] name = "nohash-hasher" version = "0.2.0" @@ -2912,11 +2953,11 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" +checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" dependencies = [ - "num_enum_derive 0.7.2", + "num_enum_derive 0.7.3", ] [[package]] @@ -2933,14 +2974,14 @@ dependencies = [ [[package]] name = "num_enum_derive" -version = "0.7.2" +version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" +checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -2960,9 +3001,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.36.0" +version = "0.36.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "576dfe1fc8f9df304abb159d767a29d0476f7750fbf8aa7ad07816004a207434" +checksum = "3f203fa8daa7bb185f760ae12bd8e097f63d17041dcdcaf675ac54cdf863170e" dependencies = [ "memchr", ] @@ -2981,9 +3022,9 @@ checksum = "e296cf87e61c9cfc1a61c3c63a0f7f286ed4554e0e22be84e8a38e1d264a2a29" [[package]] name = "oorandom" -version = "11.1.3" +version = "11.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "open-fastrlp" @@ -3012,11 +3053,11 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.64" +version = "0.10.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if", "foreign-types", "libc", @@ -3033,7 +3074,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -3044,9 +3085,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.102" +version = "0.9.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" dependencies = [ "cc", "libc", @@ -3392,7 +3433,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -3467,15 +3508,25 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.10" +version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "560131c633294438da9f7c4b08189194b20946c8274c6b9e38881a7874dc8ee8" +checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" dependencies = [ "memchr", "thiserror", "ucd-trie", ] +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.3.0", +] + [[package]] name = "pharos" version = "0.5.3" @@ -3503,7 +3554,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -3564,9 +3615,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" 
+checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" [[package]] name = "postcard" @@ -3587,9 +3638,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "dee4364d9f3b902ef14fab8a1ddffb783a1cb6b4bba3bfc1fa3922732c7de97f" +dependencies = [ + "zerocopy 0.6.6", +] [[package]] name = "prettyplease" @@ -3598,7 +3652,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" dependencies = [ "proc-macro2", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -3669,13 +3723,13 @@ dependencies = [ [[package]] name = "proptest" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.5.0", + "bitflags 2.6.0", "lazy_static", "num-traits", "rand", @@ -3694,7 +3748,38 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", - "prost-derive", + "prost-derive 0.12.6", +] + +[[package]] +name = "prost" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc" +dependencies = [ + "bytes", + "prost-derive 0.13.1", +] + +[[package]] +name = "prost-build" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bb182580f71dd070f88d01ce3de9f4da5021db7115d2e1c3605a754153b77c1" +dependencies = [ + "bytes", + "heck", + "itertools 0.13.0", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost 0.13.1", + "prost-types", + "regex", + "syn 2.0.72", + "tempfile", ] [[package]] @@ -3707,7 +3792,29 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", +] + +[[package]] +name = "prost-derive" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" +dependencies = [ + "anyhow", + "itertools 0.13.0", + "proc-macro2", + "quote", + "syn 2.0.72", +] + +[[package]] +name = "prost-types" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cee5168b05f49d4b0ca581206eb14a7b22fafd963efe729ac48eb03266e25cc2" +dependencies = [ + "prost 0.13.1", ] [[package]] @@ -3736,7 +3843,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls 0.23.10", + "rustls 0.23.12", "thiserror", "tokio", "tracing", @@ -3752,7 +3859,7 @@ dependencies = [ "rand", "ring 0.17.8", "rustc-hash", - "rustls 0.23.10", + "rustls 0.23.12", "slab", "thiserror", "tinyvec", @@ -3761,14 +3868,13 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" +checksum = "8bffec3605b73c6f1754535084a85229fa8a30f86014e6c81aeec4abb68b0285" dependencies = [ "libc", "once_cell", "socket2", - "tracing", 
"windows-sys 0.52.0", ] @@ -3857,11 +3963,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] @@ -3933,7 +4039,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.29", + "hyper 0.14.30", "hyper-rustls 0.24.2", "ipnet", "js-sys", @@ -3973,9 +4079,9 @@ dependencies = [ "futures-util", "h2 0.4.5", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.3.1", + "hyper 1.4.1", "hyper-rustls 0.27.2", "hyper-tls", "hyper-util", @@ -3988,7 +4094,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.10", + "rustls 0.23.12", "rustls-pemfile 2.1.2", "rustls-pki-types", "serde", @@ -4179,7 +4285,7 @@ version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", @@ -4200,14 +4306,14 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.10" +version = "0.23.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" +checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" dependencies = [ "once_cell", "ring 0.17.8", "rustls-pki-types", - "rustls-webpki 0.102.4", + "rustls-webpki 0.102.6", "subtle", "zeroize", ] @@ -4249,9 +4355,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.4" +version = "0.102.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff448f7e92e913c4b7d4c6d8e4540a1724b319b4152b8aef6d4cf8339712b33e" +checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" dependencies = [ "ring 0.17.8", "rustls-pki-types", @@ -4326,9 +4432,9 @@ dependencies = [ [[package]] name = "scc" -version = "2.1.1" +version = "2.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76ad2bbb0ae5100a07b7a6f2ed7ab5fd0045551a4c507989b7a620046ea3efdc" +checksum = "05ccfb12511cdb770157ace92d7dda771e498445b78f9886e8cdbc5140a4eced" dependencies = [ "sdd", ] @@ -4372,9 +4478,9 @@ dependencies = [ [[package]] name = "sdd" -version = "0.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84345e4c9bd703274a082fb80caaa99b7612be48dfaa1dd9266577ec412309d" +checksum = "177258b64c0faaa9ffd3c65cd3262c2bc7e2588dbbd9c1641d0346145c1bbda8" [[package]] name = "sec1" @@ -4392,11 +4498,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -4405,9 +4511,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" dependencies = [ "core-foundation-sys", "libc", @@ -4469,16 +4575,17 @@ checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] name = "serde_json" -version = "1.0.120" +version = "1.0.121" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" +checksum = "4ab380d7d9f22ef3f21ad3e6c1ebe8e4fc7a2000ccba2e4d71fc96f15b2cb609" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] @@ -4495,9 +4602,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -4524,7 +4631,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.6", + "indexmap 2.3.0", "serde", "serde_derive", "serde_json", @@ -4541,7 +4648,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -4566,7 +4673,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -4689,16 +4796,17 @@ dependencies = [ [[package]] name = "sp1-build" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "cargo_metadata", "clap", + "dirs", ] [[package]] name = "sp1-cli" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anstyle", "anyhow", @@ -4726,7 +4834,7 @@ dependencies = [ [[package]] name = "sp1-core" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "arrayref", @@ -4775,6 +4883,7 @@ dependencies = [ "sp1-derive", "sp1-primitives", "sp1-zkvm", + "static_assertions", "strum", "strum_macros", "tempfile", @@ -4789,7 +4898,7 @@ dependencies = [ [[package]] name = "sp1-derive" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "proc-macro2", "quote", @@ -4798,7 +4907,7 @@ dependencies = [ [[package]] name = "sp1-eval" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "clap", "csv", @@ -4809,7 +4918,7 @@ dependencies = [ [[package]] name = "sp1-helper" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "cargo_metadata", "chrono", @@ -4818,7 +4927,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -4831,7 +4940,7 @@ dependencies = [ [[package]] name = "sp1-primitives" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "itertools 0.13.0", "lazy_static", @@ -4843,7 +4952,7 @@ dependencies = [ [[package]] name = "sp1-prover" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -4858,6 +4967,7 @@ dependencies = [ "p3-challenger", "p3-commit", "p3-field", + "p3-matrix", "rayon", "serde", "serde_json", @@ -4878,7 +4988,7 @@ dependencies = [ [[package]] name = "sp1-recursion-circuit" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "ff 0.13.0", @@ -4909,7 +5019,7 @@ dependencies = [ [[package]] name = "sp1-recursion-circuit-v2" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "ff 0.13.0", @@ -4947,7 +5057,7 @@ 
dependencies = [ [[package]] name = "sp1-recursion-compiler" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "backtrace", "itertools 0.13.0", @@ -4976,7 +5086,7 @@ dependencies = [ [[package]] name = "sp1-recursion-core" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "arrayref", "backtrace", @@ -5011,7 +5121,7 @@ dependencies = [ [[package]] name = "sp1-recursion-core-v2" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "arrayref", "backtrace", @@ -5048,7 +5158,7 @@ dependencies = [ [[package]] name = "sp1-recursion-derive" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "proc-macro2", "quote", @@ -5057,7 +5167,7 @@ dependencies = [ [[package]] name = "sp1-recursion-gnark-cli" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "clap", @@ -5066,7 +5176,7 @@ dependencies = [ [[package]] name = "sp1-recursion-gnark-ffi" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -5090,7 +5200,7 @@ dependencies = [ [[package]] name = "sp1-recursion-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "itertools 0.13.0", "p3-air", @@ -5118,7 +5228,7 @@ dependencies = [ [[package]] name = "sp1-sdk" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "alloy-sol-types", "anyhow", @@ -5139,7 +5249,7 @@ dependencies = [ "p3-field", "p3-fri", "p3-matrix", - "prost", + "prost 0.12.6", "reqwest 0.12.5", "reqwest-middleware", "serde", @@ -5158,9 +5268,29 @@ dependencies = [ "vergen", ] +[[package]] +name = "sp1-server" +version = "1.0.1" +dependencies = [ + "bincode", + "ctrlc", + "prost 0.13.1", + "prost-build", + "prost-types", + "serde", + "serde_json", + "sp1-core", + "sp1-prover", + "tokio", + "tracing", + "tracing-subscriber", + "twirp", + "twirp-build", +] + [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", @@ -5243,14 +5373,14 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] name = "subtle" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d0208408ba0c3df17ed26eb06992cb1a1268d41b2c0e12e65203fbe3972cee5" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "subtle-encoding" @@ -5274,9 +5404,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.67" +version = "2.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff8655ed1d86f3af4ee3fd3263786bc14245ad17c4c7e85ba7187fb3ae028c90" +checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" dependencies = [ "proc-macro2", "quote", @@ -5285,14 +5415,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d71e19bca02c807c9faa67b5a47673ff231b6e7449b251695188522f1dc44b2" +checksum = "c837dc8852cb7074e46b444afb81783140dab12c58867b49fb3898fbafedf7ea" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -5351,9 +5481,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.15" +version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4873307b7c257eddcb50c9bedf158eb669578359fb28428bef438fec8e6ba7c2" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" 
[[package]] name = "tempfile" @@ -5369,22 +5499,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -5451,9 +5581,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -5466,32 +5596,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.38.0" +version = "1.39.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -5520,7 +5649,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.10", + "rustls 0.23.12", "rustls-pki-types", "tokio", ] @@ -5540,21 +5669,21 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.14", + "toml_edit 0.22.20", ] [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] @@ -5565,7 +5694,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.3.0", "toml_datetime", "winnow 0.5.40", ] @@ -5576,22 +5705,22 @@ version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.3.0", "toml_datetime", "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.22.14" +version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.3.0", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.13", + "winnow 0.6.18", ] [[package]] @@ -5642,7 +5771,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -5713,6 +5842,35 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "twirp" +version = "0.5.0" +source = "git+https://github.com/github/twirp-rs.git#d99f5362200479842caef7de2fffda747812102e" +dependencies = [ + "async-trait", + "axum", + "futures", + "http 1.1.0", + "http-body-util", + "hyper 1.4.1", + "prost 0.13.1", + "reqwest 0.12.5", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "url", +] + +[[package]] +name = "twirp-build" +version = "0.6.0" +source = "git+https://github.com/github/twirp-rs.git#d99f5362200479842caef7de2fffda747812102e" +dependencies = [ + "prost-build", +] + [[package]] name = "twirp-rs" version = "0.3.0" @@ -5725,8 +5883,8 @@ dependencies = [ "futures", "http 1.1.0", "http-body-util", - "hyper 1.3.1", - "prost", + "hyper 1.4.1", + "prost 0.12.6", "reqwest 0.12.5", "serde", "serde_json", @@ -5852,9 +6010,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "8.3.1" +version = "8.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e27d6bdd219887a9eadd19e1c34f32e47fa332301184935c6d9bca26f3cca525" +checksum = "2990d9ea5967266ea0ccf413a4aa5c42a93dbcfda9cb49a97de6931726b12566" dependencies = [ "anyhow", "cfg-if", @@ -5865,9 +6023,9 @@ dependencies = [ [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" @@ -5924,7 +6082,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", "wasm-bindgen-shared", ] @@ -5958,7 +6116,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6067,7 +6225,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ "windows-core", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -6076,7 +6234,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -6094,7 +6252,7 @@ version = "0.52.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -6114,18 +6272,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -6136,9 +6294,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -6148,9 +6306,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -6160,15 +6318,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -6178,9 +6336,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -6190,9 +6348,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" 
[[package]] name = "windows_x86_64_gnullvm" @@ -6202,9 +6360,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -6214,9 +6372,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" @@ -6229,9 +6387,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.13" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] @@ -6292,22 +6450,43 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "zerocopy" -version = "0.7.34" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +checksum = "854e949ac82d619ee9a14c66a1b674ac730422372ccb759ce0c39cabcf2bf8e6" dependencies = [ - "zerocopy-derive", + "byteorder", + "zerocopy-derive 0.6.6", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "zerocopy-derive 0.7.35", +] + +[[package]] +name = "zerocopy-derive" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "125139de3f6b9d625c39e2efdd73d41bdac468ccd556556440e322be0e1bbd91" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", ] [[package]] name = "zerocopy-derive" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] @@ -6327,7 +6506,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.67", + "syn 2.0.72", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 0075bbd96d..d4a32e4971 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,7 @@ members = [ "recursion/gnark-cli", "recursion/gnark-ffi", "recursion/program", + "server", "sdk", "zkvm/*", ] @@ -24,7 +25,7 @@ exclude = ["examples/target"] resolver = "2" [workspace.package] -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" license = "MIT OR Apache-2.0" repository = "https://github.com/succinctlabs/sp1" @@ -43,25 +44,25 @@ debug = true debug-assertions = true [workspace.dependencies] -sp1-build = { path = "build", version = "1.0.0-rc.1" } -sp1-derive = { path = "derive", version = "1.0.0-rc.1" } -sp1-core = { path = "core", version = 
"1.0.0-rc.1" } -sp1-cli = { path = "cli", version = "1.0.0-rc.1", default-features = false } -sp1-eval = { path = "eval", version = "1.0.0-rc.1", default-features = false } -sp1-helper = { path = "helper", version = "1.0.0-rc.1", default-features = false } -sp1-primitives = { path = "primitives", version = "1.0.0-rc.1" } -sp1-prover = { path = "prover", version = "1.0.0-rc.1" } -sp1-recursion-compiler = { path = "recursion/compiler", version = "1.0.0-rc.1" } -sp1-recursion-core = { path = "recursion/core", version = "1.0.0-rc.1", default-features = false } -sp1-recursion-core-v2 = { path = "recursion/core-v2", version = "1.0.0-rc.1", default-features = false } -sp1-recursion-derive = { path = "recursion/derive", version = "1.0.0-rc.1", default-features = false } -sp1-recursion-gnark-ffi = { path = "recursion/gnark-ffi", version = "1.0.0-rc.1", default-features = false } -sp1-recursion-program = { path = "recursion/program", version = "1.0.0-rc.1", default-features = false } -sp1-recursion-circuit = { path = "recursion/circuit", version = "1.0.0-rc.1", default-features = false } -sp1-recursion-circuit-v2 = { path = "recursion/circuit-v2", version = "1.0.0-rc.1", default-features = false } -sp1-sdk = { path = "sdk", version = "1.0.0-rc.1" } -sp1-lib = { path = "zkvm/lib", version = "1.0.0-rc.1", default-features = false } -sp1-zkvm = { path = "zkvm/entrypoint", version = "1.0.0-rc.1", default-features = false } +sp1-build = { path = "build", version = "1.0.1" } +sp1-derive = { path = "derive", version = "1.0.1" } +sp1-core = { path = "core", version = "1.0.1" } +sp1-cli = { path = "cli", version = "1.0.1", default-features = false } +sp1-eval = { path = "eval", version = "1.0.1", default-features = false } +sp1-helper = { path = "helper", version = "1.0.1", default-features = false } +sp1-primitives = { path = "primitives", version = "1.0.1" } +sp1-prover = { path = "prover", version = "1.0.1" } +sp1-recursion-compiler = { path = "recursion/compiler", version = "1.0.1" } +sp1-recursion-core = { path = "recursion/core", version = "1.0.1", default-features = false } +sp1-recursion-core-v2 = { path = "recursion/core-v2", version = "1.0.1", default-features = false } +sp1-recursion-derive = { path = "recursion/derive", version = "1.0.1", default-features = false } +sp1-recursion-gnark-ffi = { path = "recursion/gnark-ffi", version = "1.0.1", default-features = false } +sp1-recursion-program = { path = "recursion/program", version = "1.0.1", default-features = false } +sp1-recursion-circuit = { path = "recursion/circuit", version = "1.0.1", default-features = false } +sp1-recursion-circuit-v2 = { path = "recursion/circuit-v2", version = "1.0.1", default-features = false } +sp1-sdk = { path = "sdk", version = "1.0.1" } +sp1-lib = { path = "zkvm/lib", version = "1.0.1", default-features = false } +sp1-zkvm = { path = "zkvm/entrypoint", version = "1.0.1", default-features = false } p3-air = "0.1.3-succinct" p3-field = "0.1.3-succinct" p3-commit = "0.1.3-succinct" diff --git a/README.md b/README.md index b78ede376c..163fd5a4d3 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,6 @@ # SP1 -[![Telegram Chat][tg-badge]][tg-url] - -![](./assets/sp1.png) +![SP1](./assets/sp1.png) SP1 is a performant, 100% open-source, contributor-friendly zero-knowledge virtual machine (zkVM) that can prove the execution of arbitrary Rust (or any LLVM-compiled language) programs. SP1 democratizes access to ZKPs by allowing developers to use programmable truth with popular programming languages. 
@@ -11,14 +9,10 @@ SP1 is inspired by the open-source software movement and takes a collaborative a **[Install](https://succinctlabs.github.io/sp1/getting-started/install.html)** | [Docs](https://succinctlabs.github.io/sp1) | [Examples](https://github.com/succinctlabs/sp1/tree/main/examples) - -[tg-badge]: https://img.shields.io/endpoint?color=neon&logo=telegram&label=chat&url=https://tg.sumanjay.workers.dev/succinct_sp1 -[tg-url]: https://t.me/succinct_sp1 +| [Telegram Chat](https://t.me/+AzG4ws-kD24yMGYx) ## For Developers: Build with SP1 -**Note that SP1 is still in alpha and is not yet ready for production use.** - Today, developers can write programs, including complex, large programs like a ZK Tendermint light client, in Rust (with std support), generate proofs and verify them. Most Rust crates should be supported and can be used seamlessly by your program. Example programs can be found in the [examples](https://github.com/succinctlabs/sp1/tree/main/examples) folder. To get started, make sure you have [Rust](https://www.rust-lang.org/tools/install) installed. Then follow the [installation](https://succinctlabs.github.io/sp1/getting-started/install.html) guide in the SP1 book and read the [getting started](https://succinctlabs.github.io/sp1/getting-started/quickstart.html) section. diff --git a/book/SUMMARY.md b/book/SUMMARY.md index fd8cf06fa7..e1b6d0a19e 100644 --- a/book/SUMMARY.md +++ b/book/SUMMARY.md @@ -64,6 +64,10 @@ # Developers +- [Usage in CI](./developers/usage-in-ci.md) + - [Building PLONK Artifacts](./developers/building-plonk-artifacts.md) - [Common Issues](./developers/common-issues.md) + +- [RV32IM Specification](./developers/rv32im-specification.md) diff --git a/book/developers/common-issues.md b/book/developers/common-issues.md index b5c5f7be79..ad65d124ac 100644 --- a/book/developers/common-issues.md +++ b/book/developers/common-issues.md @@ -4,7 +4,7 @@ If you are using a library that depends on `alloy_sol_types`, and encounter an error like this: -``` +```txt perhaps two different versions of crate `alloy_sol_types` are being used? ``` @@ -12,19 +12,17 @@ This is likely due to two different versions of `alloy_sol_types` being used. To ```toml [dependencies] -sp1-sdk = { version = "0.1.0", default-features = false } +sp1-sdk = { version = "1.0.1", default-features = false } ``` -This will configure out the `network` feature which will remove the dependency on `alloy_sol_types` -and configure out the `NetworkProver`. - +This will configure out the `network` feature which will remove the dependency on `alloy_sol_types` and configure out the `NetworkProver`. ## Rust Version Errors If you are using `alloy` or another library that has an MSRV (minimum support rust version) of 1.76.0 or higher, you may encounter an error like this when building your program. -``` +```txt package `alloy v0.1.1 cannot be built because it requires rustc 1.76 or newer, while the currently active rustc version is 1.75.0-nightly` ``` @@ -35,10 +33,14 @@ go away. 
To fix this, you can: - If using `cargo prove build` directly, pass the `--ignore-rust-version` flag: - ``` + + ```bash cargo prove build --ignore-rust-version ``` -- If using `build_program`, set `ignore_rust_version` to true inside the `BuildArgs` struct and use `build_program_with_args`: + +- If using `build_program`, set `ignore_rust_version` to true inside the `BuildArgs` struct and use + `build_program_with_args`: + ```rust let args = BuildArgs { ignore_rust_version: true, @@ -48,13 +50,42 @@ To fix this, you can: ``` ## Stack Overflow Errors + If you encounter the following in a script using `sp1-sdk`: -``` +```txt thread 'main' has overflowed its stack fatal runtime error: stack overflow ``` -``` + +```txt Segmentation fault (core dumped) ``` -Re-run your script with `--release`. \ No newline at end of file + +Re-run your script with `--release`. + +## C Binding Errors + +If you are building a program that uses C bindings or has dependencies that use C bindings, you may encounter the following errors: + +```txt +cc did not execute successfully +``` + +```txt +Failed to find tool. Is `riscv32-unknown-elf-gcc` installed? +``` + +To resolve this, re-install sp1 with the `--c-toolchain` flag: + +```bash +sp1up --c-toolchain +``` + +This will install the C++ toolchain for RISC-V and set the `CC_riscv32im_succinct_zkvm_elf` environment +variable to the path of the installed `riscv32-unknown-elf-gcc` binary. You can also use your own +C++ toolchain by setting this variable manually: + +```bash +export CC_riscv32im_succinct_zkvm_elf=/path/to/toolchain +``` diff --git a/book/developers/rv32im-specification.md b/book/developers/rv32im-specification.md new file mode 100644 index 0000000000..eff0d8f665 --- /dev/null +++ b/book/developers/rv32im-specification.md @@ -0,0 +1,8 @@ +# RV32IM Specification + +SP1 implements the RISC-V RV32IM instruction set with some implementation details that make it more suitable for proving. + +- LW/SW memory access must be word aligned. +- LH/LHU/SH memory access must be half-word aligned. +- Memory access is only valid for addresses [0x20, 0x78000000]. Accessing addresses outside of this range will result in undefined behavior. The global heap allocator in `sp1_zkvm` will panic if memory exceeds this range. +- The ECALL instruction is used for system calls and precompiles. Only valid syscall IDs should be called, and only using the specific convention of loading the ID into register T0 and arguments into registers A0 and A1. If the arguments are addresses, they must be word-aligned. Failure to follow this convention can result in UB. Correct usages can be found in the `sp1_zkvm` crate. diff --git a/book/developers/usage-in-ci.md b/book/developers/usage-in-ci.md new file mode 100644 index 0000000000..5d7b6a996a --- /dev/null +++ b/book/developers/usage-in-ci.md @@ -0,0 +1,27 @@ +# Usage in CI + +You may want to use SP1 in your [Github Actions](https://docs.github.com/en/actions) CI workflow.
+ +You first need to have Rust installed, and you can use +[actions-rs/toolchain](https://github.com/actions-rs/toolchain) for this: + +```yaml +- name: Install Rust Toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: 1.79.0 + profile: default + override: true + default: true + components: llvm-tools, rustc-dev +``` + +And then you can install the SP1 toolchain: + +```yaml +- name: Install SP1 toolchain + run: | + curl -L https://sp1.succinct.xyz | bash + ~/.sp1/bin/sp1up + ~/.sp1/bin/cargo-prove prove --version +``` diff --git a/book/generating-proofs/advanced.md b/book/generating-proofs/advanced.md index b550215fe6..cff65d9d85 100644 --- a/book/generating-proofs/advanced.md +++ b/book/generating-proofs/advanced.md @@ -48,7 +48,7 @@ RUSTFLAGS='-C target-cpu=native' cargo run --release Currently there is support for AVX512 and NEON SIMD instructions. For NEON, you must also enable the `sp1-sdk` feature `neon` in your script crate's `Cargo.toml` file. ```toml -sp1-sdk = { git = "https://github.com/succinctlabs/sp1", features = ["neon"] } +sp1-sdk = { version = "1.0.1", features = ["neon"] } ``` ## Performance diff --git a/book/generating-proofs/setup.md b/book/generating-proofs/setup.md index 4b2252d26e..42726a99fa 100644 --- a/book/generating-proofs/setup.md +++ b/book/generating-proofs/setup.md @@ -11,7 +11,6 @@ cargo prove new cd script ``` - ## Manual You can also manually setup a project. First create a new cargo project: @@ -33,7 +32,7 @@ name = "script" edition = "2021" [dependencies] -sp1-sdk = { git = "https://github.com/succinctlabs/sp1.git" } +sp1-sdk = "1.0.1" ``` The `sp1-sdk` crate includes the necessary utilities to generate, save, and verify proofs. diff --git a/book/introduction.md b/book/introduction.md index e70e264b94..a0b2de2ae0 100644 --- a/book/introduction.md +++ b/book/introduction.md @@ -9,7 +9,7 @@ SP1 is a performant, 100% open-source, contributor-friendly zero-knowledge virtual machine (zkVM) that verifies the execution of arbitrary Rust (or any LLVM-compiled language) programs. [tg-badge]: https://img.shields.io/endpoint?color=neon&logo=telegram&label=chat&url=https%3A%2F%2Ftg.sumanjay.workers.dev%2Fsuccinct%5Fsp1 -[tg-url]: https://t.me/succinct_sp1 +[tg-url]: https://t.me/+AzG4ws-kD24yMGYx ## The future of truth is programmable diff --git a/book/onchain-verification/contract-addresses.md b/book/onchain-verification/contract-addresses.md index 70fd26b07a..77fca8a330 100644 --- a/book/onchain-verification/contract-addresses.md +++ b/book/onchain-verification/contract-addresses.md @@ -2,9 +2,9 @@ When using SP1, we recommend using our deployed verifiers. Each contract is a [SP1VerifierGateway](https://github.com/succinctlabs/sp1-contracts/blob/main/contracts/src/ISP1VerifierGateway.sol) which can automatically route your SP1 proof to the correct verifier based on the prover version.
- | Chain ID | Chain | Gateway | -|----------|------------------|---------------------------------------------------------------------------------------------------------------------------------| +| -------- | ---------------- | ------------------------------------------------------------------------------------------------------------------------------- | +| 1 | Mainnet | [0x3B6041173B80E77f038f3F2C0f9744f04837185e](https://etherscan.io/address/0x3B6041173B80E77f038f3F2C0f9744f04837185e) | | 11155111 | Sepolia | [0x3B6041173B80E77f038f3F2C0f9744f04837185e](https://sepolia.etherscan.io/address/0x3B6041173B80E77f038f3F2C0f9744f04837185e) | | 17000 | Holesky | [0x3B6041173B80E77f038f3F2C0f9744f04837185e](https://holesky.etherscan.io/address/0x3B6041173B80E77f038f3F2C0f9744f04837185e) | | 42161 | Arbitrum One | [0x3B6041173B80E77f038f3F2C0f9744f04837185e](https://arbiscan.io/address/0x3B6041173B80E77f038f3F2C0f9744f04837185e) | @@ -14,7 +14,7 @@ When using SP1, we recommend using our deployed verifiers. Each contract is a [S | 8453 | Base | [0x3B6041173B80E77f038f3F2C0f9744f04837185e](https://basescan.org/address/0x3B6041173B80E77f038f3F2C0f9744f04837185e) | | 84532 | Base Sepolia | [0x3B6041173B80E77f038f3F2C0f9744f04837185e](https://sepolia.basescan.org/address/0x3B6041173B80E77f038f3F2C0f9744f04837185e) | -**Currently officially supported versions of SP1 are v1.0.7 and v1.0.8.** If you'd like official support for a verifier on a different chain, please ask in the [SP1 Telegram](https://t.me/succinct_sp1). +**Currently officially supported version of SP1 is v1.0.1.** If you'd like official support for a verifier on a different chain, please ask in the [SP1 Telegram](https://t.me/+AzG4ws-kD24yMGYx). ## ISP1Verifier Interface @@ -40,4 +40,4 @@ interface ISP1Verifier { bytes calldata proofBytes ) external view; } -``` \ No newline at end of file +``` diff --git a/book/onchain-verification/getting-started.md b/book/onchain-verification/getting-started.md index f9eb36b1a1..39fad85ba5 100644 --- a/book/onchain-verification/getting-started.md +++ b/book/onchain-verification/getting-started.md @@ -27,7 +27,5 @@ You can run the above script with `RUST_LOG=info cargo run --bin plonk_bn254 --r If you would like to run the PLONK prover directly without Docker, you must have Go 1.22 installed and enable the `native-plonk` feature in `sp1-sdk`. This path is not recommended and may require additional native dependencies. ```toml -sp1-sdk = { features = ["native-plonk"] } +sp1-sdk = { version = "1.0.1", features = ["native-plonk"] } ``` - - diff --git a/book/onchain-verification/solidity-sdk.md b/book/onchain-verification/solidity-sdk.md index 48f033e73a..164ec7a4cc 100644 --- a/book/onchain-verification/solidity-sdk.md +++ b/book/onchain-verification/solidity-sdk.md @@ -41,30 +41,57 @@ contract Fibonacci { address public verifier; /// @notice The verification key for the fibonacci program. - bytes32 public fibonacciProgramVkey; + bytes32 public fibonacciProgramVKey; - constructor(address _verifier, bytes32 _fibonacciProgramVkey) { + constructor(address _verifier, bytes32 _fibonacciProgramVKey) { verifier = _verifier; - fibonacciProgramVkey = _fibonacciProgramVkey; + fibonacciProgramVKey = _fibonacciProgramVKey; } /// @notice The entrypoint for verifying the proof of a fibonacci number. - /// @param proof The encoded proof. - /// @param publicValues The encoded public values. - function verifyFibonacciProof(bytes calldata proof, bytes calldata publicValues) + /// @param _proofBytes The encoded proof. 
+ /// @param _publicValues The encoded public values. + function verifyFibonacciProof(bytes calldata _publicValues, bytes calldata _proofBytes) public view returns (uint32, uint32, uint32) { - ISP1Verifier(verifier).verifyProof(fibonacciProgramVkey, publicValues, proof); - (uint32 n, uint32 a, uint32 b) = abi.decode(publicValues, (uint32, uint32, uint32)); + ISP1Verifier(verifier).verifyProof(fibonacciProgramVKey, _publicValues, _proofBytes); + (uint32 n, uint32 a, uint32 b) = abi.decode(_publicValues, (uint32, uint32, uint32)); return (n, a, b); } } + ``` For more details on the contracts, refer to the [sp1-contracts](https://github.com/succinctlabs/sp1-contracts) repo. ### Testing -To test the contract, we recommend setting up [Foundry Tests](https://book.getfoundry.sh/forge/tests). We have an example of such a test in the [SP1 Project Template](https://github.com/succinctlabs/sp1-project-template/blob/dev/contracts/test/Fibonacci.t.sol). \ No newline at end of file +To test the contract, we recommend setting up [Foundry +Tests](https://book.getfoundry.sh/forge/tests). We have an example of such a test in the [SP1 +Project +Template](https://github.com/succinctlabs/sp1-project-template/blob/dev/contracts/test/Fibonacci.t.sol). + +### Solidity Versions + +The officially deployed contracts are built using Solidity 0.8.20 and exist on the +[sp1-contracts main](https://github.com/succinctlabs/sp1-contracts/tree/main) branch. + +If you need to use a different Solidity version that is compatible with your contracts, there are also other +branches you can install that contain the contracts for that version. For +example, the branch [main-0.8.15](https://github.com/succinctlabs/sp1-contracts/tree/main-0.8.15) +contains the contracts with: + +```c++ +pragma solidity ^0.8.15; +``` + +and you can install it with: + +```sh +forge install succinctlabs/sp1-contracts@main-0.8.15 +``` + +If there are versions that you need but there aren't branches for them yet, please ask in +the [SP1 Telegram](https://t.me/+AzG4ws-kD24yMGYx). diff --git a/book/prover-network/setup.md b/book/prover-network/setup.md index 1417885c29..575b333f45 100644 --- a/book/prover-network/setup.md +++ b/book/prover-network/setup.md @@ -1,6 +1,6 @@ # Prover Network: Setup -> **Currently, the supported version of SP1 on the prover network is `v1.0.5-testnet`.** +> **Currently, the supported version of SP1 on the prover network is `v1.0.1`.** So far we've explored how to generate proofs locally, but this can actually be inconvenient on local machines due to high memory / CPU requirements, especially for very large programs. @@ -16,6 +16,7 @@ Network. Completing this form requires you to complete the [key setup](#key-setu The prover network uses Secp256k1 keypairs for authentication, similar to Ethereum wallets. You may generate a new keypair explicitly for use with the prover network, or use an existing keypair. **You do not need to hold any funds in this account, it is used solely for access control.** ### Generate a new keypair with `cast` + Prover network keypair credentials can be generated using the [cast](https://book.getfoundry.sh/cast/) CLI tool.
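For example, a fresh keypair can be created locally with `cast` (a minimal sketch assuming a standard Foundry installation; the printed address and private key become your prover network credentials):

```bash
# Generate a new random Secp256k1 keypair; cast prints the address and private key to stdout.
cast wallet new
```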
diff --git a/book/prover-network/usage.md b/book/prover-network/usage.md index 4b695d25f2..e21304e1b7 100644 --- a/book/prover-network/usage.md +++ b/book/prover-network/usage.md @@ -1,6 +1,6 @@ # Prover Network: Usage -> **Currently, the supported version of SP1 on the prover network is `v1.0.5-testnet`.** +> **Currently, the supported version of SP1 on the prover network is `v1.0.1`.** ## Sending a proof request diff --git a/book/prover-network/versions.md b/book/prover-network/versions.md index a6dfc1b6f3..adfb4d8abb 100644 --- a/book/prover-network/versions.md +++ b/book/prover-network/versions.md @@ -4,7 +4,7 @@ The prover network currently only supports specific versions of SP1: | Environment | RPC URL | Supported Version | | ----------- | -------------------------- | ----------------- | -| Prod | `https://rpc.succinct.xyz` | v1.0.5-testnet | +| Prod | `https://rpc.succinct.xyz` | v1.0.1 | If you submit a proof request to the prover network and your are not using the supported version, you will receive an error message. @@ -14,14 +14,14 @@ You must switch to a supported version before submitting a proof. To do so, repl ```toml [dependencies] -sp1-zkvm = { git = "https://github.com/succinctlabs/sp1.git", tag = "v1.0.5-testnet" } +sp1-zkvm = "1.0.1" ``` replace the `sp1-sdk` version in your script's `Cargo.toml`: ```toml [dependencies] -sp1-sdk = { git = "https://github.com/succinctlabs/sp1.git", tag = "v1.0.5-testnet" } +sp1-sdk = "1.0.1" ``` Re-build your program and script, and then try again. diff --git a/book/writing-programs/build-script.md b/book/writing-programs/build-script.md index fa673ad806..734409a5d9 100644 --- a/book/writing-programs/build-script.md +++ b/book/writing-programs/build-script.md @@ -14,7 +14,7 @@ Make sure to also add `sp1-helper` as a build dependency in `script/Cargo.toml`: ```toml [build-dependencies] -sp1-helper = { git = "https://github.com/succinctlabs/sp1.git" } +sp1-helper = "1.0.1" ``` If you run `RUST_LOG=info cargo run --release -vv`, you will see the following output from the build script if the program has changed, indicating that the program was rebuilt: diff --git a/book/writing-programs/cycle-tracking.md b/book/writing-programs/cycle-tracking.md index 953feff808..975c591ccd 100644 --- a/book/writing-programs/cycle-tracking.md +++ b/book/writing-programs/cycle-tracking.md @@ -14,7 +14,7 @@ Note that to use the macro, you must add the `sp1-derive` crate to your dependen ```toml [dependencies] -sp1-derive = { git = "https://github.com/succinctlabs/sp1.git" } +sp1-derive = "1.0.1" ``` In the script for proof generation, setup the logger with `utils::setup_logger()` and run the script with `RUST_LOG=info cargo run --release`. 
You should see the following output: @@ -25,17 +25,17 @@ $ RUST_LOG=info cargo run --release Running `target/release/cycle-tracking-script` 2024-03-13T02:03:40.567500Z INFO execute: loading memory image 2024-03-13T02:03:40.567751Z INFO execute: starting execution -2024-03-13T02:03:40.567760Z INFO execute: clk = 0 pc = 0x2013b8 -2024-03-13T02:03:40.567822Z INFO execute: ┌╴setup -2024-03-13T02:03:40.568095Z INFO execute: └╴4,398 cycles -2024-03-13T02:03:40.568122Z INFO execute: ┌╴main-body -2024-03-13T02:03:40.568149Z INFO execute: │ ┌╴expensive_function -2024-03-13T02:03:40.568250Z INFO execute: │ └╴1,368 cycles +2024-03-13T02:03:40.567760Z INFO execute: clk = 0 pc = 0x2013b8 +2024-03-13T02:03:40.567822Z INFO execute: ┌╴setup +2024-03-13T02:03:40.568095Z INFO execute: └╴4,398 cycles +2024-03-13T02:03:40.568122Z INFO execute: ┌╴main-body +2024-03-13T02:03:40.568149Z INFO execute: │ ┌╴expensive_function +2024-03-13T02:03:40.568250Z INFO execute: │ └╴1,368 cycles stdout: result: 5561 -2024-03-13T02:03:40.568373Z INFO execute: │ ┌╴expensive_function -2024-03-13T02:03:40.568470Z INFO execute: │ └╴1,368 cycles +2024-03-13T02:03:40.568373Z INFO execute: │ ┌╴expensive_function +2024-03-13T02:03:40.568470Z INFO execute: │ └╴1,368 cycles stdout: result: 2940 -2024-03-13T02:03:40.568556Z INFO execute: └╴5,766 cycles +2024-03-13T02:03:40.568556Z INFO execute: └╴5,766 cycles 2024-03-13T02:03:40.568566Z INFO execute: finished execution clk = 11127 pc = 0x0 2024-03-13T02:03:40.569251Z INFO execute: close time.busy=1.78ms time.idle=21.1µs ``` diff --git a/book/writing-programs/patched-crates.md b/book/writing-programs/patched-crates.md index bceae2b10d..31333eb338 100644 --- a/book/writing-programs/patched-crates.md +++ b/book/writing-programs/patched-crates.md @@ -16,8 +16,6 @@ Under the hood, we use [precompiles](./precompiles.md) to achieve tremendous per | ed25519-consensus | [sp1-patches/ed25519-consensus](http://github.com/sp1-patches/ed25519-consensus) | ed25519 verify | | curve25519-dalek-ng | [sp1-patches/curve25519-dalek-ng](https://github.com/sp1-patches/curve25519-dalek-ng) | ed25519 verify | | curve25519-dalek | [sp1-patches/curve25519-dalek](https://github.com/sp1-patches/curve25519-dalek) | ed25519 verify | -| revm-precompile | [sp1-patches/revm](https://github.com/sp1-patches/revm) | ecrecover precompile | -| reth-primitives | [sp1-patches/reth](https://github.com/sp1-patches/reth) | ecrecover transactions | ## Using Patched Crates @@ -36,8 +34,6 @@ curve25519-dalek = { git = "https://github.com/sp1-patches/curve25519-dalek", br curve25519-dalek-ng = { git = "https://github.com/sp1-patches/curve25519-dalek-ng", branch = "patch-v4.1.1" } ed25519-consensus = { git = "https://github.com/sp1-patches/ed25519-consensus", branch = "patch-v2.1.0" } tiny-keccak = { git = "https://github.com/sp1-patches/tiny-keccak", branch = "patch-v2.0.2" } -revm = { git = "https://github.com/sp1-patches/revm", branch = "patch-v5.0.0" } -reth-primitives = { git = "https://github.com/sp1-patches/reth", default-features = false, branch = "sp1-reth" } ``` If you are patching a crate from Github instead of from crates.io, you need to specify the diff --git a/book/writing-programs/setup.md b/book/writing-programs/setup.md index ca0dac1aa2..5e89f84509 100644 --- a/book/writing-programs/setup.md +++ b/book/writing-programs/setup.md @@ -30,7 +30,7 @@ This will compile the ELF that can be executed in the zkVM and put the executabl For production builds of programs, you can build your program inside a Docker container which 
will generate a **reproducible ELF** on all platforms. To do so, just use the `--docker` flag and the `--tag` flag with the release version you want to use. For example: ```bash -cargo prove build --docker --tag v1.0.5-testnet +cargo prove build --docker --tag v1.0.1 ``` To verify that your build is reproducible, you can compute the SHA-512 hash of the ELF on different platforms and systems with: @@ -61,7 +61,7 @@ name = "program" edition = "2021" [dependencies] -sp1-zkvm = { git = "https://github.com/succinctlabs/sp1.git" } +sp1-zkvm = "1.0.1" ``` The `sp1-zkvm` crate includes necessary utilities for your program, including handling inputs and outputs, diff --git a/build/Cargo.toml b/build/Cargo.toml index 87a17b2b5d..5941dd3c02 100644 --- a/build/Cargo.toml +++ b/build/Cargo.toml @@ -12,3 +12,4 @@ categories = { workspace = true } cargo_metadata = "0.18.1" anyhow = { version = "1.0.83" } clap = { version = "4.5.9", features = ["derive", "env"] } +dirs = "5.0.1" diff --git a/build/src/lib.rs b/build/src/lib.rs index 735f7edbf1..fbb618f057 100644 --- a/build/src/lib.rs +++ b/build/src/lib.rs @@ -1,17 +1,17 @@ mod docker; +use anyhow::{Context, Result}; +use cargo_metadata::camino::Utf8PathBuf; use clap::Parser; +use dirs::home_dir; use std::{ - fs, + env, fs, io::{BufRead, BufReader}, path::PathBuf, process::{exit, Command, Stdio}, thread, }; -use anyhow::{Context, Result}; -use cargo_metadata::camino::Utf8PathBuf; - const BUILD_TARGET: &str = "riscv32im-succinct-zkvm-elf"; const DEFAULT_TAG: &str = "latest"; const DEFAULT_OUTPUT_DIR: &str = "elf"; @@ -132,6 +132,21 @@ fn create_local_command(args: &BuildArgs, program_dir: &Utf8PathBuf) -> Command let canonicalized_program_dir = program_dir .canonicalize() .expect("Failed to canonicalize program directory"); + + // If CC_riscv32im_succinct_zkvm_elf is not set, set it to the default C++ toolchain + // downloaded by 'sp1up --c-toolchain'. 
+ if env::var("CC_riscv32im_succinct_zkvm_elf").is_err() { + if let Some(home_dir) = home_dir() { + let cc_path = home_dir + .join(".sp1") + .join("bin") + .join("riscv32-unknown-elf-gcc"); + if cc_path.exists() { + command.env("CC_riscv32im_succinct_zkvm_elf", cc_path); + } + } + } + command .current_dir(canonicalized_program_dir) .env("RUSTUP_TOOLCHAIN", "succinct") diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 6068e0a1bd..1a7f309ac8 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -39,9 +39,9 @@ rand = "0.8" downloader = { version = "0.2", default-features = false, features = [ "rustls-tls", ] } -serde_json = "1.0.120" +serde_json = "1.0.121" yansi = "1.0.1" hex = "0.4.3" -anstyle = "1.0.7" +anstyle = "1.0.8" target-lexicon = "0.12.15" tempfile = "3.10.1" diff --git a/cli/docker/Dockerfile b/cli/docker/Dockerfile index b8b35b3467..f639579a00 100644 --- a/cli/docker/Dockerfile +++ b/cli/docker/Dockerfile @@ -1,11 +1,24 @@ FROM ubuntu:24.04@sha256:e3f92abc0967a6c19d0dfa2d55838833e947b9d74edbcb0113e48535ad4be12a -RUN apt-get update -RUN apt-get install -y --no-install-recommends ca-certificates clang curl libssl-dev pkg-config git dialog -RUN curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL 'https://sh.rustup.rs' | sh -s -- -y +RUN apt-get update \ + && apt-get install -y --no-install-recommends ca-certificates clang curl libssl-dev pkg-config git dialog \ + && curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL 'https://sh.rustup.rs' | sh -s -- -y + ENV PATH="/root/.cargo/bin:${PATH}" + RUN curl -L https://sp1.succinct.xyz | bash && ~/.sp1/bin/sp1up +# Install the C++ toolchain for RISC-V and create a symlink to it in /root/.sp1/bin +RUN mkdir -p /root/.sp1/riscv \ + && curl -L https://github.com/risc0/toolchain/releases/download/2022.03.25/riscv32im-linux-x86_64.tar.xz -o /tmp/riscv32im-linux-x86_64.tar.xz \ + && tar -xvf /tmp/riscv32im-linux-x86_64.tar.xz -C /root/.sp1/riscv/ \ + && rm -f /tmp/riscv32im-linux-x86_64.tar.xz \ + && find /root/.sp1/riscv -name 'riscv32-unknown-elf-gcc' -type f -exec ln -sf {} /root/.sp1/bin/riscv32-unknown-elf-gcc \; + +# Add the C++ toolchain to the path and the CC environment variable +ENV PATH="/root/.sp1/bin:${PATH}" +ENV CC_riscv32im_succinct_zkvm_elf=/root/.sp1/bin/riscv32-unknown-elf-gcc + WORKDIR /root/program ENV CARGO_TERM_COLOR=always diff --git a/cli/src/commands/build_toolchain.rs b/cli/src/commands/build_toolchain.rs index 70175db685..8b921221ae 100644 --- a/cli/src/commands/build_toolchain.rs +++ b/cli/src/commands/build_toolchain.rs @@ -65,12 +65,7 @@ impl BuildToolchainCmd { }; // Install our config.toml. 
- let ci = std::env::var("CI").unwrap_or("false".to_string()) == "true"; - let config_toml = if ci { - include_str!("config-ci.toml") - } else { - include_str!("config.toml") - }; + let config_toml = include_str!("config.toml"); let config_file = rust_dir.join("config.toml"); std::fs::write(&config_file, config_toml) .with_context(|| format!("while writing configuration to {:?}", config_file))?; diff --git a/cli/src/commands/config-ci.toml b/cli/src/commands/config-ci.toml deleted file mode 100644 index 29e6ce2f41..0000000000 --- a/cli/src/commands/config-ci.toml +++ /dev/null @@ -1,16 +0,0 @@ -changelog-seen = 2 - -[build] -target = ["riscv32im-succinct-zkvm-elf"] -extended = true -tools = ["cargo", "cargo-clippy", "clippy", "rustfmt"] -configure-args = [] -cargo-native-static = true - -[rust] -lld = true -llvm-tools = true -channel = "nightly" - -[llvm] -download-ci-llvm = false diff --git a/cli/src/commands/config.toml b/cli/src/commands/config.toml index bfb56db6d0..f965b210a1 100644 --- a/cli/src/commands/config.toml +++ b/cli/src/commands/config.toml @@ -1,15 +1,13 @@ -changelog-seen = 2 - [build] target = ["riscv32im-succinct-zkvm-elf"] extended = true tools = ["cargo", "cargo-clippy", "clippy", "rustfmt"] configure-args = [] +cargo-native-static = true [rust] lld = true llvm-tools = true -channel = "nightly" [llvm] download-ci-llvm = false diff --git a/core/Cargo.toml b/core/Cargo.toml index 4aeaccb3d7..12a2592294 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -42,7 +42,7 @@ anyhow = "1.0.83" amcl = { package = "snowbridge-amcl", version = "1.0.2", default-features = false, features = [ "bls381", ] } -arrayref = "0.3.7" +arrayref = "0.3.8" blake3 = "1.5" cfg-if = "1.0.0" generic-array = { version = "1.1.0", features = ["alloc", "serde"] } @@ -62,11 +62,12 @@ strum_macros = "0.26" strum = "0.26" web-time = "1.1.0" rayon-scan = "0.1.1" -thiserror = "1.0.60" +thiserror = "1.0.63" num-bigint = { version = "0.4.6", default-features = false } rand = "0.8.5" bytemuck = "1.16.0" hashbrown = { version = "0.14.5", features = ["serde", "inline-more"] } +static_assertions = "1.1.0" [dev-dependencies] tiny-keccak = { version = "2.0.2", features = ["keccak"] } diff --git a/core/benches/main.rs b/core/benches/main.rs index 1d9eb54ba6..ff186cce11 100644 --- a/core/benches/main.rs +++ b/core/benches/main.rs @@ -1,7 +1,7 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; use sp1_core::io::SP1Stdin; use sp1_core::runtime::{Program, Runtime}; -use sp1_core::stark::DefaultProver; +use sp1_core::stark::CpuProver; use sp1_core::utils::{prove, BabyBearPoseidon2, SP1CoreOpts}; #[allow(unreachable_code)] @@ -21,7 +21,7 @@ pub fn criterion_benchmark(c: &mut Criterion) { format!("main:{}:{}", p.split('/').last().unwrap(), cycles), |b| { b.iter(|| { - prove::<_, DefaultProver<_, _>>( + prove::<_, CpuProver<_, _>>( black_box(program.clone()), &SP1Stdin::new(), BabyBearPoseidon2::new(), diff --git a/core/src/air/polynomial.rs b/core/src/air/polynomial.rs index 72c9dbd2fc..afae6f8249 100644 --- a/core/src/air/polynomial.rs +++ b/core/src/air/polynomial.rs @@ -61,11 +61,12 @@ impl Polynomial { { let len = self.coefficients.len(); let mut result = Vec::with_capacity(len - 1); + let r_inv = r.inverse(); - result.push(-self.coefficients[0] / r); + result.push(-self.coefficients[0] * r_inv); for i in 1..len - 1 { let element = result[i - 1] - self.coefficients[i]; - result.push(element / r); + result.push(element * r_inv); } Self { coefficients: result, diff --git 
a/core/src/alu/add_sub/mod.rs b/core/src/alu/add_sub/mod.rs index 4765385576..b1eb2d1f9f 100644 --- a/core/src/alu/add_sub/mod.rs +++ b/core/src/alu/add_sub/mod.rs @@ -171,7 +171,7 @@ impl AddSubChip { ) { let is_add = event.opcode == Opcode::ADD; cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.is_add = F::from_bool(is_add); cols.is_sub = F::from_bool(!is_add); @@ -286,7 +286,7 @@ mod tests { let mut challenger = config.challenger(); let mut shard = ExecutionRecord::default(); - for i in 0..1000 { + for i in 0..255 { let operand_1 = thread_rng().gen_range(0..u32::MAX); let operand_2 = thread_rng().gen_range(0..u32::MAX); let result = operand_1.wrapping_add(operand_2); @@ -300,7 +300,7 @@ mod tests { operand_2, )); } - for i in 0..1000 { + for i in 0..255 { let operand_1 = thread_rng().gen_range(0..u32::MAX); let operand_2 = thread_rng().gen_range(0..u32::MAX); let result = operand_1.wrapping_sub(operand_2); diff --git a/core/src/alu/bitwise/mod.rs b/core/src/alu/bitwise/mod.rs index 7a1cae7b21..5c35df0b14 100644 --- a/core/src/alu/bitwise/mod.rs +++ b/core/src/alu/bitwise/mod.rs @@ -141,7 +141,7 @@ impl BitwiseChip { let c = event.c.to_le_bytes(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.a = Word::from(event.a); cols.b = Word::from(event.b); cols.c = Word::from(event.c); @@ -155,10 +155,10 @@ impl BitwiseChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::from(event.opcode), - a1: b_a as u32, + a1: b_a as u16, a2: 0, - b: b_b as u32, - c: b_c as u32, + b: b_b, + c: b_c, }; blu.add_byte_lookup_event(byte_event); } diff --git a/core/src/alu/divrem/mod.rs b/core/src/alu/divrem/mod.rs index 6779b44f22..673c10a6cd 100644 --- a/core/src/alu/divrem/mod.rs +++ b/core/src/alu/divrem/mod.rs @@ -244,7 +244,7 @@ impl MachineAir for DivRemChip { cols.b = Word::from(event.b); cols.c = Word::from(event.c); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.is_real = F::one(); cols.is_divu = F::from_bool(event.opcode == Opcode::DIVU); cols.is_remu = F::from_bool(event.opcode == Opcode::REMU); @@ -307,9 +307,9 @@ impl MachineAir for DivRemChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::MSB, - a1: get_msb(*word) as u32, + a1: get_msb(*word) as u16, a2: 0, - b: most_significant_byte as u32, + b: most_significant_byte, c: 0, }); } diff --git a/core/src/alu/lt/mod.rs b/core/src/alu/lt/mod.rs index 2e5af527da..97674d5268 100644 --- a/core/src/alu/lt/mod.rs +++ b/core/src/alu/lt/mod.rs @@ -181,7 +181,7 @@ impl LtChip { let c = event.c.to_le_bytes(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.a = Word(a.map(F::from_canonical_u8)); cols.b = Word(b.map(F::from_canonical_u8)); cols.c = Word(c.map(F::from_canonical_u8)); @@ -197,19 +197,19 @@ impl LtChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::AND, - a1: masked_b as u32, + a1: masked_b as u16, a2: 0, - b: b[3] as u32, - c: 0x7f as u32, + b: b[3], + c: 0x7f, }); blu.add_byte_lookup_event(ByteLookupEvent { shard: event.shard, channel: event.channel, opcode: ByteOpcode::AND, - a1: masked_c as u32, + a1: masked_c as u16, a2: 0, - b: c[3] as 
u32, - c: 0x7f as u32, + b: c[3], + c: 0x7f, }); let mut b_comp = b; @@ -261,10 +261,10 @@ impl LtChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::LTU, - a1: cols.sltu.as_canonical_u32(), + a1: cols.sltu.as_canonical_u32() as u16, a2: 0, - b: cols.comparison_bytes[0].as_canonical_u32(), - c: cols.comparison_bytes[1].as_canonical_u32(), + b: cols.comparison_bytes[0].as_canonical_u32() as u8, + c: cols.comparison_bytes[1].as_canonical_u32() as u8, }); } } diff --git a/core/src/alu/mod.rs b/core/src/alu/mod.rs index 535b6bee08..6ece69703d 100644 --- a/core/src/alu/mod.rs +++ b/core/src/alu/mod.rs @@ -29,7 +29,7 @@ pub struct AluEvent { pub shard: u32, /// The channel number, used for byte lookup table. - pub channel: u32, + pub channel: u8, /// The clock cycle that the operation occurs on. pub clk: u32, @@ -51,7 +51,7 @@ pub struct AluEvent { impl AluEvent { /// Creates a new `AluEvent`. - pub fn new(shard: u32, channel: u32, clk: u32, opcode: Opcode, a: u32, b: u32, c: u32) -> Self { + pub fn new(shard: u32, channel: u8, clk: u32, opcode: Opcode, a: u32, b: u32, c: u32) -> Self { Self { lookup_id: 0, shard, diff --git a/core/src/alu/mul/mod.rs b/core/src/alu/mul/mod.rs index 1351e78c38..dd7d27de56 100644 --- a/core/src/alu/mul/mod.rs +++ b/core/src/alu/mul/mod.rs @@ -199,9 +199,9 @@ impl MachineAir for MulChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::MSB, - a1: get_msb(*word) as u32, + a1: get_msb(*word) as u16, a2: 0, - b: most_significant_byte as u32, + b: most_significant_byte, c: 0, }); } @@ -220,7 +220,7 @@ impl MachineAir for MulChip { // Calculate the correct product using the `product` array. We store the correct carry // value for verification. - let base = 1 << BYTE_SIZE; + let base = (1 << BYTE_SIZE) as u32; let mut carry = [0u32; PRODUCT_SIZE]; for i in 0..PRODUCT_SIZE { carry[i] = product[i] / base; @@ -241,11 +241,15 @@ impl MachineAir for MulChip { cols.is_mulhu = F::from_bool(event.opcode == Opcode::MULHU); cols.is_mulhsu = F::from_bool(event.opcode == Opcode::MULHSU); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); // Range check. { - record.add_u16_range_checks(event.shard, event.channel, &carry); + record.add_u16_range_checks( + event.shard, + event.channel, + &carry.map(|x| x as u16), + ); record.add_u8_range_checks( event.shard, event.channel, diff --git a/core/src/alu/sll/mod.rs b/core/src/alu/sll/mod.rs index a3caadbcad..d40da149a0 100644 --- a/core/src/alu/sll/mod.rs +++ b/core/src/alu/sll/mod.rs @@ -200,7 +200,7 @@ impl ShiftLeft { let b = event.b.to_le_bytes(); let c = event.c.to_le_bytes(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.a = Word(a.map(F::from_canonical_u8)); cols.b = Word(b.map(F::from_canonical_u8)); cols.c = Word(c.map(F::from_canonical_u8)); diff --git a/core/src/alu/sr/mod.rs b/core/src/alu/sr/mod.rs index db093cf109..a905e717fc 100644 --- a/core/src/alu/sr/mod.rs +++ b/core/src/alu/sr/mod.rs @@ -233,7 +233,7 @@ impl ShiftRightChip { // Initialize cols with basic operands and flags derived from the current event. 
{ cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.a = Word::from(event.a); cols.b = Word::from(event.b); cols.c = Word::from(event.c); @@ -255,9 +255,9 @@ impl ShiftRightChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::MSB, - a1: ((most_significant_byte >> 7) & 1) as u32, + a1: ((most_significant_byte >> 7) & 1) as u16, a2: 0, - b: most_significant_byte as u32, + b: most_significant_byte, c: 0, }]); } @@ -305,10 +305,10 @@ impl ShiftRightChip { shard: event.shard, channel: event.channel, opcode: ByteOpcode::ShrCarry, - a1: shift as u32, - a2: carry as u32, - b: byte_shift_result[i] as u32, - c: num_bits_to_shift as u32, + a1: shift as u16, + a2: carry, + b: byte_shift_result[i], + c: num_bits_to_shift as u8, }; blu.add_byte_lookup_event(byte_event); diff --git a/core/src/bytes/air.rs b/core/src/bytes/air.rs index 69d2ba82fe..7f0bd3f3eb 100644 --- a/core/src/bytes/air.rs +++ b/core/src/bytes/air.rs @@ -28,7 +28,7 @@ impl Air for ByteChip { // Send all the lookups for each operation. for channel in 0..NUM_BYTE_LOOKUP_CHANNELS { - let channel_f = AB::F::from_canonical_u32(channel); + let channel_f = AB::F::from_canonical_u8(channel); let channel = channel as usize; for (i, opcode) in ByteOpcode::all().iter().enumerate() { let field_op = opcode.as_field::(); diff --git a/core/src/bytes/event.rs b/core/src/bytes/event.rs index 874db6586b..d96d8ea24d 100644 --- a/core/src/bytes/event.rs +++ b/core/src/bytes/event.rs @@ -1,3 +1,5 @@ +use std::hash::{Hash, Hasher}; + use hashbrown::HashMap; use itertools::Itertools; use p3_field::PrimeField32; @@ -9,28 +11,41 @@ use serde::{Deserialize, Serialize}; use super::ByteOpcode; /// A byte lookup event. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub struct ByteLookupEvent { /// The shard number, used for byte lookup table. pub shard: u32, // The channel multiplicity identifier. - pub channel: u32, + pub channel: u8, /// The opcode of the operation. pub opcode: ByteOpcode, /// The first output operand. - pub a1: u32, + pub a1: u16, /// The second output operand. - pub a2: u32, + pub a2: u8, /// The first input operand. - pub b: u32, + pub b: u8, /// The second input operand. - pub c: u32, + pub c: u8, +} + +impl Hash for ByteLookupEvent { + fn hash(&self, state: &mut H) { + let combined_limb_1 = self.shard as u64 + + ((self.channel as u64) << 32) + + ((self.opcode as u64) << 40) + + ((self.a1 as u64) << 48); + let combined_limb_2 = self.a2 as u64 + ((self.b as u64) << 8) + ((self.c as u64) << 16); + let combined = combined_limb_1 as u128 + ((combined_limb_2 as u128) << 64); + + combined.hash(state); + } } /// A type that can record byte lookup events. @@ -52,20 +67,20 @@ pub trait ByteRecord { } /// Adds a `ByteLookupEvent` to verify `a` and `b are indeed bytes to the shard. - fn add_u8_range_check(&mut self, shard: u32, channel: u32, a: u8, b: u8) { + fn add_u8_range_check(&mut self, shard: u32, channel: u8, a: u8, b: u8) { self.add_byte_lookup_event(ByteLookupEvent { shard, channel, opcode: ByteOpcode::U8Range, a1: 0, a2: 0, - b: a as u32, - c: b as u32, + b: a, + c: b, }); } /// Adds a `ByteLookupEvent` to verify `a` is indeed u16. 
- fn add_u16_range_check(&mut self, shard: u32, channel: u32, a: u32) { + fn add_u16_range_check(&mut self, shard: u32, channel: u8, a: u16) { self.add_byte_lookup_event(ByteLookupEvent { shard, channel, @@ -78,7 +93,7 @@ pub trait ByteRecord { } /// Adds `ByteLookupEvent`s to verify that all the bytes in the input slice are indeed bytes. - fn add_u8_range_checks(&mut self, shard: u32, channel: u32, bytes: &[u8]) { + fn add_u8_range_checks(&mut self, shard: u32, channel: u8, bytes: &[u8]) { let mut index = 0; while index + 1 < bytes.len() { self.add_u8_range_check(shard, channel, bytes[index], bytes[index + 1]); @@ -95,7 +110,7 @@ pub trait ByteRecord { fn add_u8_range_checks_field( &mut self, shard: u32, - channel: u32, + channel: u8, field_values: &[F], ) { self.add_u8_range_checks( @@ -109,21 +124,21 @@ pub trait ByteRecord { } /// Adds `ByteLookupEvent`s to verify that all the bytes in the input slice are indeed bytes. - fn add_u16_range_checks(&mut self, shard: u32, channel: u32, ls: &[u32]) { + fn add_u16_range_checks(&mut self, shard: u32, channel: u8, ls: &[u16]) { ls.iter() .for_each(|x| self.add_u16_range_check(shard, channel, *x)); } /// Adds a `ByteLookupEvent` to compute the bitwise OR of the two input values. - fn lookup_or(&mut self, shard: u32, channel: u32, b: u8, c: u8) { + fn lookup_or(&mut self, shard: u32, channel: u8, b: u8, c: u8) { self.add_byte_lookup_event(ByteLookupEvent { shard, channel, opcode: ByteOpcode::OR, - a1: (b | c) as u32, + a1: (b | c) as u16, a2: 0, - b: b as u32, - c: c as u32, + b, + c, }); } } @@ -131,15 +146,7 @@ pub trait ByteRecord { impl ByteLookupEvent { /// Creates a new `ByteLookupEvent`. #[inline(always)] - pub fn new( - shard: u32, - channel: u32, - opcode: ByteOpcode, - a1: u32, - a2: u32, - b: u32, - c: u32, - ) -> Self { + pub fn new(shard: u32, channel: u8, opcode: ByteOpcode, a1: u16, a2: u8, b: u8, c: u8) -> Self { Self { shard, channel, diff --git a/core/src/bytes/mod.rs b/core/src/bytes/mod.rs index e751bb6cb0..ec87c06987 100644 --- a/core/src/bytes/mod.rs +++ b/core/src/bytes/mod.rs @@ -23,7 +23,7 @@ use crate::bytes::trace::NUM_ROWS; pub const NUM_BYTE_OPS: usize = 9; /// The number of different byte lookup channels. -pub const NUM_BYTE_LOOKUP_CHANNELS: u32 = 16; +pub const NUM_BYTE_LOOKUP_CHANNELS: u8 = 16; /// A chip for computing byte operations. 
/// @@ -64,66 +64,46 @@ impl ByteChip { ByteOpcode::AND => { let and = b & c; col.and = F::from_canonical_u8(and); - ByteLookupEvent::new( - shard, channel, *opcode, and as u32, 0, b as u32, c as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, and as u16, 0, b, c) } ByteOpcode::OR => { let or = b | c; col.or = F::from_canonical_u8(or); - ByteLookupEvent::new( - shard, channel, *opcode, or as u32, 0, b as u32, c as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, or as u16, 0, b, c) } ByteOpcode::XOR => { let xor = b ^ c; col.xor = F::from_canonical_u8(xor); - ByteLookupEvent::new( - shard, channel, *opcode, xor as u32, 0, b as u32, c as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, xor as u16, 0, b, c) } ByteOpcode::SLL => { let sll = b << (c & 7); col.sll = F::from_canonical_u8(sll); - ByteLookupEvent::new( - shard, channel, *opcode, sll as u32, 0, b as u32, c as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, sll as u16, 0, b, c) } ByteOpcode::U8Range => { - ByteLookupEvent::new(shard, channel, *opcode, 0, 0, b as u32, c as u32) + ByteLookupEvent::new(shard, channel, *opcode, 0, 0, b, c) } ByteOpcode::ShrCarry => { let (res, carry) = shr_carry(b, c); col.shr = F::from_canonical_u8(res); col.shr_carry = F::from_canonical_u8(carry); - ByteLookupEvent::new( - shard, - channel, - *opcode, - res as u32, - carry as u32, - b as u32, - c as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, res as u16, carry, b, c) } ByteOpcode::LTU => { let ltu = b < c; col.ltu = F::from_bool(ltu); - ByteLookupEvent::new( - shard, channel, *opcode, ltu as u32, 0, b as u32, c as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, ltu as u16, 0, b, c) } ByteOpcode::MSB => { let msb = (b & 0b1000_0000) != 0; col.msb = F::from_bool(msb); - ByteLookupEvent::new( - shard, channel, *opcode, msb as u32, 0, b as u32, 0 as u32, - ) + ByteLookupEvent::new(shard, channel, *opcode, msb as u16, 0, b, 0) } ByteOpcode::U16Range => { let v = ((b as u32) << 8) + c as u32; col.value_u16 = F::from_canonical_u32(v); - ByteLookupEvent::new(shard, channel, *opcode, v, 0, 0, 0) + ByteLookupEvent::new(shard, channel, *opcode, v as u16, 0, 0, 0) } }; } diff --git a/core/src/bytes/trace.rs b/core/src/bytes/trace.rs index abbdacdf2c..3db90683ff 100644 --- a/core/src/bytes/trace.rs +++ b/core/src/bytes/trace.rs @@ -56,7 +56,7 @@ impl MachineAir for ByteChip { .iter() { let row = if lookup.opcode != ByteOpcode::U16Range { - ((lookup.b << 8) + lookup.c) as usize + (((lookup.b as u16) << 8) + lookup.c as u16) as usize } else { lookup.a1 as usize }; diff --git a/core/src/cpu/columns/channel.rs b/core/src/cpu/columns/channel.rs index ba6f763eea..bc55a9acad 100644 --- a/core/src/cpu/columns/channel.rs +++ b/core/src/cpu/columns/channel.rs @@ -13,7 +13,7 @@ pub struct ChannelSelectorCols { impl ChannelSelectorCols { #[inline(always)] - pub fn populate(&mut self, channel: u32) { + pub fn populate(&mut self, channel: u8) { self.channel_selectors = [F::zero(); NUM_BYTE_LOOKUP_CHANNELS as usize]; self.channel_selectors[channel as usize] = F::one(); } diff --git a/core/src/cpu/columns/opcode_specific.rs b/core/src/cpu/columns/opcode_specific.rs index e40edcbbdc..4a6d9207f7 100644 --- a/core/src/cpu/columns/opcode_specific.rs +++ b/core/src/cpu/columns/opcode_specific.rs @@ -2,6 +2,8 @@ use crate::cpu::columns::{AuipcCols, BranchCols, JumpCols, MemoryColumns}; use std::fmt::{Debug, Formatter}; use std::mem::{size_of, transmute}; +use static_assertions::const_assert; + use super::ecall::EcallCols; pub 
const NUM_OPCODE_SPECIFIC_COLS: usize = size_of::>(); @@ -19,6 +21,9 @@ pub union OpcodeSpecificCols { impl Default for OpcodeSpecificCols { fn default() -> Self { + // We must use the largest field to avoid uninitialized padding bytes. + const_assert!(size_of::>() == size_of::>()); + OpcodeSpecificCols { memory: MemoryColumns::default(), } diff --git a/core/src/cpu/event.rs b/core/src/cpu/event.rs index 2e68eddbf7..06fb627726 100644 --- a/core/src/cpu/event.rs +++ b/core/src/cpu/event.rs @@ -10,7 +10,7 @@ pub struct CpuEvent { pub shard: u32, /// The current channel. - pub channel: u32, + pub channel: u8, /// The current clock. pub clk: u32, diff --git a/core/src/cpu/trace.rs b/core/src/cpu/trace.rs index 2029629774..48428b4f74 100644 --- a/core/src/cpu/trace.rs +++ b/core/src/cpu/trace.rs @@ -165,8 +165,8 @@ impl CpuChip { opcode: ByteOpcode::U8Range, a1: 0, a2: 0, - b: a_bytes[0], - c: a_bytes[1], + b: a_bytes[0] as u8, + c: a_bytes[1] as u8, }); blu_events.add_byte_lookup_event(ByteLookupEvent { shard: event.shard, @@ -174,8 +174,8 @@ impl CpuChip { opcode: ByteOpcode::U8Range, a1: 0, a2: 0, - b: a_bytes[2], - c: a_bytes[3], + b: a_bytes[2] as u8, + c: a_bytes[3] as u8, }); // Populate memory accesses for reading from memory. @@ -214,13 +214,13 @@ impl CpuChip { blu_events: &mut impl ByteRecord, ) { cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); - let clk_16bit_limb = event.clk & 0xffff; - let clk_8bit_limb = (event.clk >> 16) & 0xff; - cols.clk_16bit_limb = F::from_canonical_u32(clk_16bit_limb); - cols.clk_8bit_limb = F::from_canonical_u32(clk_8bit_limb); + let clk_16bit_limb = (event.clk & 0xffff) as u16; + let clk_8bit_limb = ((event.clk >> 16) & 0xff) as u8; + cols.clk_16bit_limb = F::from_canonical_u16(clk_16bit_limb); + cols.clk_8bit_limb = F::from_canonical_u8(clk_8bit_limb); cols.channel_selectors.populate(event.channel); @@ -228,7 +228,7 @@ impl CpuChip { event.shard, event.channel, U16Range, - event.shard, + event.shard as u16, 0, 0, 0, @@ -249,7 +249,7 @@ impl CpuChip { 0, 0, 0, - clk_8bit_limb, + clk_8bit_limb as u8, )); } @@ -399,8 +399,8 @@ impl CpuChip { opcode: ByteOpcode::U8Range, a1: 0, a2: 0, - b: byte_pair[0] as u32, - c: byte_pair[1] as u32, + b: byte_pair[0], + c: byte_pair[1], }); } } @@ -773,7 +773,7 @@ mod tests { use crate::runtime::tests::ssz_withdrawals_program; use crate::runtime::{tests::simple_program, Runtime}; - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::utils::{run_test, setup_logger, SP1CoreOpts}; // #[test] @@ -835,6 +835,6 @@ mod tests { fn prove_trace() { setup_logger(); let program = simple_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } diff --git a/core/src/lib.rs b/core/src/lib.rs index 2e9fecd64d..e9e1e56099 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -37,4 +37,4 @@ use stark::StarkGenericConfig; /// This string should be updated whenever any step in verifying an SP1 proof changes, including /// core, recursion, and plonk-bn254. This string is used to download SP1 artifacts and the gnark /// docker image. 
-pub const SP1_CIRCUIT_VERSION: &str = "v1.0.0-rc.1"; +pub const SP1_CIRCUIT_VERSION: &str = "v1.0.1"; diff --git a/core/src/memory/global.rs b/core/src/memory/global.rs index afa97c844f..bdd5b7e82f 100644 --- a/core/src/memory/global.rs +++ b/core/src/memory/global.rs @@ -53,6 +53,10 @@ impl MachineAir for MemoryChip { } } + fn generate_dependencies(&self, _input: &ExecutionRecord, _output: &mut ExecutionRecord) { + // Do nothing since this chip has no dependencies. + } + fn generate_trace( &self, input: &ExecutionRecord, diff --git a/core/src/memory/trace.rs b/core/src/memory/trace.rs index 678dfcf7bc..d3f60e9a3a 100644 --- a/core/src/memory/trace.rs +++ b/core/src/memory/trace.rs @@ -7,7 +7,7 @@ use crate::runtime::{MemoryReadRecord, MemoryRecord, MemoryRecordEnum, MemoryWri impl MemoryWriteCols { pub fn populate( &mut self, - channel: u32, + channel: u8, record: MemoryWriteRecord, output: &mut impl ByteRecord, ) { @@ -30,7 +30,7 @@ impl MemoryWriteCols { impl MemoryReadCols { pub fn populate( &mut self, - channel: u32, + channel: u8, record: MemoryReadRecord, output: &mut impl ByteRecord, ) { @@ -52,7 +52,7 @@ impl MemoryReadCols { impl MemoryReadWriteCols { pub fn populate( &mut self, - channel: u32, + channel: u8, record: MemoryRecordEnum, output: &mut impl ByteRecord, ) { @@ -66,7 +66,7 @@ impl MemoryReadWriteCols { pub fn populate_write( &mut self, - channel: u32, + channel: u8, record: MemoryWriteRecord, output: &mut impl ByteRecord, ) { @@ -87,7 +87,7 @@ impl MemoryReadWriteCols { pub fn populate_read( &mut self, - channel: u32, + channel: u8, record: MemoryReadRecord, output: &mut impl ByteRecord, ) { @@ -110,7 +110,7 @@ impl MemoryReadWriteCols { impl MemoryAccessCols { pub(crate) fn populate_access( &mut self, - channel: u32, + channel: u8, current_record: MemoryRecord, prev_record: MemoryRecord, output: &mut impl ByteRecord, @@ -135,8 +135,8 @@ impl MemoryAccessCols { }; let diff_minus_one = current_time_value - prev_time_value - 1; - let diff_16bit_limb = diff_minus_one & 0xffff; - self.diff_16bit_limb = F::from_canonical_u32(diff_16bit_limb); + let diff_16bit_limb = (diff_minus_one & 0xffff) as u16; + self.diff_16bit_limb = F::from_canonical_u16(diff_16bit_limb); let diff_8bit_limb = (diff_minus_one >> 16) & 0xff; self.diff_8bit_limb = F::from_canonical_u32(diff_8bit_limb); diff --git a/core/src/operations/add.rs b/core/src/operations/add.rs index 0ede31dd93..3b9a2b90fb 100644 --- a/core/src/operations/add.rs +++ b/core/src/operations/add.rs @@ -23,7 +23,7 @@ impl AddOperation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a_u32: u32, b_u32: u32, ) -> u32 { diff --git a/core/src/operations/add4.rs b/core/src/operations/add4.rs index 39449571b7..1bcb2f1cae 100644 --- a/core/src/operations/add4.rs +++ b/core/src/operations/add4.rs @@ -37,7 +37,7 @@ impl Add4Operation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a_u32: u32, b_u32: u32, c_u32: u32, diff --git a/core/src/operations/add5.rs b/core/src/operations/add5.rs index b25c41243b..60b86dd6cc 100644 --- a/core/src/operations/add5.rs +++ b/core/src/operations/add5.rs @@ -40,7 +40,7 @@ impl Add5Operation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a_u32: u32, b_u32: u32, c_u32: u32, diff --git a/core/src/operations/and.rs b/core/src/operations/and.rs index 947c2d1384..1eb5cb42ea 100644 --- a/core/src/operations/and.rs +++ b/core/src/operations/and.rs @@ -22,7 +22,7 @@ impl AndOperation { &mut self, record: 
&mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, x: u32, y: u32, ) -> u32 { @@ -37,10 +37,10 @@ impl AndOperation { shard, channel, opcode: ByteOpcode::AND, - a1: and as u32, + a1: and as u16, a2: 0, - b: x_bytes[i] as u32, - c: y_bytes[i] as u32, + b: x_bytes[i], + c: y_bytes[i], }; record.add_byte_lookup_event(byte_event); } diff --git a/core/src/operations/field/field_den.rs b/core/src/operations/field/field_den.rs index b1f73e68ec..1367a67f5d 100644 --- a/core/src/operations/field/field_den.rs +++ b/core/src/operations/field/field_den.rs @@ -33,7 +33,7 @@ impl FieldDenCols { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a: &BigUint, b: &BigUint, sign: bool, diff --git a/core/src/operations/field/field_inner_product.rs b/core/src/operations/field/field_inner_product.rs index 1a585e4e4c..859e1642e9 100644 --- a/core/src/operations/field/field_inner_product.rs +++ b/core/src/operations/field/field_inner_product.rs @@ -34,7 +34,7 @@ impl FieldInnerProductCols { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a: &[BigUint], b: &[BigUint], ) -> BigUint { diff --git a/core/src/operations/field/field_op.rs b/core/src/operations/field/field_op.rs index 995142c2f2..848099b7f5 100644 --- a/core/src/operations/field/field_op.rs +++ b/core/src/operations/field/field_op.rs @@ -113,7 +113,7 @@ impl FieldOpCols { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a: &BigUint, b: &BigUint, modulus: &BigUint, @@ -174,7 +174,7 @@ impl FieldOpCols { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a: &BigUint, b: &BigUint, op: FieldOperation, diff --git a/core/src/operations/field/field_sqrt.rs b/core/src/operations/field/field_sqrt.rs index 802cb395a5..0f1e7f69e0 100644 --- a/core/src/operations/field/field_sqrt.rs +++ b/core/src/operations/field/field_sqrt.rs @@ -41,7 +41,7 @@ impl FieldSqrtCols { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a: &BigUint, sqrt_fn: impl Fn(&BigUint) -> BigUint, ) -> BigUint { @@ -76,9 +76,9 @@ impl FieldSqrtCols { shard, channel, opcode: ByteOpcode::AND, - a1: self.lsb.as_canonical_u32(), + a1: self.lsb.as_canonical_u32() as u16, a2: 0, - b: sqrt_bytes[0] as u32, + b: sqrt_bytes[0], c: 1, }; record.add_byte_lookup_event(and_event); diff --git a/core/src/operations/field/range.rs b/core/src/operations/field/range.rs index 04c7699ad7..3f652021a1 100644 --- a/core/src/operations/field/range.rs +++ b/core/src/operations/field/range.rs @@ -36,7 +36,7 @@ impl FieldLtCols { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, lhs: &BigUint, rhs: &BigUint, ) { @@ -63,8 +63,8 @@ impl FieldLtCols { channel, a1: 1, a2: 0, - b: *byte as u32, - c: *modulus_byte as u32, + b: *byte, + c: *modulus_byte, }); break; } diff --git a/core/src/operations/field/util.rs b/core/src/operations/field/util.rs index 201fd135ca..079f90d79c 100644 --- a/core/src/operations/field/util.rs +++ b/core/src/operations/field/util.rs @@ -23,12 +23,13 @@ pub fn compute_root_quotient_and_shift( nb_limbs: usize, ) -> Vec { // Evaluate the vanishing polynomial at x = 2^nb_bits_per_limb. 
+ let p_vanishing_eval = p_vanishing .coefficients() .iter() .enumerate() .map(|(i, x)| { - biguint_to_field::(BigUint::from(2u32).pow(nb_bits_per_limb * i as u32)) * *x + biguint_to_field::(BigUint::from(2u32) << (nb_bits_per_limb * i as u32)) * *x }) .sum::(); debug_assert_eq!(p_vanishing_eval, F::zero()); diff --git a/core/src/operations/fixed_rotate_right.rs b/core/src/operations/fixed_rotate_right.rs index 4de8eee4aa..3979925311 100644 --- a/core/src/operations/fixed_rotate_right.rs +++ b/core/src/operations/fixed_rotate_right.rs @@ -44,7 +44,7 @@ impl FixedRotateRightOperation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, input: u32, rotation: usize, ) -> u32 { @@ -78,10 +78,10 @@ impl FixedRotateRightOperation { shard, channel, opcode: ByteOpcode::ShrCarry, - a1: shift as u32, - a2: carry as u32, - b: b as u32, - c: c as u32, + a1: shift as u16, + a2: carry, + b, + c, }; record.add_byte_lookup_event(byte_event); diff --git a/core/src/operations/fixed_shift_right.rs b/core/src/operations/fixed_shift_right.rs index 42fdc637d9..7ec611faff 100644 --- a/core/src/operations/fixed_shift_right.rs +++ b/core/src/operations/fixed_shift_right.rs @@ -44,7 +44,7 @@ impl FixedShiftRightOperation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, input: u32, rotation: usize, ) -> u32 { @@ -77,10 +77,10 @@ impl FixedShiftRightOperation { shard, channel, opcode: ByteOpcode::ShrCarry, - a1: shift as u32, - a2: carry as u32, - b: b as u32, - c: c as u32, + a1: shift as u16, + a2: carry, + b, + c, }; record.add_byte_lookup_event(byte_event); diff --git a/core/src/operations/lt.rs b/core/src/operations/lt.rs index fcc344db24..619e96fc9b 100644 --- a/core/src/operations/lt.rs +++ b/core/src/operations/lt.rs @@ -28,7 +28,7 @@ impl AssertLtColsBytes { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, a: &[u8], b: &[u8], ) { @@ -48,8 +48,8 @@ impl AssertLtColsBytes { channel, a1: 1, a2: 0, - b: *a_byte as u32, - c: *b_byte as u32, + b: *a_byte, + c: *b_byte, }); break; } diff --git a/core/src/operations/not.rs b/core/src/operations/not.rs index 769d255030..84b6b32ffe 100644 --- a/core/src/operations/not.rs +++ b/core/src/operations/not.rs @@ -22,7 +22,7 @@ impl NotOperation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, x: u32, ) -> u32 { let expected = !x; diff --git a/core/src/operations/or.rs b/core/src/operations/or.rs index b30821532a..7f2e78b3b9 100644 --- a/core/src/operations/or.rs +++ b/core/src/operations/or.rs @@ -22,7 +22,7 @@ impl OrOperation { &mut self, record: &mut ExecutionRecord, shard: u32, - channel: u32, + channel: u8, x: u32, y: u32, ) -> u32 { diff --git a/core/src/operations/xor.rs b/core/src/operations/xor.rs index fe0cadf966..003ce48c2f 100644 --- a/core/src/operations/xor.rs +++ b/core/src/operations/xor.rs @@ -22,7 +22,7 @@ impl XorOperation { &mut self, record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, x: u32, y: u32, ) -> u32 { @@ -37,10 +37,10 @@ impl XorOperation { shard, channel, opcode: ByteOpcode::XOR, - a1: xor as u32, + a1: xor as u16, a2: 0, - b: x_bytes[i] as u32, - c: y_bytes[i] as u32, + b: x_bytes[i], + c: y_bytes[i], }; record.add_byte_lookup_event(byte_event); } diff --git a/core/src/runtime/hooks.rs b/core/src/runtime/hooks.rs index ff93af248b..80a4bcf1bf 100644 --- a/core/src/runtime/hooks.rs +++ b/core/src/runtime/hooks.rs @@ -130,7 +130,7 @@ pub fn hook_ecrecover(_env: HookEnv, buf: &[u8]) -> 
Vec> { pub mod tests { use crate::{ runtime::Program, - stark::DefaultProver, + stark::CpuProver, utils::{self, tests::ECRECOVER_ELF}, }; @@ -157,6 +157,6 @@ pub mod tests { fn test_ecrecover_program_prove() { utils::setup_logger(); let program = Program::from(ECRECOVER_ELF); - utils::run_test::>(program).unwrap(); + utils::run_test::>(program).unwrap(); } } diff --git a/core/src/runtime/io.rs b/core/src/runtime/io.rs index 6eebbb4bd2..cbc927e459 100644 --- a/core/src/runtime/io.rs +++ b/core/src/runtime/io.rs @@ -59,7 +59,7 @@ impl<'a> Runtime<'a> { pub mod tests { use super::*; use crate::runtime::Program; - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::utils::tests::IO_ELF; use crate::utils::{self, prove_simple, BabyBearBlake3, SP1CoreOpts}; use serde::Deserialize; @@ -116,6 +116,6 @@ pub mod tests { runtime.write_stdin(&points.1); runtime.run().unwrap(); let config = BabyBearBlake3::new(); - prove_simple::<_, DefaultProver<_, _>>(config, runtime).unwrap(); + prove_simple::<_, CpuProver<_, _>>(config, runtime).unwrap(); } } diff --git a/core/src/runtime/mod.rs b/core/src/runtime/mod.rs index beded5eb66..920f5012bd 100644 --- a/core/src/runtime/mod.rs +++ b/core/src/runtime/mod.rs @@ -35,6 +35,7 @@ use std::io::BufWriter; use std::io::Write; use std::sync::Arc; +use serde::{Deserialize, Serialize}; use thiserror::Error; use crate::alu::create_alu_lookup_id; @@ -118,7 +119,7 @@ pub struct Runtime<'a> { pub max_cycles: Option, } -#[derive(Error, Debug)] +#[derive(Error, Debug, Serialize, Deserialize)] pub enum ExecutionError { #[error("execution failed with exit code {0}")] HaltWithNonZeroExitCode(u32), @@ -266,7 +267,7 @@ impl<'a> Runtime<'a> { } #[inline] - pub fn channel(&self) -> u32 { + pub fn channel(&self) -> u8 { self.state.channel } @@ -434,7 +435,7 @@ impl<'a> Runtime<'a> { fn emit_cpu( &mut self, shard: u32, - channel: u32, + channel: u8, clk: u32, pc: u32, next_pc: u32, diff --git a/core/src/runtime/record.rs b/core/src/runtime/record.rs index c989c3be05..2dfec4bd15 100644 --- a/core/src/runtime/record.rs +++ b/core/src/runtime/record.rs @@ -104,7 +104,7 @@ pub struct ExecutionRecord { pub nonce_lookup: HashMap, } -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct SplitOpts { pub deferred_shift_threshold: usize, pub keccak_split_threshold: usize, @@ -120,7 +120,7 @@ impl SplitOpts { keccak_split_threshold: deferred_shift_threshold / 24, sha_extend_split_threshold: deferred_shift_threshold / 48, sha_compress_split_threshold: deferred_shift_threshold / 80, - memory_split_threshold: deferred_shift_threshold, + memory_split_threshold: deferred_shift_threshold * 4, } } } @@ -520,8 +520,11 @@ impl ExecutionRecord { opts.deferred_shift_threshold, last ); + // _ = last_pct; if last { + // shards.push(last_shard); + self.memory_initialize_events .sort_by_key(|event| event.addr); self.memory_finalize_events.sort_by_key(|event| event.addr); diff --git a/core/src/runtime/state.rs b/core/src/runtime/state.rs index d5a1f3bdf1..0368fe4718 100644 --- a/core/src/runtime/state.rs +++ b/core/src/runtime/state.rs @@ -31,7 +31,7 @@ pub struct ExecutionState { /// The channel alternates between 0 and [crate::bytes::NUM_BYTE_LOOKUP_CHANNELS], /// used to controll byte lookup multiplicity. - pub channel: u32, + pub channel: u8, /// The program counter. 
pub pc: u32, diff --git a/core/src/runtime/syscall.rs b/core/src/runtime/syscall.rs index 2cf3299219..7a6465ee30 100644 --- a/core/src/runtime/syscall.rs +++ b/core/src/runtime/syscall.rs @@ -203,7 +203,7 @@ impl<'a, 'b> SyscallContext<'a, 'b> { self.rt.state.current_shard } - pub fn current_channel(&self) -> u32 { + pub fn current_channel(&self) -> u8 { self.rt.state.channel } diff --git a/core/src/stark/machine.rs b/core/src/stark/machine.rs index e01f5887cc..dc6271d2be 100644 --- a/core/src/stark/machine.rs +++ b/core/src/stark/machine.rs @@ -503,7 +503,7 @@ pub mod tests { use crate::runtime::Instruction; use crate::runtime::Opcode; use crate::runtime::Program; - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::stark::RiscvAir; use crate::stark::StarkProvingKey; use crate::stark::StarkVerifyingKey; @@ -518,7 +518,7 @@ pub mod tests { fn test_simple_prove() { utils::setup_logger(); let program = simple_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] @@ -540,7 +540,7 @@ pub mod tests { Instruction::new(*shift_op, 31, 29, 3, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } } @@ -554,7 +554,7 @@ pub mod tests { Instruction::new(Opcode::SUB, 31, 30, 29, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] @@ -566,7 +566,7 @@ pub mod tests { Instruction::new(Opcode::ADD, 31, 30, 29, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] @@ -588,7 +588,7 @@ pub mod tests { Instruction::new(*mul_op, 31, 30, 29, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } } @@ -604,7 +604,7 @@ pub mod tests { Instruction::new(*lt_op, 31, 30, 29, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } @@ -620,7 +620,7 @@ pub mod tests { Instruction::new(*bitwise_op, 31, 30, 29, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } @@ -643,7 +643,7 @@ pub mod tests { Instruction::new(*div_rem_op, 31, 29, 30, false, false), ]; let program = Program::new(instructions, 0, 0); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } } @@ -652,7 +652,7 @@ pub mod tests { fn test_fibonacci_prove_simple() { setup_logger(); let program = fibonacci_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] @@ -664,7 +664,7 @@ pub mod tests { let mut opts = SP1CoreOpts::default(); opts.shard_size = 1024; opts.shard_batch_size = 2; - prove::<_, DefaultProver<_, _>>(program, &stdin, BabyBearPoseidon2::new(), opts).unwrap(); + prove::<_, CpuProver<_, _>>(program, &stdin, BabyBearPoseidon2::new(), opts).unwrap(); } #[test] @@ -672,7 +672,7 @@ pub mod tests { setup_logger(); let program = fibonacci_program(); let stdin = SP1Stdin::new(); - prove::<_, DefaultProver<_, _>>( + prove::<_, CpuProver<_, _>>( program, &stdin, BabyBearPoseidon2::new(), @@ -685,14 +685,14 @@ pub mod tests { fn test_simple_memory_program_prove() { setup_logger(); let program = simple_memory_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_ssz_withdrawal() { setup_logger(); let program = 
ssz_withdrawals_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] diff --git a/core/src/stark/prover.rs b/core/src/stark/prover.rs index 9a961453e9..3806a136ce 100644 --- a/core/src/stark/prover.rs +++ b/core/src/stark/prover.rs @@ -1,3 +1,4 @@ +use core::fmt::Display; use serde::de::DeserializeOwned; use serde::Serialize; use std::cmp::Reverse; @@ -33,6 +34,13 @@ use crate::utils::SP1CoreOpts; pub trait MachineProver>: 'static + Send + Sync { + /// The type used to store the traces. + type DeviceMatrix; + + /// The type used to store the polynomial commitment schemes data. + type DeviceProverData; + + /// The type used for error handling. type Error: Error + Send + Sync; /// Create a new prover from a given machine. @@ -41,14 +49,57 @@ pub trait MachineProver>: /// A reference to the machine that this prover is using. fn machine(&self) -> &StarkMachine; - /// Calculate the main commitment for a given record. - fn commit(&self, record: &A::Record) -> Com; + /// Setup the preprocessed data into a proving and verifying key. + fn setup(&self, program: &A::Program) -> (StarkProvingKey, StarkVerifyingKey) { + self.machine().setup(program) + } + + /// Generate the main traces. + fn generate_traces(&self, record: &A::Record) -> Vec<(String, RowMajorMatrix>)> { + // Filter the chips based on what is used. + let shard_chips = self.shard_chips(record).collect::>(); + + // For each chip, generate the trace. + let parent_span = tracing::debug_span!("generate traces for shard"); + parent_span.in_scope(|| { + shard_chips + .par_iter() + .map(|chip| { + let chip_name = chip.name(); + let trace = tracing::debug_span!(parent: &parent_span, "generate trace for chip", %chip_name) + .in_scope(|| chip.generate_trace(record, &mut A::Record::default())); + (chip_name, trace) + }) + .collect::>() + }) + } - /// Commit and generate a proof for a given record, using the given challenger. - fn commit_and_open( + /// Commit to the main traces. + fn commit( &self, - pk: &StarkProvingKey, record: A::Record, + traces: Vec<(String, RowMajorMatrix>)>, + ) -> ShardMainData; + + /// Observe the main commitment and public values and update the challenger. + fn observe( + &self, + challenger: &mut SC::Challenger, + commitment: Com, + public_values: &[SC::Val], + ) { + // Observe the commitment. + challenger.observe(commitment); + + // Observe the public values. + challenger.observe_slice(public_values); + } + + /// Compute the openings of the traces. + fn open( + &self, + pk: &StarkProvingKey, + data: ShardMainData, challenger: &mut SC::Challenger, ) -> Result, Self::Error>; @@ -68,38 +119,24 @@ pub trait MachineProver>: self.machine().config() } + /// The number of public values elements. fn num_pv_elts(&self) -> usize { self.machine().num_pv_elts() } + /// The chips that will be necessary to prove this record. fn shard_chips<'a, 'b>( &'a self, - shard: &'b A::Record, + record: &'b A::Record, ) -> impl Iterator> where 'a: 'b, SC: 'b, { - self.machine().shard_chips(shard) - } - - fn setup(&self, program: &A::Program) -> (StarkProvingKey, StarkVerifyingKey) { - self.machine().setup(program) - } - - /// Update the challenger with the given shard data - fn update( - &self, - challenger: &mut SC::Challenger, - commitment: Com, - public_values: &[SC::Val], - ) { - // Observe the commitment. - challenger.observe(commitment); - // Observe the public values. - challenger.observe_slice(public_values); + self.machine().shard_chips(record) } + /// Debug the constraints for the given inputs. 
fn debug_constraints( &self, pk: &StarkProvingKey, @@ -113,33 +150,14 @@ pub trait MachineProver>: } } -#[allow(dead_code)] -pub fn chunk_vec(mut vec: Vec, chunk_size: usize) -> Vec> { - let mut result = Vec::new(); - while !vec.is_empty() { - let current_chunk_size = std::cmp::min(chunk_size, vec.len()); - let current_chunk = vec.drain(..current_chunk_size).collect::>(); - result.push(current_chunk); - } - result -} - -pub struct DefaultProver { +pub struct CpuProver { machine: StarkMachine, } #[derive(Debug, Clone, Copy)] -pub struct DefaultProverError; +pub struct CpuProverError; -impl std::fmt::Display for DefaultProverError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "DefaultProverError") - } -} - -impl Error for DefaultProverError {} - -impl MachineProver for DefaultProver +impl MachineProver for CpuProver where SC: 'static + StarkGenericConfig + Send + Sync, A: MachineAir @@ -149,12 +167,13 @@ where A::Record: MachineRecord, SC::Val: PrimeField32, Com: Send + Sync, - PcsProverData: Send + Sync, + PcsProverData: Send + Sync + Serialize + DeserializeOwned, OpeningProof: Send + Sync, - ShardMainData: Serialize + DeserializeOwned, SC::Challenger: Clone, { - type Error = DefaultProverError; + type DeviceMatrix = RowMajorMatrix>; + type DeviceProverData = PcsProverData; + type Error = CpuProverError; fn new(machine: StarkMachine) -> Self { Self { machine } @@ -164,111 +183,11 @@ where &self.machine } - fn commit(&self, record: &A::Record) -> Com { - self.commit_main(record).main_commit - } - - /// Prove the execution record is valid. - /// - /// Given a proving key `pk` and a matching execution record `record`, this function generates - /// a STARK proof that the execution record is valid. - fn prove( - &self, - pk: &StarkProvingKey, - mut records: Vec, - challenger: &mut SC::Challenger, - opts: ::Config, - ) -> Result, Self::Error> - where - A: for<'a> Air, SC::Challenge>>, - { - let chips = self.machine().chips(); - records.iter_mut().for_each(|record| { - chips.iter().for_each(|chip| { - let mut output = A::Record::default(); - chip.generate_dependencies(record, &mut output); - record.append(&mut output); - }); - record.register_nonces(&opts); - }); - - // Observe the preprocessed commitment. - pk.observe_into(challenger); - - // Generate and commit the traces for each shard. - let shard_data = records - .into_par_iter() - .map(|record| self.commit_main(&record)) - .collect::>(); - - // Observe the challenges for each segment. - tracing::debug_span!("observing all challenges").in_scope(|| { - shard_data.iter().for_each(|data| { - challenger.observe(data.main_commit.clone()); - challenger.observe_slice(&data.public_values[0..self.num_pv_elts()]); - }); - }); - - let shard_proofs = tracing::info_span!("prove_shards").in_scope(|| { - shard_data - .into_par_iter() - .map(|data| self.prove_shard(pk, data, &mut challenger.clone())) - .collect::, _>>() - })?; - - Ok(MachineProof { shard_proofs }) - } - - /// Prove the program for the given shard and given a commitment to the main data. 
- fn commit_and_open( + fn commit( &self, - pk: &StarkProvingKey, record: A::Record, - challenger: &mut ::Challenger, - ) -> Result, Self::Error> { - let shard_data = self.commit_main(&record); - self.prove_shard(pk, shard_data, challenger) - } -} - -impl DefaultProver -where - SC: 'static + StarkGenericConfig + Send + Sync, - A: MachineAir - + for<'a> Air> - + Air>> - + for<'a> Air>, - A::Record: MachineRecord, - SC::Val: PrimeField32, - Com: Send + Sync, - PcsProverData: Send + Sync, - OpeningProof: Send + Sync, - ShardMainData: Serialize + DeserializeOwned, - SC::Challenger: Clone, -{ - fn commit_main(&self, shard: &A::Record) -> ShardMainData { - // Filter the chips based on what is used. - let shard_chips = self.shard_chips(shard).collect::>(); - - // For each chip, generate the trace. - let parent_span = tracing::debug_span!("generate traces for shard"); - let mut named_traces = parent_span.in_scope(|| { - shard_chips - .par_iter() - .map(|chip| { - let chip_name = chip.name(); - - // We need to create an outer span here because, for some reason, - // the #[instrument] macro on the chip impl isn't attaching its span to `parent_span` - // to avoid the unnecessary span, remove the #[instrument] macro. - let trace = - tracing::debug_span!(parent: &parent_span, "generate trace for chip", %chip_name) - .in_scope(|| chip.generate_trace(shard, &mut A::Record::default())); - (chip_name, trace) - }) - .collect::>() - }); - + mut named_traces: Vec<(String, RowMajorMatrix>)>, + ) -> ShardMainData { // Order the chips and traces by trace size (biggest first), and get the ordering map. named_traces.sort_by_key(|(_, trace)| Reverse(trace.height())); @@ -302,23 +221,24 @@ where main_commit, main_data, chip_ordering, - public_values: shard.public_values(), + public_values: record.public_values(), } } - fn prove_shard( + /// Prove the program for the given shard and given a commitment to the main data. + fn open( &self, pk: &StarkProvingKey, - mut shard_data: ShardMainData, - challenger: &mut SC::Challenger, - ) -> Result, DefaultProverError> { + mut data: ShardMainData, + challenger: &mut ::Challenger, + ) -> Result, Self::Error> { let chips = self .machine() - .shard_chips_ordered(&shard_data.chip_ordering) + .shard_chips_ordered(&data.chip_ordering) .collect::>(); let config = self.machine().config(); // Get the traces. 
- let traces = &mut shard_data.traces; + let traces = &mut data.traces; let degrees = traces .iter() @@ -450,11 +370,7 @@ where ]) }); let main_trace_on_quotient_domains = pcs - .get_evaluations_on_domain( - &shard_data.main_data, - i, - *quotient_domain, - ) + .get_evaluations_on_domain(&data.main_data, i, *quotient_domain) .to_row_major_matrix(); let permutation_trace_on_quotient_domains = pcs .get_evaluations_on_domain(&permutation_data, i, *quotient_domain) @@ -469,7 +385,7 @@ where permutation_trace_on_quotient_domains, &packed_perm_challenges, alpha, - &shard_data.public_values, + &data.public_values, ) }) }) @@ -537,7 +453,7 @@ where pcs.open( vec![ (&pk.data, preprocessed_opening_points), - (&shard_data.main_data, trace_opening_points.clone()), + (&data.main_data, trace_opening_points.clone()), (&permutation_data, trace_opening_points), (&quotient_data, quotient_opening_points), ], @@ -609,7 +525,7 @@ where Ok(ShardProof:: { commitment: ShardCommitment { - main_commit: shard_data.main_commit.clone(), + main_commit: data.main_commit.clone(), permutation_commit, quotient_commit, }, @@ -617,8 +533,63 @@ where chips: opened_values, }, opening_proof, - chip_ordering: shard_data.chip_ordering, - public_values: shard_data.public_values, + chip_ordering: data.chip_ordering, + public_values: data.public_values, }) } + + /// Prove the execution record is valid. + /// + /// Given a proving key `pk` and a matching execution record `record`, this function generates + /// a STARK proof that the execution record is valid. + fn prove( + &self, + pk: &StarkProvingKey, + mut records: Vec, + challenger: &mut SC::Challenger, + opts: ::Config, + ) -> Result, Self::Error> + where + A: for<'a> Air, SC::Challenge>>, + { + // Generate dependencies. + self.machine().generate_dependencies(&mut records, &opts); + + // Observe the preprocessed commitment. + pk.observe_into(challenger); + + // Generate and commit the traces for each shard. + let shard_data = records + .into_par_iter() + .map(|record| { + let named_traces = self.generate_traces(&record); + self.commit(record, named_traces) + }) + .collect::>(); + + // Observe the challenges for each segment.
+ tracing::debug_span!("observing all challenges").in_scope(|| { + shard_data.iter().for_each(|data| { + challenger.observe(data.main_commit.clone()); + challenger.observe_slice(&data.public_values[0..self.num_pv_elts()]); + }); + }); + + let shard_proofs = tracing::info_span!("prove_shards").in_scope(|| { + shard_data + .into_par_iter() + .map(|data| self.open(pk, data, &mut challenger.clone())) + .collect::, _>>() + })?; + + Ok(MachineProof { shard_proofs }) + } +} + +impl Display for CpuProverError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "DefaultProverError") + } } + +impl Error for CpuProverError {} diff --git a/core/src/stark/types.rs b/core/src/stark/types.rs index f0b6fb49b9..2b52572a66 100644 --- a/core/src/stark/types.rs +++ b/core/src/stark/types.rs @@ -1,31 +1,27 @@ use std::fmt::Debug; use hashbrown::HashMap; -use p3_matrix::dense::RowMajorMatrix; use p3_matrix::dense::RowMajorMatrixView; use p3_matrix::stack::VerticalPair; use serde::{Deserialize, Serialize}; -use super::{Challenge, Com, OpeningProof, PcsProverData, StarkGenericConfig, Val}; +use super::{Challenge, Com, OpeningProof, StarkGenericConfig, Val}; pub type QuotientOpenedValues = Vec; -#[derive(Serialize, Deserialize)] -#[serde(bound(serialize = "PcsProverData: Serialize"))] -#[serde(bound(deserialize = "PcsProverData: Deserialize<'de>"))] -pub struct ShardMainData { - pub traces: Vec>>, +pub struct ShardMainData { + pub traces: Vec, pub main_commit: Com, - pub main_data: PcsProverData, + pub main_data: P, pub chip_ordering: HashMap, pub public_values: Vec, } -impl ShardMainData { +impl ShardMainData { pub const fn new( - traces: Vec>>, + traces: Vec, main_commit: Com, - main_data: PcsProverData, + main_data: P, chip_ordering: HashMap, public_values: Vec>, ) -> Self { diff --git a/core/src/syscall/hint.rs b/core/src/syscall/hint.rs index d0fb51d8be..27d89b396b 100644 --- a/core/src/syscall/hint.rs +++ b/core/src/syscall/hint.rs @@ -83,7 +83,7 @@ mod tests { use crate::{ io::SP1Stdin, runtime::Program, - stark::DefaultProver, + stark::CpuProver, utils::{prove, setup_logger, BabyBearPoseidon2, SP1CoreOpts}, }; @@ -105,6 +105,6 @@ mod tests { let program = Program::from(HINT_IO_ELF); let config = BabyBearPoseidon2::new(); - prove::<_, DefaultProver<_, _>>(program, &stdin, config, SP1CoreOpts::default()).unwrap(); + prove::<_, CpuProver<_, _>>(program, &stdin, config, SP1CoreOpts::default()).unwrap(); } } diff --git a/core/src/syscall/precompiles/edwards/ed_add.rs b/core/src/syscall/precompiles/edwards/ed_add.rs index a51956a388..34e4746cd2 100644 --- a/core/src/syscall/precompiles/edwards/ed_add.rs +++ b/core/src/syscall/precompiles/edwards/ed_add.rs @@ -89,7 +89,7 @@ impl EdAddAssignChip { fn populate_field_ops( record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, cols: &mut EdAddAssignCols, p_x: BigUint, p_y: BigUint, @@ -252,7 +252,7 @@ impl EdAddAssignChip { // Populate basic columns. 
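The reworked `MachineProver` trait above splits shard proving into explicit phases: `generate_traces`, `commit`, `observe`, and `open`, with `prove` as the default orchestration. A hedged, self-contained sketch of that orchestration, using a simplified stand-in trait with toy types (a `Vec<u8>` plays the challenger) rather than the real sp1-core bounds, is:

// Simplified stand-in for the phased prover interface; names mirror the
// real trait but every type here is a toy.
trait PhasedProver {
    type Record;
    type Traces;
    type MainData;
    type Proof;
    type Error;

    fn generate_traces(&self, record: &Self::Record) -> Self::Traces;
    fn commit(&self, record: Self::Record, traces: Self::Traces) -> Self::MainData;
    fn observe(&self, challenger: &mut Vec<u8>, data: &Self::MainData);
    fn open(&self, data: Self::MainData, challenger: &mut Vec<u8>)
        -> Result<Self::Proof, Self::Error>;
}

// Same order as the new `prove`: trace generation and commitment first,
// then observing every commitment, then opening each shard with a clone
// of the updated challenger.
fn prove_all<P: PhasedProver>(
    prover: &P,
    records: Vec<P::Record>,
    challenger: &mut Vec<u8>,
) -> Result<Vec<P::Proof>, P::Error> {
    let shard_data: Vec<P::MainData> = records
        .into_iter()
        .map(|record| {
            let traces = prover.generate_traces(&record);
            prover.commit(record, traces)
        })
        .collect();

    for data in &shard_data {
        prover.observe(challenger, data);
    }

    shard_data
        .into_iter()
        .map(|data| prover.open(data, &mut challenger.clone()))
        .collect()
}

The phase split is what lets the new pipeline generate traces on worker threads while a single prover thread performs the commitments and openings.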
cols.is_real = F::one(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.p_ptr = F::from_canonical_u32(event.p_ptr); cols.q_ptr = F::from_canonical_u32(event.q_ptr); @@ -431,7 +431,7 @@ where #[cfg(test)] mod tests { - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::utils; use crate::utils::tests::{ED25519_ELF, ED_ADD_ELF}; use crate::Program; @@ -440,13 +440,13 @@ mod tests { fn test_ed_add_simple() { utils::setup_logger(); let program = Program::from(ED_ADD_ELF); - utils::run_test::>(program).unwrap(); + utils::run_test::>(program).unwrap(); } #[test] fn test_ed25519_program() { utils::setup_logger(); let program = Program::from(ED25519_ELF); - utils::run_test::>(program).unwrap(); + utils::run_test::>(program).unwrap(); } } diff --git a/core/src/syscall/precompiles/edwards/ed_decompress.rs b/core/src/syscall/precompiles/edwards/ed_decompress.rs index 2fdd7071d5..b708fa8126 100644 --- a/core/src/syscall/precompiles/edwards/ed_decompress.rs +++ b/core/src/syscall/precompiles/edwards/ed_decompress.rs @@ -55,7 +55,7 @@ use super::{WordsFieldElement, WORDS_FIELD_ELEMENT}; pub struct EdDecompressEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub ptr: u32, pub sign: bool, @@ -103,7 +103,7 @@ impl EdDecompressCols { let mut new_byte_lookup_events = Vec::new(); self.is_real = F::from_bool(true); self.shard = F::from_canonical_u32(event.shard); - self.channel = F::from_canonical_u32(event.channel); + self.channel = F::from_canonical_u8(event.channel); self.clk = F::from_canonical_u32(event.clk); self.ptr = F::from_canonical_u32(event.ptr); self.nonce = F::from_canonical_u32( @@ -137,7 +137,7 @@ impl EdDecompressCols { &mut self, blu_events: &mut Vec, shard: u32, - channel: u32, + channel: u8, y: &BigUint, ) { let one = BigUint::one(); @@ -462,7 +462,7 @@ where pub mod tests { use crate::{ runtime::Program, - stark::DefaultProver, + stark::CpuProver, utils::{self, tests::ED_DECOMPRESS_ELF}, }; @@ -470,6 +470,6 @@ pub mod tests { fn test_ed_decompress() { utils::setup_logger(); let program = Program::from(ED_DECOMPRESS_ELF); - utils::run_test::>(program).unwrap(); + utils::run_test::>(program).unwrap(); } } diff --git a/core/src/syscall/precompiles/keccak256/air.rs b/core/src/syscall/precompiles/keccak256/air.rs index 3d4acbdf95..2e70c20d0a 100644 --- a/core/src/syscall/precompiles/keccak256/air.rs +++ b/core/src/syscall/precompiles/keccak256/air.rs @@ -153,7 +153,7 @@ where mod test { use crate::io::{SP1PublicValues, SP1Stdin}; use crate::runtime::Program; - use crate::stark::{DefaultProver, RiscvAir, StarkGenericConfig}; + use crate::stark::{CpuProver, RiscvAir, StarkGenericConfig}; use crate::utils::SP1CoreOpts; use crate::utils::{prove, setup_logger, tests::KECCAK256_ELF, BabyBearPoseidon2}; @@ -191,8 +191,7 @@ mod test { let program = Program::from(KECCAK256_ELF); let (proof, public_values, _) = - prove::<_, DefaultProver<_, _>>(program, &stdin, config, SP1CoreOpts::default()) - .unwrap(); + prove::<_, CpuProver<_, _>>(program, &stdin, config, SP1CoreOpts::default()).unwrap(); let mut public_values = SP1PublicValues::from(&public_values); let config = BabyBearPoseidon2::new(); diff --git a/core/src/syscall/precompiles/keccak256/mod.rs b/core/src/syscall/precompiles/keccak256/mod.rs index d5e1d01190..5be4e52797 100644 --- 
a/core/src/syscall/precompiles/keccak256/mod.rs +++ b/core/src/syscall/precompiles/keccak256/mod.rs @@ -17,7 +17,7 @@ const STATE_NUM_WORDS: usize = STATE_SIZE * 2; pub struct KeccakPermuteEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub pre_state: [u64; STATE_SIZE], pub post_state: [u64; STATE_SIZE], @@ -41,7 +41,7 @@ impl KeccakPermuteChip { #[cfg(test)] pub mod permute_tests { use crate::runtime::SyscallCode; - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::utils::{run_test, SP1CoreOpts}; use crate::{ runtime::{Instruction, Opcode, Program, Runtime}, @@ -86,13 +86,13 @@ pub mod permute_tests { utils::setup_logger(); let program = keccak_permute_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_keccak_permute_program_prove() { utils::setup_logger(); let program = Program::from(KECCAK_PERMUTE_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } diff --git a/core/src/syscall/precompiles/keccak256/trace.rs b/core/src/syscall/precompiles/keccak256/trace.rs index 9ca3839535..3b9b973b92 100644 --- a/core/src/syscall/precompiles/keccak256/trace.rs +++ b/core/src/syscall/precompiles/keccak256/trace.rs @@ -69,7 +69,7 @@ impl MachineAir for KeccakPermuteChip { let cols: &mut KeccakMemCols = row.as_mut_slice().borrow_mut(); cols.shard = F::from_canonical_u32(shard); - cols.channel = F::from_canonical_u32(channel); + cols.channel = F::from_canonical_u8(channel); cols.clk = F::from_canonical_u32(start_clk); cols.state_addr = F::from_canonical_u32(event.state_addr); cols.is_real = F::one(); diff --git a/core/src/syscall/precompiles/mod.rs b/core/src/syscall/precompiles/mod.rs index 43aab6f7fc..aaf751fd2d 100644 --- a/core/src/syscall/precompiles/mod.rs +++ b/core/src/syscall/precompiles/mod.rs @@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize}; pub struct ECAddEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub p_ptr: u32, pub p: Vec, @@ -86,7 +86,7 @@ pub fn create_ec_add_event( pub struct ECDoubleEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub p_ptr: u32, pub p: Vec, @@ -136,7 +136,7 @@ pub fn create_ec_double_event( pub struct ECDecompressEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub ptr: u32, pub sign_bit: bool, diff --git a/core/src/syscall/precompiles/sha256/compress/mod.rs b/core/src/syscall/precompiles/sha256/compress/mod.rs index dcf3749b3a..5dbf5fdd29 100644 --- a/core/src/syscall/precompiles/sha256/compress/mod.rs +++ b/core/src/syscall/precompiles/sha256/compress/mod.rs @@ -22,7 +22,7 @@ pub const SHA_COMPRESS_K: [u32; 64] = [ pub struct ShaCompressEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub w_ptr: u32, pub h_ptr: u32, @@ -54,7 +54,7 @@ pub mod compress_tests { use crate::{ runtime::{Instruction, Opcode, Program, SyscallCode}, - stark::DefaultProver, + stark::CpuProver, utils::{run_test, setup_logger, tests::SHA_COMPRESS_ELF}, }; @@ -94,13 +94,13 @@ pub mod compress_tests { fn prove_babybear() { setup_logger(); let program = sha_compress_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_sha_compress_program() { setup_logger(); let program = Program::from(SHA_COMPRESS_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } diff --git 
a/core/src/syscall/precompiles/sha256/compress/trace.rs b/core/src/syscall/precompiles/sha256/compress/trace.rs index 9a1eaad76b..d0f4a4da27 100644 --- a/core/src/syscall/precompiles/sha256/compress/trace.rs +++ b/core/src/syscall/precompiles/sha256/compress/trace.rs @@ -128,7 +128,7 @@ impl ShaCompressChip { let cols: &mut ShaCompressCols = row.as_mut_slice().borrow_mut(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.w_ptr = F::from_canonical_u32(event.w_ptr); cols.h_ptr = F::from_canonical_u32(event.h_ptr); @@ -172,7 +172,7 @@ impl ShaCompressChip { cols.octet_num[octet_num_idx] = F::one(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.w_ptr = F::from_canonical_u32(event.w_ptr); cols.h_ptr = F::from_canonical_u32(event.h_ptr); @@ -280,7 +280,7 @@ impl ShaCompressChip { let cols: &mut ShaCompressCols = row.as_mut_slice().borrow_mut(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.w_ptr = F::from_canonical_u32(event.w_ptr); cols.h_ptr = F::from_canonical_u32(event.h_ptr); diff --git a/core/src/syscall/precompiles/sha256/extend/mod.rs b/core/src/syscall/precompiles/sha256/extend/mod.rs index a28da0045c..b3c365b256 100644 --- a/core/src/syscall/precompiles/sha256/extend/mod.rs +++ b/core/src/syscall/precompiles/sha256/extend/mod.rs @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; pub struct ShaExtendEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub w_ptr: u32, pub w_i_minus_15_reads: Vec, @@ -56,7 +56,7 @@ pub mod extend_tests { air::MachineAir, alu::AluEvent, runtime::{ExecutionRecord, Instruction, Opcode, Program, SyscallCode}, - stark::DefaultProver, + stark::CpuProver, utils::{ self, run_test, tests::{SHA2_ELF, SHA_EXTEND_ELF}, @@ -104,20 +104,20 @@ pub mod extend_tests { fn test_sha_prove() { utils::setup_logger(); let program = sha_extend_program(); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_sha256_program() { utils::setup_logger(); let program = Program::from(SHA2_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_sha_extend_program() { utils::setup_logger(); let program = Program::from(SHA_EXTEND_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } diff --git a/core/src/syscall/precompiles/sha256/extend/trace.rs b/core/src/syscall/precompiles/sha256/extend/trace.rs index d3784e7355..d43f46ffbf 100644 --- a/core/src/syscall/precompiles/sha256/extend/trace.rs +++ b/core/src/syscall/precompiles/sha256/extend/trace.rs @@ -106,7 +106,7 @@ impl ShaExtendChip { cols.is_real = F::one(); cols.populate_flags(j); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.w_ptr = F::from_canonical_u32(event.w_ptr); diff --git a/core/src/syscall/precompiles/uint256/air.rs b/core/src/syscall/precompiles/uint256/air.rs index 9e723646f6..25477692c8 100644 --- a/core/src/syscall/precompiles/uint256/air.rs +++ 
b/core/src/syscall/precompiles/uint256/air.rs @@ -36,7 +36,7 @@ const NUM_COLS: usize = size_of::>(); pub struct Uint256MulEvent { pub lookup_id: u128, pub shard: u32, - pub channel: u32, + pub channel: u8, pub clk: u32, pub x_ptr: u32, pub x: Vec, @@ -138,7 +138,7 @@ impl MachineAir for Uint256MulChip { // Assign basic values to the columns. cols.is_real = F::one(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.x_ptr = F::from_canonical_u32(event.x_ptr); cols.y_ptr = F::from_canonical_u32(event.y_ptr); diff --git a/core/src/syscall/precompiles/uint256/mod.rs b/core/src/syscall/precompiles/uint256/mod.rs index e7b6b49213..69c740ef02 100644 --- a/core/src/syscall/precompiles/uint256/mod.rs +++ b/core/src/syscall/precompiles/uint256/mod.rs @@ -6,7 +6,7 @@ pub use air::*; mod tests { use crate::operations::field::params::FieldParameters; - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::{ io::SP1Stdin, runtime::Program, @@ -22,7 +22,7 @@ mod tests { fn test_uint256_mul() { utils::setup_logger(); let program = Program::from(UINT256_MUL_ELF); - run_test_io::>(program, SP1Stdin::new()).unwrap(); + run_test_io::>(program, SP1Stdin::new()).unwrap(); } #[test] diff --git a/core/src/syscall/precompiles/weierstrass/weierstrass_add.rs b/core/src/syscall/precompiles/weierstrass/weierstrass_add.rs index 7a25bd57e6..7574912035 100644 --- a/core/src/syscall/precompiles/weierstrass/weierstrass_add.rs +++ b/core/src/syscall/precompiles/weierstrass/weierstrass_add.rs @@ -102,7 +102,7 @@ impl WeierstrassAddAssignChip { fn populate_field_ops( blu_events: &mut Vec, shard: u32, - channel: u32, + channel: u8, cols: &mut WeierstrassAddAssignCols, p_x: BigUint, p_y: BigUint, @@ -248,7 +248,7 @@ impl MachineAir // Populate basic columns. 
cols.is_real = F::one(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.p_ptr = F::from_canonical_u32(event.p_ptr); cols.q_ptr = F::from_canonical_u32(event.q_ptr); @@ -524,7 +524,7 @@ mod tests { use crate::{ runtime::Program, - stark::DefaultProver, + stark::CpuProver, utils::{ run_test, setup_logger, tests::{ @@ -538,48 +538,48 @@ mod tests { fn test_secp256k1_add_simple() { setup_logger(); let program = Program::from(SECP256K1_ADD_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bn254_add_simple() { setup_logger(); let program = Program::from(BN254_ADD_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bn254_mul_simple() { setup_logger(); let program = Program::from(BN254_MUL_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_secp256k1_mul_simple() { setup_logger(); let program = Program::from(SECP256K1_MUL_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bls12381_add_simple() { setup_logger(); let program = Program::from(BLS12381_ADD_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bls12381_double_simple() { setup_logger(); let program = Program::from(BLS12381_DOUBLE_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bls12381_mul_simple() { setup_logger(); let program = Program::from(BLS12381_MUL_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } diff --git a/core/src/syscall/precompiles/weierstrass/weierstrass_decompress.rs b/core/src/syscall/precompiles/weierstrass/weierstrass_decompress.rs index 495bf9781c..85c4a5f89d 100644 --- a/core/src/syscall/precompiles/weierstrass/weierstrass_decompress.rs +++ b/core/src/syscall/precompiles/weierstrass/weierstrass_decompress.rs @@ -137,7 +137,7 @@ impl WeierstrassDecompressChip { fn populate_field_ops( record: &mut impl ByteRecord, shard: u32, - channel: u32, + channel: u8, cols: &mut WeierstrassDecompressCols, x: BigUint, ) { @@ -216,8 +216,8 @@ impl MachineAir cols.is_real = F::from_bool(true); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.ptr = F::from_canonical_u32(event.ptr); cols.sign_bit = F::from_bool(event.sign_bit); @@ -611,7 +611,7 @@ where #[cfg(test)] mod tests { use crate::io::SP1Stdin; - use crate::stark::DefaultProver; + use crate::stark::CpuProver; use crate::utils::{self, tests::BLS12381_DECOMPRESS_ELF}; use crate::Program; use amcl::bls381::bls381::basic::key_pair_generate_g2; @@ -639,7 +639,7 @@ mod tests { let stdin = SP1Stdin::from(&compressed); let mut public_values = - run_test_io::>(Program::from(BLS12381_DECOMPRESS_ELF), stdin) + run_test_io::>(Program::from(BLS12381_DECOMPRESS_ELF), stdin) .unwrap(); let mut result = [0; 96]; @@ -671,7 +671,7 @@ mod tests { let inputs = SP1Stdin::from(&compressed); let mut public_values = - run_test_io::>(Program::from(SECP256K1_DECOMPRESS_ELF), inputs) + run_test_io::>(Program::from(SECP256K1_DECOMPRESS_ELF), inputs) .unwrap(); let mut result = [0; 65]; public_values.read_slice(&mut result); diff 
--git a/core/src/syscall/precompiles/weierstrass/weierstrass_double.rs b/core/src/syscall/precompiles/weierstrass/weierstrass_double.rs index 4a480f6c94..8e40405433 100644 --- a/core/src/syscall/precompiles/weierstrass/weierstrass_double.rs +++ b/core/src/syscall/precompiles/weierstrass/weierstrass_double.rs @@ -102,7 +102,7 @@ impl WeierstrassDoubleAssignChip { fn populate_field_ops( blu_events: &mut Vec, shard: u32, - channel: u32, + channel: u8, cols: &mut WeierstrassDoubleAssignCols, p_x: BigUint, p_y: BigUint, @@ -272,7 +272,7 @@ impl MachineAir // Populate basic columns. cols.is_real = F::one(); cols.shard = F::from_canonical_u32(event.shard); - cols.channel = F::from_canonical_u32(event.channel); + cols.channel = F::from_canonical_u8(event.channel); cols.clk = F::from_canonical_u32(event.clk); cols.p_ptr = F::from_canonical_u32(event.p_ptr); @@ -549,7 +549,7 @@ pub mod tests { use crate::{ runtime::Program, - stark::DefaultProver, + stark::CpuProver, utils::{ run_test, setup_logger, tests::{BLS12381_DOUBLE_ELF, BN254_DOUBLE_ELF, SECP256K1_DOUBLE_ELF}, @@ -560,20 +560,20 @@ pub mod tests { fn test_secp256k1_double_simple() { setup_logger(); let program = Program::from(SECP256K1_DOUBLE_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bn254_double_simple() { setup_logger(); let program = Program::from(BN254_DOUBLE_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } #[test] fn test_bls12381_double_simple() { setup_logger(); let program = Program::from(BLS12381_DOUBLE_ELF); - run_test::>(program).unwrap(); + run_test::>(program).unwrap(); } } diff --git a/core/src/syscall/write.rs b/core/src/syscall/write.rs index fc159c0719..8e9d389cbd 100644 --- a/core/src/syscall/write.rs +++ b/core/src/syscall/write.rs @@ -36,7 +36,7 @@ impl Syscall for SyscallWrite { rt.cycle_tracker .insert(fn_name.to_string(), (rt.state.global_clk, depth)); let padding = (0..depth).map(|_| "│ ").collect::(); - log::debug!("{}┌╴{}", padding, fn_name); + log::info!("{}┌╴{}", padding, fn_name); } else if s.contains("cycle-tracker-end:") { let fn_name = s .split("cycle-tracker-end:") diff --git a/core/src/utils/concurrency.rs b/core/src/utils/concurrency.rs new file mode 100644 index 0000000000..b5dec9024e --- /dev/null +++ b/core/src/utils/concurrency.rs @@ -0,0 +1,32 @@ +use std::sync::{Condvar, Mutex}; + +/// A turn-based synchronization primitive. +pub struct TurnBasedSync { + pub current_turn: Mutex, + pub cv: Condvar, +} + +impl TurnBasedSync { + /// Creates a new [TurnBasedSync]. + pub fn new() -> Self { + TurnBasedSync { + current_turn: Mutex::new(0), + cv: Condvar::new(), + } + } + + /// Waits for the current turn to be equal to the given turn. + pub fn wait_for_turn(&self, my_turn: usize) { + let mut turn = self.current_turn.lock().unwrap(); + while *turn != my_turn { + turn = self.cv.wait(turn).unwrap(); + } + } + + /// Advances the current turn. 
+ pub fn advance_turn(&self) { + let mut turn = self.current_turn.lock().unwrap(); + *turn += 1; + self.cv.notify_all(); + } +} diff --git a/core/src/utils/mod.rs b/core/src/utils/mod.rs index 28c40f54f9..f2a7f53f75 100644 --- a/core/src/utils/mod.rs +++ b/core/src/utils/mod.rs @@ -1,4 +1,5 @@ mod buffer; +pub mod concurrency; mod config; pub mod ec; mod logger; diff --git a/core/src/utils/options.rs b/core/src/utils/options.rs index c092dac136..f0a157bcea 100644 --- a/core/src/utils/options.rs +++ b/core/src/utils/options.rs @@ -1,13 +1,16 @@ use std::env; +use serde::{Deserialize, Serialize}; + use crate::runtime::{SplitOpts, DEFERRED_SPLIT_THRESHOLD}; const DEFAULT_SHARD_SIZE: usize = 1 << 22; const DEFAULT_SHARD_BATCH_SIZE: usize = 16; -const DEFAULT_COMMIT_STREAM_CAPACITY: usize = 1; -const DEFAULT_PROVE_STREAM_CAPACITY: usize = 1; +const DEFAULT_TRACE_GEN_WORKERS: usize = 1; +const DEFAULT_CHECKPOINTS_CHANNEL_CAPACITY: usize = 128; +const DEFAULT_RECORDS_AND_TRACES_CHANNEL_CAPACITY: usize = 1; -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub struct SP1ProverOpts { pub core_opts: SP1CoreOpts, pub recursion_opts: SP1CoreOpts, @@ -22,14 +25,15 @@ impl Default for SP1ProverOpts { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub struct SP1CoreOpts { pub shard_size: usize, pub shard_batch_size: usize, - pub commit_stream_capacity: usize, - pub prove_stream_capacity: usize, pub split_opts: SplitOpts, pub reconstruct_commitments: bool, + pub trace_gen_workers: usize, + pub checkpoints_channel_capacity: usize, + pub records_and_traces_channel_capacity: usize, } impl Default for SP1CoreOpts { @@ -46,16 +50,27 @@ impl Default for SP1CoreOpts { |_| DEFAULT_SHARD_BATCH_SIZE, |s| s.parse::().unwrap_or(DEFAULT_SHARD_BATCH_SIZE), ), - commit_stream_capacity: env::var("COMMIT_STREAM_CAPACITY").map_or_else( - |_| DEFAULT_COMMIT_STREAM_CAPACITY, - |s| s.parse::().unwrap_or(DEFAULT_COMMIT_STREAM_CAPACITY), - ), - prove_stream_capacity: env::var("PROVE_STREAM_CAPACITY").map_or_else( - |_| DEFAULT_PROVE_STREAM_CAPACITY, - |s| s.parse::().unwrap_or(DEFAULT_PROVE_STREAM_CAPACITY), - ), split_opts: SplitOpts::new(split_threshold), reconstruct_commitments: true, + trace_gen_workers: env::var("TRACE_GEN_WORKERS").map_or_else( + |_| DEFAULT_TRACE_GEN_WORKERS, + |s| s.parse::().unwrap_or(DEFAULT_TRACE_GEN_WORKERS), + ), + checkpoints_channel_capacity: env::var("CHECKPOINTS_CHANNEL_CAPACITY").map_or_else( + |_| DEFAULT_CHECKPOINTS_CHANNEL_CAPACITY, + |s| { + s.parse::() + .unwrap_or(DEFAULT_CHECKPOINTS_CHANNEL_CAPACITY) + }, + ), + records_and_traces_channel_capacity: env::var("RECORDS_AND_TRACES_CHANNEL_CAPACITY") + .map_or_else( + |_| DEFAULT_RECORDS_AND_TRACES_CHANNEL_CAPACITY, + |s| { + s.parse::() + .unwrap_or(DEFAULT_RECORDS_AND_TRACES_CHANNEL_CAPACITY) + }, + ), } } } diff --git a/core/src/utils/prove.rs b/core/src/utils/prove.rs index e06856e262..dab7a50d5b 100644 --- a/core/src/utils/prove.rs +++ b/core/src/utils/prove.rs @@ -1,22 +1,25 @@ +use std::collections::VecDeque; use std::fs::File; use std::io::Seek; use std::io::{self}; use std::sync::mpsc::sync_channel; use std::sync::Arc; +use std::sync::Mutex; use web_time::Instant; +use p3_challenger::CanObserve; use p3_maybe_rayon::prelude::*; - use serde::de::DeserializeOwned; use serde::Serialize; use size::Size; +use std::thread::ScopedJoinHandle; use thiserror::Error; pub use baby_bear_blake3::BabyBearBlake3; 
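The new `TurnBasedSync` primitive introduced above (core/src/utils/concurrency.rs) lets a pool of workers do expensive work in parallel while serializing one critical section in checkpoint order. A minimal usage sketch follows; the import path is an assumption about how the module is re-exported and may need adjusting:

use std::sync::Arc;
use std::thread;

// Assumed re-export path for the primitive defined above.
use sp1_core::utils::concurrency::TurnBasedSync;

fn main() {
    let sync = Arc::new(TurnBasedSync::new());
    let mut handles = Vec::new();

    // Four workers run in parallel, but the section between wait_for_turn
    // and advance_turn executes strictly in index order.
    for index in 0..4usize {
        let sync = Arc::clone(&sync);
        handles.push(thread::spawn(move || {
            // ... parallel work for `index` happens here ...
            sync.wait_for_turn(index);
            println!("worker {index} updating shared state in order");
            sync.advance_turn();
        }));
    }

    for handle in handles {
        handle.join().unwrap();
    }
}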
use p3_baby_bear::BabyBear; use p3_field::PrimeField32; -use crate::air::MachineAir; +use crate::air::{MachineAir, PublicValues}; use crate::io::{SP1PublicValues, SP1Stdin}; use crate::lookup::InteractionBuilder; use crate::runtime::{ExecutionError, NoOpSubproofVerifier, SP1Context}; @@ -30,11 +33,14 @@ use crate::stark::Val; use crate::stark::VerifierConstraintFolder; use crate::stark::{Com, PcsProverData, RiscvAir, StarkProvingKey, UniConfig}; use crate::stark::{MachineRecord, StarkMachine}; -use crate::utils::{sorted_table_lines, SP1CoreOpts}; +use crate::utils::chunk_vec; +use crate::utils::concurrency::TurnBasedSync; +use crate::utils::sorted_table_lines; +use crate::utils::SP1CoreOpts; use crate::{ runtime::{Program, Runtime}, stark::StarkGenericConfig, - stark::{DefaultProver, OpeningProof, ShardMainData}, + stark::{CpuProver, OpeningProof}, }; const LOG_DEGREE_BOUND: usize = 31; @@ -58,7 +64,7 @@ where OpeningProof: Send + Sync, Com: Send + Sync, PcsProverData: Send + Sync, - ShardMainData: Serialize + DeserializeOwned, + // ShardMainData: Serialize + DeserializeOwned, ::Val: PrimeField32, { // Setup the machine. @@ -116,11 +122,13 @@ where { let machine = RiscvAir::machine(config); let prover = P::new(machine); - prove_with_context::(&prover, program, stdin, opts, Default::default()) + let (pk, _) = prover.setup(&program); + prove_with_context::(&prover, &pk, program, stdin, opts, Default::default()) } pub fn prove_with_context>>( prover: &P, + pk: &StarkProvingKey, program: Program, stdin: &SP1Stdin, opts: SP1CoreOpts, @@ -133,241 +141,410 @@ where Com: Send + Sync, PcsProverData: Send + Sync, { - // Record the start of the process. - let proving_start = Instant::now(); - - // Execute the program. + // Setup the runtime. let mut runtime = Runtime::with_context(program.clone(), opts, context); runtime.write_vecs(&stdin.buffer); for proof in stdin.proofs.iter() { runtime.write_proof(proof.0.clone(), proof.1.clone()); } - // Setup the machine. - let (pk, vk) = prover.setup(runtime.program.as_ref()); - - // Execute the program, saving checkpoints at the start of every `shard_batch_size` cycle range. - let create_checkpoints_span = tracing::debug_span!("create checkpoints").entered(); - let mut checkpoints = Vec::new(); - let (public_values_stream, public_values) = loop { - // Execute the runtime until we reach a checkpoint. - let (checkpoint, done) = runtime - .execute_state() - .map_err(SP1CoreProverError::ExecutionError)?; - - // Save the checkpoint to a temp file. - let mut checkpoint_file = tempfile::tempfile().map_err(SP1CoreProverError::IoError)?; - checkpoint - .save(&mut checkpoint_file) - .map_err(SP1CoreProverError::IoError)?; - checkpoints.push(checkpoint_file); - - // If we've reached the final checkpoint, break out of the loop. - if done { - break ( - runtime.state.public_values_stream, - runtime - .records - .last() - .expect("at least one record") - .public_values, - ); - } - }; - create_checkpoints_span.exit(); - - // Commit to the shards. - #[cfg(debug_assertions)] - let mut debug_records: Vec = Vec::new(); - - let mut deferred = ExecutionRecord::new(program.clone().into()); - let mut state = public_values.reset(); - let nb_checkpoints = checkpoints.len(); - let mut challenger = prover.config().challenger(); - vk.observe_into(&mut challenger); - - let scope_span = tracing::Span::current().clone(); + // Record the start of the process. 
+ let proving_start = Instant::now(); + let span = tracing::Span::current().clone(); std::thread::scope(move |s| { - let _span = scope_span.enter(); - - // Spawn a thread for commiting to the shards. - let span = tracing::Span::current().clone(); - let (records_tx, records_rx) = - sync_channel::>(opts.commit_stream_capacity); - let challenger_handle = s.spawn(move || { - let _span = span.enter(); - tracing::debug_span!("phase 1 commiter").in_scope(|| { - for records in records_rx.iter() { - let commitments = tracing::debug_span!("batch").in_scope(|| { + let _span = span.enter(); + + // Spawn the checkpoint generator thread. + let checkpoint_generator_span = tracing::Span::current().clone(); + let (checkpoints_tx, checkpoints_rx) = + sync_channel::<(usize, File, bool)>(opts.checkpoints_channel_capacity); + let checkpoint_generator_handle: ScopedJoinHandle> = + s.spawn(move || { + let _span = checkpoint_generator_span.enter(); + tracing::debug_span!("checkpoint generator").in_scope(|| { + let mut index = 0; + loop { + // Enter the span. + let span = tracing::debug_span!("batch"); + let _span = span.enter(); + + // Execute the runtime until we reach a checkpoint. + let (checkpoint, done) = runtime + .execute_state() + .map_err(SP1CoreProverError::ExecutionError)?; + + // Save the checkpoint to a temp file. + let mut checkpoint_file = + tempfile::tempfile().map_err(SP1CoreProverError::IoError)?; + checkpoint + .save(&mut checkpoint_file) + .map_err(SP1CoreProverError::IoError)?; + + // Send the checkpoint. + checkpoints_tx.send((index, checkpoint_file, done)).unwrap(); + + // If we've reached the final checkpoint, break out of the loop. + if done { + break Ok(runtime.state.public_values_stream); + } + + // Update the index. + index += 1; + } + }) + }); + + // Spawn the workers for phase 1 record generation. + let p1_record_gen_sync = Arc::new(TurnBasedSync::new()); + let p1_trace_gen_sync = Arc::new(TurnBasedSync::new()); + let (p1_records_and_traces_tx, p1_records_and_traces_rx) = + sync_channel::<( + Vec, + Vec>)>>, + )>(opts.records_and_traces_channel_capacity); + let p1_records_and_traces_tx = Arc::new(Mutex::new(p1_records_and_traces_tx)); + let checkpoints_rx = Arc::new(Mutex::new(checkpoints_rx)); + + let checkpoints = Arc::new(Mutex::new(VecDeque::new())); + let state = Arc::new(Mutex::new(PublicValues::::default().reset())); + let deferred = Arc::new(Mutex::new(ExecutionRecord::new(program.clone().into()))); + let mut p1_record_and_trace_gen_handles = Vec::new(); + for _ in 0..opts.trace_gen_workers { + let record_gen_sync = Arc::clone(&p1_record_gen_sync); + let trace_gen_sync = Arc::clone(&p1_trace_gen_sync); + let checkpoints_rx = Arc::clone(&checkpoints_rx); + let records_and_traces_tx = Arc::clone(&p1_records_and_traces_tx); + + let checkpoints = Arc::clone(&checkpoints); + let state = Arc::clone(&state); + let deferred = Arc::clone(&deferred); + let program = program.clone(); + + let span = tracing::Span::current().clone(); + let handle = s.spawn(move || { + let _span = span.enter(); + tracing::debug_span!("phase 1 trace generation").in_scope(|| { + loop { + // Receive the latest checkpoint. + let received = { checkpoints_rx.lock().unwrap().recv() }; + if let Ok((index, mut checkpoint, done)) = received { + // Trace the checkpoint and reconstruct the execution records. + let (mut records, _) = tracing::debug_span!("trace checkpoint") + .in_scope(|| trace_checkpoint(program.clone(), &checkpoint, opts)); + reset_seek(&mut checkpoint); + + // Generate the dependencies. 
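// NOTE: illustrative, self-contained sketch, not part of this patch. It mirrors the
// channel topology set up above: a single producer (the checkpoint generator) feeds a
// bounded sync_channel, and several workers share the std::sync::mpsc::Receiver behind
// a Mutex because the receiver itself cannot be cloned. The bounded capacity plays the
// role of `checkpoints_channel_capacity` and limits how far execution can run ahead of
// trace generation. Names and payload types are placeholders.
use std::sync::mpsc::sync_channel;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let capacity = 4; // stands in for opts.checkpoints_channel_capacity
    let (tx, rx) = sync_channel::<(usize, String)>(capacity);
    let rx = Arc::new(Mutex::new(rx));

    // Producer: emits indexed "checkpoints" and blocks when the channel is full.
    let producer = thread::spawn(move || {
        for index in 0..10 {
            tx.send((index, format!("checkpoint-{index}"))).unwrap();
        }
        // Dropping `tx` closes the channel so the workers can exit their loops.
    });

    // Workers: each locks the shared receiver, takes one item, processes it, and stops
    // once the channel is closed and drained.
    let mut workers = Vec::new();
    for _ in 0..3 {
        let rx = Arc::clone(&rx);
        workers.push(thread::spawn(move || loop {
            let received = { rx.lock().unwrap().recv() };
            match received {
                Ok((index, checkpoint)) => println!("worker got {checkpoint} (index {index})"),
                Err(_) => break,
            }
        }));
    }

    producer.join().unwrap();
    for worker in workers {
        worker.join().unwrap();
    }
}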
+ tracing::debug_span!("generate dependencies").in_scope(|| { + prover.machine().generate_dependencies(&mut records, &opts) + }); + + // Wait for our turn to update the state. + record_gen_sync.wait_for_turn(index); + + // Update the public values & prover state for the shards which contain + // "cpu events". + let mut state = state.lock().unwrap(); + for record in records.iter_mut() { + state.shard += 1; + state.execution_shard = record.public_values.execution_shard; + state.start_pc = record.public_values.start_pc; + state.next_pc = record.public_values.next_pc; + state.committed_value_digest = + record.public_values.committed_value_digest; + state.deferred_proofs_digest = + record.public_values.deferred_proofs_digest; + record.public_values = *state; + } + + // Defer events that are too expensive to include in every shard. + let mut deferred = deferred.lock().unwrap(); + for record in records.iter_mut() { + deferred.append(&mut record.defer()); + } + + // See if any deferred shards are ready to be commited to. + let mut deferred = deferred.split(done, opts.split_opts); + + // Update the public values & prover state for the shards which do not + // contain "cpu events" before committing to them. + if !done { + state.execution_shard += 1; + } + for record in deferred.iter_mut() { + state.shard += 1; + state.previous_init_addr_bits = + record.public_values.previous_init_addr_bits; + state.last_init_addr_bits = + record.public_values.last_init_addr_bits; + state.previous_finalize_addr_bits = + record.public_values.previous_finalize_addr_bits; + state.last_finalize_addr_bits = + record.public_values.last_finalize_addr_bits; + state.start_pc = state.next_pc; + record.public_values = *state; + } + records.append(&mut deferred); + + // Collect the checkpoints to be used again in the phase 2 prover. + let mut checkpoints = checkpoints.lock().unwrap(); + checkpoints.push_back((index, checkpoint, done)); + + // Let another worker update the state. + record_gen_sync.advance_turn(); + + // Generate the traces. + let traces = records + .par_iter() + .map(|record| prover.generate_traces(record)) + .collect::>(); + + // Wait for our turn. + trace_gen_sync.wait_for_turn(index); + + // Send the records to the phase 1 prover. + let chunked_records = chunk_vec(records, opts.shard_batch_size); + let chunked_traces = chunk_vec(traces, opts.shard_batch_size); + chunked_records.into_iter().zip(chunked_traces).for_each( + |(records, traces)| { + records_and_traces_tx + .lock() + .unwrap() + .send((records, traces)) + .unwrap(); + }, + ); + + trace_gen_sync.advance_turn(); + } else { + break; + } + } + }) + }); + p1_record_and_trace_gen_handles.push(handle); + } + drop(p1_records_and_traces_tx); + + // Create the challenger and observe the verifying key. + let mut challenger = prover.config().challenger(); + challenger.observe(pk.commit.clone()); + challenger.observe(pk.pc_start); + + // Spawn the phase 1 prover thread. + let phase_1_prover_span = tracing::Span::current().clone(); + let phase_1_prover_handle = s.spawn(move || { + let _span = phase_1_prover_span.enter(); + tracing::debug_span!("phase 1 prover").in_scope(|| { + for (records, traces) in p1_records_and_traces_rx.iter() { + tracing::debug_span!("batch").in_scope(|| { let span = tracing::Span::current().clone(); - records - .par_iter() + + // Collect the public values. + let public_values = records + .iter() .map(|record| { + record.public_values::()[0..prover.machine().num_pv_elts()] + .to_vec() + }) + .collect::>(); + + // Commit to each shard. 
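// NOTE: illustrative sketch, not part of this patch. The batching step above pairs each
// record with its traces and splits both into `shard_batch_size`-sized chunks before
// sending them to the prover thread. This standalone version shows why the chunks stay
// index-aligned; the real `chunk_vec` helper imported from sp1-core's utils may differ
// in detail.
fn chunk_vec<T>(mut items: Vec<T>, chunk_size: usize) -> Vec<Vec<T>> {
    let mut chunks = Vec::new();
    while !items.is_empty() {
        let rest = items.split_off(chunk_size.min(items.len()));
        chunks.push(items);
        items = rest;
    }
    chunks
}

fn main() {
    let records: Vec<u32> = (0..5).collect(); // stand-ins for ExecutionRecord
    let traces = vec!["t0", "t1", "t2", "t3", "t4"]; // stand-ins for shard traces
    let shard_batch_size = 2;

    let chunked_records = chunk_vec(records, shard_batch_size);
    let chunked_traces = chunk_vec(traces, shard_batch_size);

    // Chunks stay index-aligned, so record i is always shipped with trace i.
    for (records, traces) in chunked_records.into_iter().zip(chunked_traces) {
        println!("batch: records={records:?}, traces={traces:?}");
    }
    // Prints three batches: [0, 1]/["t0", "t1"], [2, 3]/["t2", "t3"], [4]/["t4"].
}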
+ let commitments = records + .into_par_iter() + .zip(traces.into_par_iter()) + .map(|(record, traces)| { let _span = span.enter(); - prover.commit(record) + let data = prover.commit(record, traces); + let main_commit = data.main_commit.clone(); + drop(data); + main_commit }) - .collect::>() + .collect::>(); + + // Observe the commitments. + for (commit, public_values) in + commitments.into_iter().zip(public_values.into_iter()) + { + prover.observe(&mut challenger, commit, &public_values); + } }); - for (commit, record) in commitments.into_iter().zip(records) { - prover.update( - &mut challenger, - commit, - &record.public_values::()[0..prover.machine().num_pv_elts()], - ); - } } }); challenger }); - tracing::debug_span!("phase 1 record generator").in_scope(|| { - for (checkpoint_idx, checkpoint_file) in checkpoints.iter_mut().enumerate() { - // Trace the checkpoint and reconstruct the execution records. - let (mut records, _) = tracing::debug_span!("trace checkpoint") - .in_scope(|| trace_checkpoint(program.clone(), checkpoint_file, opts)); - reset_seek(&mut *checkpoint_file); - - // Update the public values & prover state for the shards which contain "cpu events". - for record in records.iter_mut() { - state.shard += 1; - state.execution_shard = record.public_values.execution_shard; - state.start_pc = record.public_values.start_pc; - state.next_pc = record.public_values.next_pc; - record.public_values = state; - } - - // Generate the dependencies. - tracing::debug_span!("generate dependencies") - .in_scope(|| prover.machine().generate_dependencies(&mut records, &opts)); - - // Defer events that are too expensive to include in every shard. - for record in records.iter_mut() { - deferred.append(&mut record.defer()); - } - - // See if any deferred shards are ready to be commited to. - let is_last_checkpoint = checkpoint_idx == nb_checkpoints - 1; - let mut deferred = deferred.split(is_last_checkpoint, opts.split_opts); - - // Update the public values & prover state for the shards which do not contain "cpu events" - // before committing to them. - if !is_last_checkpoint { - state.execution_shard += 1; - } - for record in deferred.iter_mut() { - state.shard += 1; - state.previous_init_addr_bits = record.public_values.previous_init_addr_bits; - state.last_init_addr_bits = record.public_values.last_init_addr_bits; - state.previous_finalize_addr_bits = - record.public_values.previous_finalize_addr_bits; - state.last_finalize_addr_bits = record.public_values.last_finalize_addr_bits; - state.start_pc = state.next_pc; - record.public_values = state; - } - records.append(&mut deferred); - - #[cfg(debug_assertions)] - { - debug_records.extend(records.clone()); - } - - records_tx.send(records).unwrap(); - } - }); - drop(records_tx); - let challenger = challenger_handle.join().unwrap(); - - // Debug the constraints if debug assertions are enabled. - #[cfg(debug_assertions)] - { - let mut challenger = prover.config().challenger(); - prover.debug_constraints(&pk, debug_records, &mut challenger); + // Wait until the checkpoint generator handle has fully finished. + let public_values_stream = checkpoint_generator_handle.join().unwrap().unwrap(); + + // Wait until the records and traces have been fully generated. + p1_record_and_trace_gen_handles + .into_iter() + .for_each(|handle| handle.join().unwrap()); + + // Wait until the phase 1 prover has completely finished. + let challenger = phase_1_prover_handle.join().unwrap(); + + // Spawn the phase 2 record generator thread. 
+ let p2_record_gen_sync = Arc::new(TurnBasedSync::new()); + let p2_trace_gen_sync = Arc::new(TurnBasedSync::new()); + let (p2_records_and_traces_tx, p2_records_and_traces_rx) = + sync_channel::<( + Vec, + Vec>)>>, + )>(opts.records_and_traces_channel_capacity); + let p2_records_and_traces_tx = Arc::new(Mutex::new(p2_records_and_traces_tx)); + + let report_aggregate = Arc::new(Mutex::new(ExecutionReport::default())); + let state = Arc::new(Mutex::new(PublicValues::::default().reset())); + let deferred = Arc::new(Mutex::new(ExecutionRecord::new(program.clone().into()))); + let mut p2_record_and_trace_gen_handles = Vec::new(); + for _ in 0..opts.trace_gen_workers { + let record_gen_sync = Arc::clone(&p2_record_gen_sync); + let trace_gen_sync = Arc::clone(&p2_trace_gen_sync); + let records_and_traces_tx = Arc::clone(&p2_records_and_traces_tx); + + let report_aggregate = Arc::clone(&report_aggregate); + let checkpoints = Arc::clone(&checkpoints); + let state = Arc::clone(&state); + let deferred = Arc::clone(&deferred); + let program = program.clone(); + + let span = tracing::Span::current().clone(); + let handle = s.spawn(move || { + let _span = span.enter(); + tracing::debug_span!("phase 2 trace generation").in_scope(|| { + loop { + // Receive the latest checkpoint. + let received = { checkpoints.lock().unwrap().pop_front() }; + if let Some((index, mut checkpoint, done)) = received { + // Trace the checkpoint and reconstruct the execution records. + let (mut records, report) = tracing::debug_span!("trace checkpoint") + .in_scope(|| trace_checkpoint(program.clone(), &checkpoint, opts)); + *report_aggregate.lock().unwrap() += report; + reset_seek(&mut checkpoint); + + // Generate the dependencies. + tracing::debug_span!("generate dependencies").in_scope(|| { + prover.machine().generate_dependencies(&mut records, &opts) + }); + + // Wait for our turn to update the state. + record_gen_sync.wait_for_turn(index); + + // Update the public values & prover state for the shards which contain + // "cpu events". + let mut state = state.lock().unwrap(); + for record in records.iter_mut() { + state.shard += 1; + state.execution_shard = record.public_values.execution_shard; + state.start_pc = record.public_values.start_pc; + state.next_pc = record.public_values.next_pc; + state.committed_value_digest = + record.public_values.committed_value_digest; + state.deferred_proofs_digest = + record.public_values.deferred_proofs_digest; + record.public_values = *state; + } + + // Defer events that are too expensive to include in every shard. + let mut deferred = deferred.lock().unwrap(); + for record in records.iter_mut() { + deferred.append(&mut record.defer()); + } + + // See if any deferred shards are ready to be commited to. + let mut deferred = deferred.split(done, opts.split_opts); + + // Update the public values & prover state for the shards which do not + // contain "cpu events" before committing to them. + if !done { + state.execution_shard += 1; + } + for record in deferred.iter_mut() { + state.shard += 1; + state.previous_init_addr_bits = + record.public_values.previous_init_addr_bits; + state.last_init_addr_bits = + record.public_values.last_init_addr_bits; + state.previous_finalize_addr_bits = + record.public_values.previous_finalize_addr_bits; + state.last_finalize_addr_bits = + record.public_values.last_finalize_addr_bits; + state.start_pc = state.next_pc; + record.public_values = *state; + } + records.append(&mut deferred); + + // Let another worker update the state. 
+ record_gen_sync.advance_turn(); + + // Generate the traces. + let traces = records + .par_iter() + .map(|record| prover.generate_traces(record)) + .collect::>(); + + trace_gen_sync.wait_for_turn(index); + + // Send the records to the phase 1 prover. + let chunked_records = chunk_vec(records, opts.shard_batch_size); + let chunked_traces = chunk_vec(traces, opts.shard_batch_size); + chunked_records.into_iter().zip(chunked_traces).for_each( + |(records, traces)| { + records_and_traces_tx + .lock() + .unwrap() + .send((records, traces)) + .unwrap(); + }, + ); + + trace_gen_sync.advance_turn(); + } else { + break; + } + } + }) + }); + p2_record_and_trace_gen_handles.push(handle); } + drop(p2_records_and_traces_tx); - // Prove the shards. - let mut deferred = ExecutionRecord::new(program.clone().into()); - let mut state = public_values.reset(); - let mut report_aggregate = ExecutionReport::default(); - - // Spawn a thread for proving the shards. - let (records_tx, records_rx) = - sync_channel::>(opts.prove_stream_capacity); - - let commit_and_open = tracing::Span::current().clone(); - let shard_proofs = s.spawn(move || { - let _span = commit_and_open.enter(); + // Spawn the phase 2 prover thread. + let p2_prover_span = tracing::Span::current().clone(); + let p2_prover_handle = s.spawn(move || { + let _span = p2_prover_span.enter(); let mut shard_proofs = Vec::new(); tracing::debug_span!("phase 2 prover").in_scope(|| { - for records in records_rx.iter() { + for (records, traces) in p2_records_and_traces_rx.into_iter() { tracing::debug_span!("batch").in_scope(|| { let span = tracing::Span::current().clone(); - shard_proofs.par_extend(records.into_par_iter().map(|record| { - let _span = span.enter(); - prover - .commit_and_open(&pk, record, &mut challenger.clone()) - .unwrap() - })); + shard_proofs.par_extend( + records.into_par_iter().zip(traces.into_par_iter()).map( + |(record, traces)| { + let _span = span.enter(); + let data = prover.commit(record, traces); + prover.open(pk, data, &mut challenger.clone()).unwrap() + }, + ), + ); }); } }); shard_proofs }); - tracing::debug_span!("phase 2 record generator").in_scope(|| { - for (checkpoint_idx, mut checkpoint_file) in checkpoints.into_iter().enumerate() { - // Trace the checkpoint and reconstruct the execution records. - let (mut records, report) = tracing::debug_span!("trace checkpoint") - .in_scope(|| trace_checkpoint(program.clone(), &checkpoint_file, opts)); - report_aggregate += report; - reset_seek(&mut checkpoint_file); - - // Update the public values & prover state for the shards which contain "cpu events". - for record in records.iter_mut() { - state.shard += 1; - state.execution_shard = record.public_values.execution_shard; - state.start_pc = record.public_values.start_pc; - state.next_pc = record.public_values.next_pc; - record.public_values = state; - } - - // Generate the dependencies. - tracing::debug_span!("generate dependencies") - .in_scope(|| prover.machine().generate_dependencies(&mut records, &opts)); - - // Defer events that are too expensive to include in every shard. - for record in records.iter_mut() { - deferred.append(&mut record.defer()); - } - - // See if any deferred shards are ready to be commited to. - let is_last_checkpoint = checkpoint_idx == nb_checkpoints - 1; - let mut deferred = deferred.split(is_last_checkpoint, opts.split_opts); + // Wait until the records and traces have been fully generated for phase 2. 
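// NOTE: illustrative toy, not part of this patch. The two-phase structure above is a
// Fiat-Shamir pattern: phase 1 only commits to every shard and absorbs each main
// commitment plus its public values into one shared challenger; phase 2 then re-commits
// and opens every shard against a *clone* of that finished challenger, so all shard
// proofs are bound to the same transcript and can be opened in parallel. The stand-in
// "challenger" below is just a hash over an observed transcript; the real one is the
// config's duplex challenger.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Clone, Default)]
struct ToyChallenger {
    transcript: Vec<u64>,
}

impl ToyChallenger {
    // Absorb a commitment and its public values (what phase 1 does per shard).
    fn observe(&mut self, commitment: u64, public_values: &[u64]) {
        self.transcript.push(commitment);
        self.transcript.extend_from_slice(public_values);
    }

    // Squeeze a challenge (what opening a shard implicitly does in phase 2).
    fn challenge(&self) -> u64 {
        let mut hasher = DefaultHasher::new();
        self.transcript.hash(&mut hasher);
        hasher.finish()
    }
}

fn main() {
    let shard_commitments: [(u64, Vec<u64>); 3] =
        [(11, vec![1, 2]), (22, vec![3, 4]), (33, vec![5, 6])];

    // Phase 1: absorb every shard's main commitment and public values.
    let mut challenger = ToyChallenger::default();
    for (commit, public_values) in &shard_commitments {
        challenger.observe(*commit, public_values);
    }

    // Phase 2: every shard is opened against a clone of the finished transcript, so all
    // shards see identical challenges.
    let challenges: Vec<u64> = shard_commitments
        .iter()
        .map(|_| challenger.clone().challenge())
        .collect();
    assert!(challenges.windows(2).all(|pair| pair[0] == pair[1]));
}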
+ p2_record_and_trace_gen_handles + .into_iter() + .for_each(|handle| handle.join().unwrap()); - // Update the public values & prover state for the shards which do not contain "cpu events" - // before committing to them. - if !is_last_checkpoint { - state.execution_shard += 1; - } - for record in deferred.iter_mut() { - state.shard += 1; - state.previous_init_addr_bits = record.public_values.previous_init_addr_bits; - state.last_init_addr_bits = record.public_values.last_init_addr_bits; - state.previous_finalize_addr_bits = - record.public_values.previous_finalize_addr_bits; - state.last_finalize_addr_bits = record.public_values.last_finalize_addr_bits; - state.start_pc = state.next_pc; - record.public_values = state; - } - records.append(&mut deferred); - - records_tx.send(records).unwrap(); - } - }); - drop(records_tx); - let shard_proofs = shard_proofs.join().unwrap(); + // Wait until the phase 2 prover has finished. + let shard_proofs = p2_prover_handle.join().unwrap(); // Log some of the `ExecutionReport` information. + let report_aggregate = report_aggregate.lock().unwrap(); tracing::info!( "execution report (totals): total_cycles={}, total_syscall_cycles={}", report_aggregate.total_instruction_count(), @@ -386,7 +563,7 @@ where } let proof = MachineProof:: { shard_proofs }; - let cycles = runtime.state.global_clk; + let cycles = report_aggregate.total_instruction_count(); // Print the summary. let proving_time = proving_start.elapsed().as_secs_f64(); @@ -394,7 +571,7 @@ where "summary: cycles={}, e2e={}s, khz={:.2}, proofSize={}", cycles, proving_time, - (runtime.state.global_clk as f64 / (proving_time * 1000.0) as f64), + (cycles as f64 / (proving_time * 1000.0) as f64), bincode::serialize(&proof).unwrap().len(), ); @@ -443,8 +620,10 @@ pub fn run_test_core>>( let config = BabyBearPoseidon2::new(); let machine = RiscvAir::machine(config); let prover = P::new(machine); + let (pk, _) = prover.setup(runtime.program.as_ref()); let (proof, output, _) = prove_with_context( &prover, + &pk, Program::clone(&runtime.program), &inputs, SP1CoreOpts::default(), @@ -479,12 +658,11 @@ where SC::Val: p3_field::PrimeField32, SC::Challenger: Clone, Com: Send + Sync, - PcsProverData: Send + Sync, + PcsProverData: Send + Sync + Serialize + DeserializeOwned, OpeningProof: Send + Sync, - ShardMainData: Serialize + DeserializeOwned, { let start = Instant::now(); - let prover = DefaultProver::new(machine); + let prover = CpuProver::new(machine); let mut challenger = prover.config().challenger(); let proof = prover .prove(&pk, records, &mut challenger, SP1CoreOpts::default()) diff --git a/eval/src/main.rs b/eval/src/main.rs index 9ee05886ad..5721bdba82 100644 --- a/eval/src/main.rs +++ b/eval/src/main.rs @@ -2,7 +2,7 @@ use clap::{command, Parser}; use csv::WriterBuilder; use serde::Serialize; use sp1_core::runtime::{Program, Runtime}; -use sp1_core::stark::DefaultProver; +use sp1_core::stark::CpuProver; use sp1_core::utils::{ prove_simple, BabyBearBlake3, BabyBearKeccak, BabyBearPoseidon2, SP1CoreOpts, }; @@ -145,7 +145,7 @@ fn run_evaluation(hashfn: &HashFnId, program: &Program, _elf: &[u8]) -> (f64, f6 let config = BabyBearBlake3::new(); let prove_start = Instant::now(); - let _proof = prove_simple::<_, DefaultProver<_, _>>(config.clone(), runtime); + let _proof = prove_simple::<_, CpuProver<_, _>>(config.clone(), runtime); let prove_duration = prove_start.elapsed().as_secs_f64(); let verify_start = Instant::now(); @@ -163,7 +163,7 @@ fn run_evaluation(hashfn: &HashFnId, program: &Program, _elf: &[u8]) 
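// NOTE: illustrative sketch, not part of this patch. The summary line above reports
// throughput in kHz as cycles / (proving_time * 1000), where `cycles` is now the
// aggregated instruction count from the execution report rather than the runtime's
// global clock. A quick worked check of that arithmetic with made-up numbers:
fn main() {
    let cycles: u64 = 10_000_000; // total instruction count from the aggregated report
    let proving_time: f64 = 20.0; // end-to-end proving time in seconds
    let khz = cycles as f64 / (proving_time * 1000.0);
    assert!((khz - 500.0).abs() < f64::EPSILON); // 10M cycles in 20 s = 500 kHz
    println!("khz = {khz:.2}");
}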
-> (f64, f6 let config = BabyBearPoseidon2::new(); let prove_start = Instant::now(); - let _proof = prove_simple::<_, DefaultProver<_, _>>(config.clone(), runtime); + let _proof = prove_simple::<_, CpuProver<_, _>>(config.clone(), runtime); let prove_duration = prove_start.elapsed().as_secs_f64(); let verify_start = Instant::now(); @@ -181,7 +181,7 @@ fn run_evaluation(hashfn: &HashFnId, program: &Program, _elf: &[u8]) -> (f64, f6 let config = BabyBearKeccak::new(); let prove_start = Instant::now(); - let _proof = prove_simple::<_, DefaultProver<_, _>>(config.clone(), runtime); + let _proof = prove_simple::<_, CpuProver<_, _>>(config.clone(), runtime); let prove_duration = prove_start.elapsed().as_secs_f64(); let verify_start = Instant::now(); diff --git a/examples/Cargo.lock b/examples/Cargo.lock index 1644e2759e..542394c99d 100644 --- a/examples/Cargo.lock +++ b/examples/Cargo.lock @@ -51,7 +51,7 @@ dependencies = [ [[package]] name = "aggregation-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "hex", "sp1-helper", @@ -88,9 +88,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-primitives" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f783611babedbbe90db3478c120fb5f5daacceffc210b39adc0af4fe0da70bad" +checksum = "ccb3ead547f4532bc8af961649942f0b9c16ee9226e26caa3f38420651cc0bf4" dependencies = [ "alloy-rlp", "bytes", @@ -120,9 +120,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bad41a7c19498e3f6079f7744656328699f8ea3e783bdd10d85788cd439f572" +checksum = "2b40397ddcdcc266f59f959770f601ce1280e699a91fc1862f29cef91707cd09" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", @@ -134,9 +134,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-expander" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9899da7d011b4fe4c406a524ed3e3f963797dbc93b45479d60341d3a27b252" +checksum = "867a5469d61480fea08c7333ffeca52d5b621f5ca2e44f271b117ec1fc9a0525" dependencies = [ "alloy-sol-macro-input", "const-hex", @@ -152,9 +152,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-input" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32d595768fdc61331a132b6f65db41afae41b9b97d36c21eb1b955c422a7e60" +checksum = "2e482dc33a32b6fadbc0f599adea520bd3aaa585c141a80b404d0a3e3fa72528" dependencies = [ "const-hex", "dunce", @@ -167,9 +167,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a49042c6d3b66a9fe6b2b5a8bf0d39fc2ae1ee0310a2a26ffedd79fb097878dd" +checksum = "a91ca40fa20793ae9c3841b83e74569d1cc9af29a2f5237314fd3452d51e38c7" dependencies = [ "alloy-primitives", "alloy-sol-macro", @@ -382,9 +382,9 @@ dependencies = [ [[package]] name = "arrayref" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" +checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" [[package]] name = "arrayvec" @@ -782,7 +782,7 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chess-script" 
-version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-helper", "sp1-sdk", @@ -1114,7 +1114,7 @@ dependencies = [ [[package]] name = "cycle-tracking-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-helper", "sp1-sdk", @@ -1728,7 +1728,7 @@ checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "fibonacci-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "itertools 0.12.1", "sha2 0.10.8", @@ -2462,7 +2462,7 @@ dependencies = [ [[package]] name = "io-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "serde", "sp1-helper", @@ -2477,7 +2477,7 @@ checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-prime-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-sdk", ] @@ -2541,7 +2541,7 @@ dependencies = [ [[package]] name = "json-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "lib", "serde", @@ -2628,7 +2628,7 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "serde", ] @@ -3483,7 +3483,7 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "patch-testing-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "ed25519-consensus", "rand", @@ -3983,7 +3983,7 @@ dependencies = [ [[package]] name = "regex-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-helper", "sp1-sdk", @@ -4210,7 +4210,7 @@ dependencies = [ [[package]] name = "rsa-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "rsa", "sp1-helper", @@ -4810,16 +4810,17 @@ dependencies = [ [[package]] name = "sp1-build" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "cargo_metadata", "clap", + "dirs", ] [[package]] name = "sp1-core" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "arrayref", @@ -4866,6 +4867,7 @@ dependencies = [ "snowbridge-amcl", "sp1-derive", "sp1-primitives", + "static_assertions", "strum", "strum_macros", "tempfile", @@ -4879,7 +4881,7 @@ dependencies = [ [[package]] name = "sp1-derive" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "proc-macro2", "quote", @@ -4888,7 +4890,7 @@ dependencies = [ [[package]] name = "sp1-helper" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "cargo_metadata", "chrono", @@ -4897,7 +4899,7 @@ dependencies = [ [[package]] name = "sp1-primitives" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "itertools 0.13.0", "lazy_static", @@ -4909,7 +4911,7 @@ dependencies = [ [[package]] name = "sp1-prover" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -4924,6 +4926,7 @@ dependencies = [ "p3-challenger", "p3-commit", "p3-field", + "p3-matrix", "rayon", "serde", "serde_json", @@ -4944,7 +4947,7 @@ dependencies = [ [[package]] name = "sp1-recursion-circuit" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "itertools 0.13.0", @@ -4966,7 +4969,7 @@ dependencies = [ [[package]] name = "sp1-recursion-compiler" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "backtrace", "itertools 0.13.0", @@ -4990,7 +4993,7 @@ dependencies = [ [[package]] name = "sp1-recursion-core" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "arrayref", "backtrace", @@ -5024,7 +5027,7 @@ dependencies = [ [[package]] name = "sp1-recursion-derive" -version = 
"1.0.0-rc.1" +version = "1.0.1" dependencies = [ "proc-macro2", "quote", @@ -5033,7 +5036,7 @@ dependencies = [ [[package]] name = "sp1-recursion-gnark-ffi" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -5057,7 +5060,7 @@ dependencies = [ [[package]] name = "sp1-recursion-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "itertools 0.13.0", "p3-air", @@ -5085,7 +5088,7 @@ dependencies = [ [[package]] name = "sp1-sdk" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "alloy-sol-types", "anyhow", @@ -5159,7 +5162,7 @@ dependencies = [ [[package]] name = "ssz-withdrawals-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-helper", "sp1-sdk", @@ -5257,9 +5260,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d71e19bca02c807c9faa67b5a47673ff231b6e7449b251695188522f1dc44b2" +checksum = "c837dc8852cb7074e46b444afb81783140dab12c58867b49fb3898fbafedf7ea" dependencies = [ "paste", "proc-macro2", @@ -5395,7 +5398,7 @@ dependencies = [ [[package]] name = "tendermint-script" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "itertools 0.12.1", @@ -5414,18 +5417,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", @@ -5501,9 +5504,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.38.0" +version = "1.38.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +checksum = "eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" dependencies = [ "backtrace", "bytes", diff --git a/examples/Cargo.toml b/examples/Cargo.toml index 9aa472af29..6a848d88d9 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -17,7 +17,7 @@ members = [ resolver = "2" [workspace.package] -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" [workspace.dependencies] diff --git a/examples/aggregation/program/Cargo.lock b/examples/aggregation/program/Cargo.lock index 32aa1d0e0c..8cc95185ad 100644 --- a/examples/aggregation/program/Cargo.lock +++ b/examples/aggregation/program/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "aggregation-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "hex", "sha2", @@ -388,7 +388,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -398,7 +398,7 @@ dependencies = [ [[package]] name = "sp1-primitives" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "itertools 0.13.0", "lazy_static", @@ -410,7 +410,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git 
a/examples/aggregation/program/Cargo.toml b/examples/aggregation/program/Cargo.toml index 8b9ae99e28..7de447410e 100644 --- a/examples/aggregation/program/Cargo.toml +++ b/examples/aggregation/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "aggregation-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/chess/program/Cargo.lock b/examples/chess/program/Cargo.lock index 041ffce981..973a63cd51 100644 --- a/examples/chess/program/Cargo.lock +++ b/examples/chess/program/Cargo.lock @@ -88,7 +88,7 @@ dependencies = [ [[package]] name = "chess-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "chess", "sp1-zkvm", @@ -365,7 +365,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -375,7 +375,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/chess/program/Cargo.toml b/examples/chess/program/Cargo.toml index 48126fcda7..553eaa4cbc 100644 --- a/examples/chess/program/Cargo.toml +++ b/examples/chess/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "chess-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/cycle-tracking/program/Cargo.lock b/examples/cycle-tracking/program/Cargo.lock index e9c96087c2..d65dfc5c1c 100644 --- a/examples/cycle-tracking/program/Cargo.lock +++ b/examples/cycle-tracking/program/Cargo.lock @@ -53,7 +53,7 @@ dependencies = [ [[package]] name = "cycle-tracking-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-derive", "sp1-zkvm", @@ -201,7 +201,7 @@ dependencies = [ [[package]] name = "sp1-derive" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "proc-macro2", "quote", @@ -210,7 +210,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -220,7 +220,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/cycle-tracking/program/Cargo.toml b/examples/cycle-tracking/program/Cargo.toml index 22958112a4..b3c4ef408c 100644 --- a/examples/cycle-tracking/program/Cargo.toml +++ b/examples/cycle-tracking/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "cycle-tracking-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/fibonacci/program/Cargo.lock b/examples/fibonacci/program/Cargo.lock index ad6e8ec61e..98b446bd1a 100644 --- a/examples/fibonacci/program/Cargo.lock +++ b/examples/fibonacci/program/Cargo.lock @@ -63,7 +63,7 @@ dependencies = [ [[package]] name = "fibonacci-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "sp1-zkvm", ] @@ -200,7 +200,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -210,7 +210,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/fibonacci/program/Cargo.toml b/examples/fibonacci/program/Cargo.toml index c9e2debd4e..266e18510e 100644 --- a/examples/fibonacci/program/Cargo.toml +++ b/examples/fibonacci/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "fibonacci-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition 
= "2021" publish = false diff --git a/examples/fibonacci/program/elf/riscv32im-succinct-zkvm-elf b/examples/fibonacci/program/elf/riscv32im-succinct-zkvm-elf index 0b8be22e19..79cba24d82 100755 Binary files a/examples/fibonacci/program/elf/riscv32im-succinct-zkvm-elf and b/examples/fibonacci/program/elf/riscv32im-succinct-zkvm-elf differ diff --git a/examples/fibonacci/script/src/main.rs b/examples/fibonacci/script/src/main.rs index 8c28bab03d..809d1a796e 100644 --- a/examples/fibonacci/script/src/main.rs +++ b/examples/fibonacci/script/src/main.rs @@ -8,7 +8,7 @@ fn main() { utils::setup_logger(); // Create an input stream and write '500' to it. - let n = 1u32; + let n = 1000u32; let mut stdin = SP1Stdin::new(); stdin.write(&n); diff --git a/examples/io/program/Cargo.lock b/examples/io/program/Cargo.lock index cd9d99e33a..06d596d9c7 100644 --- a/examples/io/program/Cargo.lock +++ b/examples/io/program/Cargo.lock @@ -84,7 +84,7 @@ dependencies = [ [[package]] name = "io-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "serde", "sp1-zkvm", @@ -201,7 +201,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -211,7 +211,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/io/program/Cargo.toml b/examples/io/program/Cargo.toml index 64e066daa9..6466eb3f3b 100644 --- a/examples/io/program/Cargo.toml +++ b/examples/io/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "io-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/is-prime/program/Cargo.toml b/examples/is-prime/program/Cargo.toml index f30650ec1c..29123eb9c1 100644 --- a/examples/is-prime/program/Cargo.toml +++ b/examples/is-prime/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "is-prime-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/json/program/Cargo.lock b/examples/json/program/Cargo.lock index 6592725c22..0032c912dd 100644 --- a/examples/json/program/Cargo.lock +++ b/examples/json/program/Cargo.lock @@ -90,7 +90,7 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "json-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "lib", "serde", @@ -106,7 +106,7 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "serde", ] @@ -233,7 +233,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -243,7 +243,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/json/program/Cargo.toml b/examples/json/program/Cargo.toml index 60c7a3d9a1..55e50cd71b 100644 --- a/examples/json/program/Cargo.toml +++ b/examples/json/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "json-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/patch-testing/program/Cargo.lock b/examples/patch-testing/program/Cargo.lock index 16f907514f..6de95c9db1 100644 --- a/examples/patch-testing/program/Cargo.lock +++ b/examples/patch-testing/program/Cargo.lock @@ -176,7 +176,7 @@ checksum = 
"c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "patch-testing-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "curve25519-dalek-ng", "ed25519-consensus", @@ -285,7 +285,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -295,7 +295,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/patch-testing/program/Cargo.toml b/examples/patch-testing/program/Cargo.toml index 34c289601f..110cbed81d 100644 --- a/examples/patch-testing/program/Cargo.toml +++ b/examples/patch-testing/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "patch-testing-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/regex/program/Cargo.lock b/examples/regex/program/Cargo.lock index e99e196c7a..0abaef5418 100644 --- a/examples/regex/program/Cargo.lock +++ b/examples/regex/program/Cargo.lock @@ -200,7 +200,7 @@ dependencies = [ [[package]] name = "regex-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "regex", "sp1-zkvm", @@ -245,7 +245,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -255,7 +255,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/regex/program/Cargo.toml b/examples/regex/program/Cargo.toml index 6ebe448da7..161e664670 100644 --- a/examples/regex/program/Cargo.toml +++ b/examples/regex/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "regex-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/rsa/program/Cargo.lock b/examples/rsa/program/Cargo.lock index 7a55b190c9..6491ffbf8d 100644 --- a/examples/rsa/program/Cargo.lock +++ b/examples/rsa/program/Cargo.lock @@ -332,7 +332,7 @@ dependencies = [ [[package]] name = "rsa-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "digest 0.10.7", "rand", @@ -392,7 +392,7 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -402,7 +402,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", diff --git a/examples/rsa/program/Cargo.toml b/examples/rsa/program/Cargo.toml index d49f9c5152..22d2305dc4 100644 --- a/examples/rsa/program/Cargo.toml +++ b/examples/rsa/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "rsa-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/ssz-withdrawals/program/Cargo.lock b/examples/ssz-withdrawals/program/Cargo.lock index 006ade59e0..add833022a 100644 --- a/examples/ssz-withdrawals/program/Cargo.lock +++ b/examples/ssz-withdrawals/program/Cargo.lock @@ -1312,7 +1312,7 @@ dependencies = [ [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -1322,7 +1322,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", @@ -1348,7 +1348,7 @@ dependencies = [ [[package]] name = "ssz-withdrawals-program" -version = "1.0.0-rc.1" +version = 
"1.0.1" dependencies = [ "alloy-primitives", "hex", diff --git a/examples/ssz-withdrawals/program/Cargo.toml b/examples/ssz-withdrawals/program/Cargo.toml index d71eb92a64..d5b89c89f2 100644 --- a/examples/ssz-withdrawals/program/Cargo.toml +++ b/examples/ssz-withdrawals/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "ssz-withdrawals-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/examples/tendermint/program/Cargo.lock b/examples/tendermint/program/Cargo.lock index 7aea73e5a3..2eeaaec5f4 100644 --- a/examples/tendermint/program/Cargo.lock +++ b/examples/tendermint/program/Cargo.lock @@ -573,7 +573,7 @@ checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" [[package]] name = "sp1-lib" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "anyhow", "bincode", @@ -583,7 +583,7 @@ dependencies = [ [[package]] name = "sp1-zkvm" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "bincode", "cfg-if", @@ -694,7 +694,7 @@ dependencies = [ [[package]] name = "tendermint-program" -version = "1.0.0-rc.1" +version = "1.0.1" dependencies = [ "serde", "serde_cbor", diff --git a/examples/tendermint/program/Cargo.toml b/examples/tendermint/program/Cargo.toml index 2da3d65c27..28f722b4dc 100644 --- a/examples/tendermint/program/Cargo.toml +++ b/examples/tendermint/program/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "tendermint-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/prover/Cargo.toml b/prover/Cargo.toml index bb6347da30..e5e0fd552e 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -10,6 +10,7 @@ keywords = { workspace = true } categories = { workspace = true } [dependencies] +p3-matrix = { workspace = true } sp1-recursion-program = { workspace = true } sp1-recursion-circuit = { workspace = true } sp1-recursion-compiler = { workspace = true } @@ -28,7 +29,7 @@ rayon = "1.10.0" itertools = "0.13.0" tracing = "0.1.40" tracing-subscriber = "0.3.18" -serde_json = "1.0.120" +serde_json = "1.0.121" clap = { version = "4.5.9", features = ["derive", "env"] } hex = "0.4.3" anyhow = "1.0.83" @@ -37,7 +38,7 @@ tempfile = "3.10.1" subtle-encoding = "0.5.1" serial_test = "3.1.1" num-bigint = "0.4.6" -thiserror = "1.0.60" +thiserror = "1.0.63" oneshot = "0.1.8" [[bin]] diff --git a/prover/scripts/e2e.rs b/prover/scripts/e2e.rs index 21010dad9b..f6cd434aa2 100644 --- a/prover/scripts/e2e.rs +++ b/prover/scripts/e2e.rs @@ -55,7 +55,7 @@ pub fn main() { tracing::info!("building verifier constraints"); let constraints = tracing::info_span!("wrap circuit") - .in_scope(|| build_wrap_circuit(&prover.wrap_vk, wrapped_proof.proof.clone())); + .in_scope(|| build_wrap_circuit(prover.wrap_vk(), wrapped_proof.proof.clone())); tracing::info!("building template witness"); let pv: &RecursionPublicValues<_> = wrapped_proof.proof.public_values.as_slice().borrow(); diff --git a/prover/src/build.rs b/prover/src/build.rs index 96ee23c349..feede6518b 100644 --- a/prover/src/build.rs +++ b/prover/src/build.rs @@ -112,5 +112,8 @@ pub fn dummy_proof() -> (StarkVerifyingKey, ShardProof) { tracing::info!("wrap"); let wrapped_proof = prover.wrap_bn254(shrink_proof, opts).unwrap(); - (prover.wrap_vk, wrapped_proof.proof) + ( + prover.wrap_keys.into_inner().unwrap().1, + wrapped_proof.proof, + ) } diff --git a/prover/src/components.rs b/prover/src/components.rs index 407cfa873b..6cb9d0678d 100644 --- a/prover/src/components.rs +++ b/prover/src/components.rs @@ -1,6 +1,6 
@@ -use sp1_core::stark::{DefaultProver, MachineProver, RiscvAir, StarkGenericConfig}; +use sp1_core::stark::{CpuProver, MachineProver, RiscvAir, StarkGenericConfig}; -use crate::{CompressAir, CoreSC, InnerSC, OuterSC, ReduceAir, WrapAir}; +use crate::{CompressAir, CoreSC, InnerSC, OuterSC, ShrinkAir, WrapAir}; pub trait SP1ProverComponents: Send + Sync { /// The prover for making SP1 core proofs. @@ -9,12 +9,12 @@ pub trait SP1ProverComponents: Send + Sync { + Sync; /// The prover for making SP1 recursive proofs. - type CompressProver: MachineProver::Val>> + type CompressProver: MachineProver::Val>> + Send + Sync; /// The prover for shrinking compressed proofs. - type ShrinkProver: MachineProver::Val>> + type ShrinkProver: MachineProver::Val>> + Send + Sync; @@ -27,8 +27,8 @@ pub trait SP1ProverComponents: Send + Sync { pub struct DefaultProverComponents; impl SP1ProverComponents for DefaultProverComponents { - type CoreProver = DefaultProver::Val>>; - type CompressProver = DefaultProver::Val>>; - type ShrinkProver = DefaultProver::Val>>; - type WrapProver = DefaultProver::Val>>; + type CoreProver = CpuProver::Val>>; + type CompressProver = CpuProver::Val>>; + type ShrinkProver = CpuProver::Val>>; + type WrapProver = CpuProver::Val>>; } diff --git a/prover/src/init.rs b/prover/src/init.rs new file mode 100644 index 0000000000..03be15cdb3 --- /dev/null +++ b/prover/src/init.rs @@ -0,0 +1,168 @@ +use crate::components::SP1ProverComponents; +use p3_baby_bear::BabyBear; +pub use sp1_core::io::{SP1PublicValues, SP1Stdin}; +use sp1_core::stark::MachineProver; +use sp1_core::stark::StarkProvingKey; +use sp1_core::stark::StarkVerifyingKey; +use sp1_primitives::types::RecursionProgramType; +use sp1_recursion_compiler::config::InnerConfig; +use sp1_recursion_core::runtime::RecursionProgram; +pub use sp1_recursion_gnark_ffi::plonk_bn254::PlonkBn254Proof; +pub use sp1_recursion_program::machine::ReduceProgramType; +pub use sp1_recursion_program::machine::{ + SP1CompressMemoryLayout, SP1DeferredMemoryLayout, SP1RecursionMemoryLayout, SP1RootMemoryLayout, +}; +use sp1_recursion_program::machine::{ + SP1CompressVerifier, SP1DeferredVerifier, SP1RecursiveVerifier, SP1RootVerifier, +}; +use tracing::debug_span; + +use crate::{InnerSC, OuterSC, SP1Prover}; + +impl SP1Prover { + /// The program that can recursively verify a set of proofs into a single proof. + pub fn recursion_program(&self) -> &RecursionProgram { + self.recursion_program.get_or_init(|| { + debug_span!("init recursion program").in_scope(|| { + SP1RecursiveVerifier::::build(self.core_prover.machine()) + }) + }) + } + + /// The program that recursively verifies deferred proofs and accumulates the digests. + pub fn deferred_program(&self) -> &RecursionProgram { + self.deferred_program.get_or_init(|| { + debug_span!("init deferred program").in_scope(|| { + SP1DeferredVerifier::::build(self.compress_prover.machine()) + }) + }) + } + + /// The program that reduces a set of recursive proofs into a single proof. + pub fn compress_program(&self) -> &RecursionProgram { + self.compress_program.get_or_init(|| { + debug_span!("init compress program").in_scope(|| { + SP1CompressVerifier::::build( + self.compress_prover.machine(), + self.recursion_vk(), + self.deferred_vk(), + ) + }) + }) + } + + /// The shrink program that compresses a proof into a succinct proof. 
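// NOTE: illustrative sketch, not part of this patch. The accessors in the new init.rs
// above all follow the same std::sync::OnceLock pattern: the expensive program or key
// construction runs the first time the getter is called and the result is cached for
// every later call. A minimal standalone version of that pattern with a placeholder
// payload standing in for the cached recursion program:
use std::sync::OnceLock;

struct LazyProver {
    recursion_program: OnceLock<Vec<u32>>, // stand-in for the cached recursion program
}

impl LazyProver {
    fn new() -> Self {
        Self { recursion_program: OnceLock::new() }
    }

    // First call pays the construction cost; later calls return the cached value.
    fn recursion_program(&self) -> &Vec<u32> {
        self.recursion_program.get_or_init(|| {
            println!("building recursion program (runs once)");
            (0..1024).collect()
        })
    }
}

fn main() {
    let prover = LazyProver::new();
    let first = prover.recursion_program().len();
    let second = prover.recursion_program().len(); // no rebuild; same cached value
    assert_eq!(first, second);
}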
+ pub fn shrink_program(&self) -> &RecursionProgram { + self.shrink_program.get_or_init(|| { + debug_span!("init shrink program").in_scope(|| { + SP1RootVerifier::::build( + self.compress_prover.machine(), + self.compress_vk(), + RecursionProgramType::Shrink, + ) + }) + }) + } + + /// The wrap program that wraps a proof into a SNARK-friendly field. + pub fn wrap_program(&self) -> &RecursionProgram { + self.wrap_program.get_or_init(|| { + debug_span!("init wrap program").in_scope(|| { + SP1RootVerifier::::build( + self.shrink_prover.machine(), + self.shrink_vk(), + RecursionProgramType::Wrap, + ) + }) + }) + } + + /// The proving and verifying keys for the recursion step. + pub fn recursion_keys(&self) -> &(StarkProvingKey, StarkVerifyingKey) { + self.recursion_keys.get_or_init(|| { + debug_span!("init recursion keys") + .in_scope(|| self.compress_prover.setup(self.recursion_program())) + }) + } + + /// The proving key for the recursion step. + pub fn recursion_pk(&self) -> &StarkProvingKey { + &self.recursion_keys().0 + } + + /// The verifying key for the recursion step. + pub fn recursion_vk(&self) -> &StarkVerifyingKey { + &self.recursion_keys().1 + } + + /// The proving and verifying keys for the deferred step. + pub fn deferred_keys(&self) -> &(StarkProvingKey, StarkVerifyingKey) { + self.deferred_keys.get_or_init(|| { + debug_span!("init deferred keys") + .in_scope(|| self.compress_prover.setup(self.deferred_program())) + }) + } + + /// The proving key for the deferred step. + pub fn deferred_pk(&self) -> &StarkProvingKey { + &self.deferred_keys().0 + } + + /// The verifying key for the deferred step. + pub fn deferred_vk(&self) -> &StarkVerifyingKey { + &self.deferred_keys().1 + } + + /// The proving and verifying keys for the compress step. + pub fn compress_keys(&self) -> &(StarkProvingKey, StarkVerifyingKey) { + self.compress_keys.get_or_init(|| { + debug_span!("init compress keys") + .in_scope(|| self.compress_prover.setup(self.compress_program())) + }) + } + + /// The proving key for the compress step. + pub fn compress_pk(&self) -> &StarkProvingKey { + &self.compress_keys().0 + } + + /// The verifying key for the compress step. + pub fn compress_vk(&self) -> &StarkVerifyingKey { + &self.compress_keys().1 + } + + /// The proving and verifying keys for the shrink step. + pub fn shrink_keys(&self) -> &(StarkProvingKey, StarkVerifyingKey) { + self.shrink_keys.get_or_init(|| { + debug_span!("init shrink keys") + .in_scope(|| self.shrink_prover.setup(self.shrink_program())) + }) + } + + /// The proving key for the shrink step. + pub fn shrink_pk(&self) -> &StarkProvingKey { + &self.shrink_keys().0 + } + + /// The verifying key for the shrink step. + pub fn shrink_vk(&self) -> &StarkVerifyingKey { + &self.shrink_keys().1 + } + + /// The proving and verifying keys for the wrap step. + pub fn wrap_keys(&self) -> &(StarkProvingKey, StarkVerifyingKey) { + self.wrap_keys.get_or_init(|| { + debug_span!("init wrap keys").in_scope(|| self.wrap_prover.setup(self.wrap_program())) + }) + } + + /// The proving key for the wrap step. + pub fn wrap_pk(&self) -> &StarkProvingKey { + &self.wrap_keys().0 + } + + /// The verifying key for the wrap step. 
+ pub fn wrap_vk(&self) -> &StarkVerifyingKey { + &self.wrap_keys().1 + } +} diff --git a/prover/src/lib.rs b/prover/src/lib.rs index efd218f860..c77d7473bb 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -13,26 +13,29 @@ pub mod build; pub mod components; +pub mod init; pub mod types; pub mod utils; pub mod verify; use std::borrow::Borrow; use std::path::Path; -use std::sync::Arc; +use std::sync::mpsc::sync_channel; +use std::sync::{Arc, Mutex, OnceLock}; +use std::thread; use components::{DefaultProverComponents, SP1ProverComponents}; use p3_baby_bear::BabyBear; use p3_challenger::CanObserve; use p3_field::{AbstractField, PrimeField}; -use rayon::iter::{IntoParallelIterator, ParallelIterator}; -use rayon::prelude::*; +use p3_matrix::dense::RowMajorMatrix; use sp1_core::air::{PublicValues, Word}; pub use sp1_core::io::{SP1PublicValues, SP1Stdin}; use sp1_core::runtime::{ExecutionError, ExecutionReport, Runtime, SP1Context}; use sp1_core::stark::MachineProver; use sp1_core::stark::{Challenge, StarkProvingKey}; use sp1_core::stark::{Challenger, MachineVerificationError}; +use sp1_core::utils::concurrency::TurnBasedSync; use sp1_core::utils::{SP1CoreOpts, SP1ProverOpts, DIGEST_SIZE}; use sp1_core::{ runtime::Program, @@ -40,10 +43,10 @@ use sp1_core::{ utils::{BabyBearPoseidon2, SP1CoreProverError}, }; use sp1_primitives::hash_deferred_proof; -use sp1_primitives::types::RecursionProgramType; use sp1_recursion_circuit::witness::Witnessable; use sp1_recursion_compiler::config::InnerConfig; use sp1_recursion_compiler::ir::Witness; +use sp1_recursion_core::runtime::ExecutionRecord; use sp1_recursion_core::{ air::RecursionPublicValues, runtime::{RecursionProgram, Runtime as RecursionRuntime}, @@ -53,11 +56,8 @@ pub use sp1_recursion_gnark_ffi::plonk_bn254::PlonkBn254Proof; use sp1_recursion_gnark_ffi::plonk_bn254::PlonkBn254Prover; use sp1_recursion_program::hints::Hintable; pub use sp1_recursion_program::machine::ReduceProgramType; -use sp1_recursion_program::machine::{ - SP1CompressVerifier, SP1DeferredVerifier, SP1RecursiveVerifier, SP1RootVerifier, -}; pub use sp1_recursion_program::machine::{ - SP1DeferredMemoryLayout, SP1RecursionMemoryLayout, SP1ReduceMemoryLayout, SP1RootMemoryLayout, + SP1CompressMemoryLayout, SP1DeferredMemoryLayout, SP1RecursionMemoryLayout, SP1RootMemoryLayout, }; use tracing::instrument; pub use types::*; @@ -74,60 +74,45 @@ pub type InnerSC = BabyBearPoseidon2; /// The configuration for the outer prover. pub type OuterSC = BabyBearPoseidon2Outer; -const REDUCE_DEGREE: usize = 3; -const COMPRESS_DEGREE: usize = 9; +const COMPRESS_DEGREE: usize = 3; +const SHRINK_DEGREE: usize = 9; const WRAP_DEGREE: usize = 17; -pub type ReduceAir = RecursionAir; pub type CompressAir = RecursionAir; +pub type ShrinkAir = RecursionAir; pub type WrapAir = RecursionAir; /// A end-to-end prover implementation for the SP1 RISC-V zkVM. pub struct SP1Prover { /// The program that can recursively verify a set of proofs into a single proof. - pub recursion_program: RecursionProgram, - - /// The proving key for the recursion step. - pub rec_pk: StarkProvingKey, + pub recursion_program: OnceLock>, - /// The verification key for the recursion step. - pub rec_vk: StarkVerifyingKey, + /// The proving key and verifying key for the recursion step. + pub recursion_keys: OnceLock<(StarkProvingKey, StarkVerifyingKey)>, /// The program that recursively verifies deferred proofs and accumulates the digests. - pub deferred_program: RecursionProgram, - - /// The proving key for the reduce step. 
- pub deferred_pk: StarkProvingKey, + pub deferred_program: OnceLock>, - /// The verification key for the reduce step. - pub deferred_vk: StarkVerifyingKey, + /// The proving key and verifying key for the reduce step. + pub deferred_keys: OnceLock<(StarkProvingKey, StarkVerifyingKey)>, /// The program that reduces a set of recursive proofs into a single proof. - pub compress_program: RecursionProgram, + pub compress_program: OnceLock>, - /// The proving key for the reduce step. - pub compress_pk: StarkProvingKey, - - /// The verification key for the reduce step. - pub compress_vk: StarkVerifyingKey, + /// The proving key and verifying key for the reduce step. + pub compress_keys: OnceLock<(StarkProvingKey, StarkVerifyingKey)>, /// The shrink program that compresses a proof into a succinct proof. - pub shrink_program: RecursionProgram, - - /// The proving key for the compress step. - pub shrink_pk: StarkProvingKey, + pub shrink_program: OnceLock>, - /// The verification key for the compress step. - pub shrink_vk: StarkVerifyingKey, + /// The proving key and verifying key for the compress step. + pub shrink_keys: OnceLock<(StarkProvingKey, StarkVerifyingKey)>, /// The wrap program that wraps a proof into a SNARK-friendly field. - pub wrap_program: RecursionProgram, + pub wrap_program: OnceLock>, - /// The proving key for the wrap step. - pub wrap_pk: StarkProvingKey, - - /// The verification key for the wrapping step. - pub wrap_vk: StarkVerifyingKey, + /// The proving key and verifying key for the wrap step. + pub wrap_keys: OnceLock<(StarkProvingKey, StarkVerifyingKey)>, /// The machine used for proving the core step. pub core_prover: C::CoreProver, @@ -146,65 +131,46 @@ impl SP1Prover { /// Initializes a new [SP1Prover]. #[instrument(name = "initialize prover", level = "debug", skip_all)] pub fn new() -> Self { + let prover = Self::uninitialized(); + // Initialize everything except wrap key which is a bit slow. + prover.recursion_program(); + prover.deferred_program(); + prover.compress_program(); + prover.shrink_program(); + prover.wrap_program(); + prover.recursion_keys(); + prover.deferred_keys(); + prover.compress_keys(); + prover.shrink_keys(); + prover + } + + /// Creates a new [SP1Prover] with lazily initialized components. + pub fn uninitialized() -> Self { + // Initialize the provers. let core_machine = RiscvAir::machine(CoreSC::default()); let core_prover = C::CoreProver::new(core_machine); - // Get the recursive verifier and setup the proving and verifying keys. - let recursion_program = - SP1RecursiveVerifier::::build(core_prover.machine()); - let compress_machine = ReduceAir::machine(InnerSC::default()); + let compress_machine = CompressAir::machine(InnerSC::default()); let compress_prover = C::CompressProver::new(compress_machine); - let (rec_pk, rec_vk) = compress_prover.setup(&recursion_program); - - // Get the deferred program and keys. - let deferred_program = - SP1DeferredVerifier::::build(compress_prover.machine()); - let (deferred_pk, deferred_vk) = compress_prover.setup(&deferred_program); - - // Make the reduce program and keys. - let compress_program = SP1CompressVerifier::::build( - compress_prover.machine(), - &rec_vk, - &deferred_vk, - ); - let (compress_pk, compress_vk) = compress_prover.setup(&compress_program); - // Get the compress program, machine, and keys. 
- let shrink_program = SP1RootVerifier::::build( - compress_prover.machine(), - &compress_vk, - RecursionProgramType::Shrink, - ); - let shrink_machine = CompressAir::wrap_machine_dyn(InnerSC::compressed()); + let shrink_machine = ShrinkAir::wrap_machine_dyn(InnerSC::compressed()); let shrink_prover = C::ShrinkProver::new(shrink_machine); - let (shrink_pk, shrink_vk) = shrink_prover.setup(&shrink_program); - // Get the wrap program, machine, and keys. - let wrap_program = SP1RootVerifier::::build( - shrink_prover.machine(), - &shrink_vk, - RecursionProgramType::Wrap, - ); let wrap_machine = WrapAir::wrap_machine(OuterSC::default()); let wrap_prover = C::WrapProver::new(wrap_machine); - let (wrap_pk, wrap_vk) = wrap_prover.setup(&wrap_program); Self { - recursion_program, - rec_pk, - rec_vk, - deferred_program, - deferred_pk, - deferred_vk, - compress_program, - compress_pk, - compress_vk, - shrink_program, - shrink_pk, - shrink_vk, - wrap_program, - wrap_pk, - wrap_vk, + recursion_program: OnceLock::new(), + recursion_keys: OnceLock::new(), + deferred_program: OnceLock::new(), + deferred_keys: OnceLock::new(), + compress_program: OnceLock::new(), + compress_keys: OnceLock::new(), + shrink_program: OnceLock::new(), + shrink_keys: OnceLock::new(), + wrap_program: OnceLock::new(), + wrap_keys: OnceLock::new(), core_prover, compress_prover, shrink_prover, @@ -212,6 +178,21 @@ impl SP1Prover { } } + /// Fully initializes the programs, proving keys, and verifying keys that are normally + /// lazily initialized. + pub fn initialize(&mut self) { + self.recursion_program(); + self.deferred_program(); + self.compress_program(); + self.shrink_program(); + self.wrap_program(); + self.recursion_keys(); + self.deferred_keys(); + self.compress_keys(); + self.shrink_keys(); + self.wrap_keys(); + } + /// Creates a proving key and a verifying key for a given RISC-V ELF. #[instrument(name = "setup", level = "debug", skip_all)] pub fn setup(&self, elf: &[u8]) -> (SP1ProvingKey, SP1VerifyingKey) { @@ -228,11 +209,13 @@ impl SP1Prover { /// Generate a proof of an SP1 program with the specified inputs. 
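/// Note that `execute` now takes `&self`: the prover installs itself as the context's
/// `subproof_verifier` (see the body below), so subproofs supplied during execution can be
/// verified against this prover instance.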
#[instrument(name = "execute", level = "info", skip_all)] - pub fn execute( + pub fn execute<'a>( + &'a self, elf: &[u8], stdin: &SP1Stdin, - context: SP1Context, + mut context: SP1Context<'a>, ) -> Result<(SP1PublicValues, ExecutionReport), ExecutionError> { + context.subproof_verifier.replace(Arc::new(self)); let program = Program::from(elf); let opts = SP1CoreOpts::default(); let mut runtime = Runtime::with_context(program, opts, context); @@ -257,13 +240,12 @@ impl SP1Prover { opts: SP1ProverOpts, mut context: SP1Context<'a>, ) -> Result { - context - .subproof_verifier - .get_or_insert_with(|| Arc::new(self)); + context.subproof_verifier.replace(Arc::new(self)); let program = Program::from(&pk.elf); let (proof, public_values_stream, cycles) = sp1_core::utils::prove_with_context::<_, C::CoreProver>( &self.core_prover, + &pk.pk, program, stdin, opts.core_opts, @@ -343,7 +325,7 @@ impl SP1Prover { let proofs = batch.to_vec(); deferred_inputs.push(SP1DeferredMemoryLayout { - compress_vk: &self.compress_vk, + compress_vk: self.compress_vk(), machine: self.compress_prover.machine(), proofs, start_reconstruct_deferred_digest: deferred_digest.to_vec(), @@ -374,10 +356,7 @@ impl SP1Prover { shard_proofs: &[ShardProof], deferred_proofs: &[ShardProof], batch_size: usize, - ) -> ( - Vec>>, - Vec>>, - ) { + ) -> Vec> { let is_complete = shard_proofs.len() == 1 && deferred_proofs.is_empty(); let core_inputs = self.get_recursion_core_inputs( &vk.vk, @@ -399,11 +378,18 @@ impl SP1Prover { deferred_proofs, batch_size, ); - (core_inputs, deferred_inputs) + + let mut inputs = Vec::new(); + inputs.extend(core_inputs.into_iter().map(SP1CompressMemoryLayouts::Core)); + inputs.extend( + deferred_inputs + .into_iter() + .map(SP1CompressMemoryLayouts::Deferred), + ); + inputs } /// Reduce shards proofs to a single shard proof using the recursion prover. - #[instrument(name = "compress", level = "info", skip_all)] pub fn compress( &self, vk: &SP1VerifyingKey, @@ -413,7 +399,6 @@ impl SP1Prover { ) -> Result, SP1RecursionProverError> { // Set the batch size for the reduction tree. let batch_size = 2; - let shard_proofs = &proof.proof.0; // Get the leaf challenger. @@ -424,8 +409,8 @@ impl SP1Prover { leaf_challenger.observe_slice(&proof.public_values[0..self.core_prover.num_pv_elts()]); }); - // Run the recursion and reduce programs. - let (core_inputs, deferred_inputs) = self.get_first_layer_inputs( + // Generate the first layer inputs. + let first_layer_inputs = self.get_first_layer_inputs( vk, &leaf_challenger, shard_proofs, @@ -433,84 +418,320 @@ impl SP1Prover { batch_size, ); - let mut reduce_proofs = Vec::new(); - let shard_batch_size = opts.recursion_opts.shard_batch_size; - for inputs in core_inputs.chunks(shard_batch_size) { - let proofs = inputs - .into_par_iter() - .map(|input| { - self.compress_machine_proof(input, &self.recursion_program, &self.rec_pk, opts) - .map(|p| (p, ReduceProgramType::Core)) - }) - .collect::, _>>()?; - reduce_proofs.extend(proofs); + // Calculate the expected height of the tree. + let mut expected_height = 1; + let num_first_layer_inputs = first_layer_inputs.len(); + let mut num_layer_inputs = num_first_layer_inputs; + while num_layer_inputs > batch_size { + num_layer_inputs = (num_layer_inputs + 1) / 2; + expected_height += 1; } - // Run the deferred proofs programs. 
- for inputs in deferred_inputs.chunks(shard_batch_size) { - let proofs = inputs - .into_par_iter() - .map(|input| { - self.compress_machine_proof( - input, - &self.deferred_program, - &self.deferred_pk, - opts, - ) - .map(|p| (p, ReduceProgramType::Deferred)) - }) - .collect::, _>>()?; - reduce_proofs.extend(proofs); - } + // Generate the proofs. + let span = tracing::Span::current().clone(); + let proof = thread::scope(|s| { + let _span = span.enter(); - // Iterate over the recursive proof batches until there is one proof remaining. - let mut is_complete; - loop { - tracing::debug!("Recursive proof layer size: {}", reduce_proofs.len()); - is_complete = reduce_proofs.len() <= batch_size; - - let compress_inputs = reduce_proofs.chunks(batch_size).collect::>(); - let batched_compress_inputs = - compress_inputs.chunks(shard_batch_size).collect::>(); - reduce_proofs = batched_compress_inputs - .into_par_iter() - .flat_map(|batches| { - batches - .par_iter() - .map(|batch| { - let (shard_proofs, kinds) = - batch.iter().cloned().unzip::<_, _, Vec<_>, Vec<_>>(); - - let input = SP1ReduceMemoryLayout { - compress_vk: &self.compress_vk, - recursive_machine: self.compress_prover.machine(), - shard_proofs, - kinds, - is_complete, - }; + // Spawn a worker that sends the first layer inputs to a bounded channel. + let input_sync = Arc::new(TurnBasedSync::new()); + let (input_tx, input_rx) = sync_channel::<(usize, usize, SP1CompressMemoryLayouts)>( + opts.recursion_opts.checkpoints_channel_capacity, + ); + let input_tx = Arc::new(Mutex::new(input_tx)); + { + let input_tx = Arc::clone(&input_tx); + let input_sync = Arc::clone(&input_sync); + s.spawn(move || { + for (index, input) in first_layer_inputs.into_iter().enumerate() { + input_sync.wait_for_turn(index); + input_tx.lock().unwrap().send((index, 0, input)).unwrap(); + input_sync.advance_turn(); + } + }); + } - self.compress_machine_proof( - input, - &self.compress_program, - &self.compress_pk, - opts, + // Spawn workers who generate the records and traces. + let record_and_trace_sync = Arc::new(TurnBasedSync::new()); + let (record_and_trace_tx, record_and_trace_rx) = + sync_channel::<( + usize, + usize, + ExecutionRecord, + Vec<(String, RowMajorMatrix)>, + ReduceProgramType, + )>(opts.recursion_opts.records_and_traces_channel_capacity); + let record_and_trace_tx = Arc::new(Mutex::new(record_and_trace_tx)); + let record_and_trace_rx = Arc::new(Mutex::new(record_and_trace_rx)); + let input_rx = Arc::new(Mutex::new(input_rx)); + for _ in 0..opts.recursion_opts.trace_gen_workers { + let record_and_trace_sync = Arc::clone(&record_and_trace_sync); + let record_and_trace_tx = Arc::clone(&record_and_trace_tx); + let input_rx = Arc::clone(&input_rx); + let span = tracing::debug_span!("generate records and traces"); + s.spawn(move || { + let _span = span.enter(); + loop { + let received = { input_rx.lock().unwrap().recv() }; + if let Ok((index, height, input)) = received { + // Get the program and witness stream. 
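// Note: the match below also tags each input with a `ReduceProgramType`; the tag is sent
// along with the record and traces so that the proving workers can select the matching
// proving key.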
+ let (program, witness_stream, program_type) = tracing::debug_span!( + "write witness stream" ) - .map(|p| (p, ReduceProgramType::Reduce)) - }) - .collect::>() - }) - .collect::, _>>()?; + .in_scope(|| match input { + SP1CompressMemoryLayouts::Core(input) => { + let mut witness_stream = Vec::new(); + witness_stream.extend(input.write()); + ( + self.recursion_program(), + witness_stream, + ReduceProgramType::Core, + ) + } + SP1CompressMemoryLayouts::Deferred(input) => { + let mut witness_stream = Vec::new(); + witness_stream.extend(input.write()); + ( + self.deferred_program(), + witness_stream, + ReduceProgramType::Deferred, + ) + } + SP1CompressMemoryLayouts::Compress(input) => { + let mut witness_stream = Vec::new(); + witness_stream.extend(input.write()); + ( + self.compress_program(), + witness_stream, + ReduceProgramType::Reduce, + ) + } + }); + + // Execute the runtime. + let record = tracing::debug_span!("execute runtime").in_scope(|| { + let mut runtime = + RecursionRuntime::, Challenge, _>::new( + program, + self.compress_prover.config().perm.clone(), + ); + runtime.witness_stream = witness_stream.into(); + runtime + .run() + .map_err(|e| { + SP1RecursionProverError::RuntimeError(e.to_string()) + }) + .unwrap(); + runtime.record + }); + + // Generate the dependencies. + let mut records = vec![record]; + tracing::debug_span!("generate dependencies").in_scope(|| { + self.compress_prover + .machine() + .generate_dependencies(&mut records, &opts.recursion_opts) + }); + + // Generate the traces. + let record = records.into_iter().next().unwrap(); + let traces = tracing::debug_span!("generate traces") + .in_scope(|| self.compress_prover.generate_traces(&record)); + + // Wait for our turn to update the state. + record_and_trace_sync.wait_for_turn(index); + + // Send the record and traces to the worker. + record_and_trace_tx + .lock() + .unwrap() + .send((index, height, record, traces, program_type)) + .unwrap(); + + // Advance the turn. + record_and_trace_sync.advance_turn(); + } else { + break; + } + } + }); + } - if reduce_proofs.len() == 1 { - break; + // Spawn workers who generate the compress proofs. + let proofs_sync = Arc::new(TurnBasedSync::new()); + let (proofs_tx, proofs_rx) = sync_channel::<( + usize, + usize, + ShardProof, + ReduceProgramType, + )>(opts.recursion_opts.shard_batch_size); + let proofs_tx = Arc::new(Mutex::new(proofs_tx)); + let proofs_rx = Arc::new(Mutex::new(proofs_rx)); + let mut prover_handles = Vec::new(); + for _ in 0..opts.recursion_opts.shard_batch_size { + let prover_sync = Arc::clone(&proofs_sync); + let record_and_trace_rx = Arc::clone(&record_and_trace_rx); + let proofs_tx = Arc::clone(&proofs_tx); + let span = tracing::debug_span!("prove"); + let handle = s.spawn(move || { + let _span = span.enter(); + loop { + let received = { record_and_trace_rx.lock().unwrap().recv() }; + if let Ok((index, height, record, traces, program_type)) = received { + tracing::debug_span!("batch").in_scope(|| { + // Get the proving key. + let pk = if program_type == ReduceProgramType::Core { + self.recursion_pk() + } else if program_type == ReduceProgramType::Deferred { + self.deferred_pk() + } else { + self.compress_pk() + }; + + // Observe the proving key. + let mut challenger = self.compress_prover.config().challenger(); + tracing::debug_span!("observe proving key").in_scope(|| { + pk.observe_into(&mut challenger); + }); + + // Commit to the record and traces. 
+ let data = tracing::debug_span!("commit") + .in_scope(|| self.compress_prover.commit(record, traces)); + + // Observe the commitment. + tracing::debug_span!("observe commitment").in_scope(|| { + challenger.observe(data.main_commit); + challenger.observe_slice( + &data.public_values[0..self.compress_prover.num_pv_elts()], + ); + }); + + // Generate the proof. + let proof = tracing::debug_span!("open").in_scope(|| { + self.compress_prover + .open(pk, data, &mut challenger) + .unwrap() + }); + + // Wait for our turn to update the state. + prover_sync.wait_for_turn(index); + + // Send the proof. + proofs_tx + .lock() + .unwrap() + .send((index, height, proof, program_type)) + .unwrap(); + + // Advance the turn. + prover_sync.advance_turn(); + }); + } else { + break; + } + } + }); + prover_handles.push(handle); } - } - debug_assert_eq!(reduce_proofs.len(), 1); - let reduce_proof = reduce_proofs.pop().unwrap(); - Ok(SP1ReduceProof { - proof: reduce_proof.0, - }) + // Spawn a worker that generates inputs for the next layer. + let handle = { + let input_tx = Arc::clone(&input_tx); + let proofs_rx = Arc::clone(&proofs_rx); + let span = tracing::debug_span!("generate next layer inputs"); + s.spawn(move || { + let _span = span.enter(); + let mut count = num_first_layer_inputs; + let mut batch: Vec<( + usize, + usize, + ShardProof, + ReduceProgramType, + )> = Vec::new(); + loop { + let received = { proofs_rx.lock().unwrap().recv() }; + if let Ok((index, height, proof, program_type)) = received { + batch.push((index, height, proof, program_type)); + + // Compute whether we've reached the root of the tree. + let is_complete = height == expected_height; + + // If it's not complete, and we haven't reached the batch size, continue. + if !is_complete && batch.len() < batch_size { + continue; + } + + // Compute whether we're at the last input of a layer. + let mut is_last = false; + if let Some(first) = batch.first() { + is_last = first.1 != height; + } + + // If we're at the last input of a layer, we need to only include the + // first input, otherwise we include all inputs. + let inputs = if is_last { + vec![batch[0].clone()] + } else { + batch.clone() + }; + let shard_proofs = inputs + .iter() + .map(|(_, _, proof, _)| proof.clone()) + .collect(); + let kinds = inputs + .iter() + .map(|(_, _, _, program_type)| *program_type) + .collect(); + let input = + SP1CompressMemoryLayouts::Compress(SP1CompressMemoryLayout { + compress_vk: self.compress_vk(), + recursive_machine: self.compress_prover.machine(), + shard_proofs, + kinds, + is_complete, + }); + + input_sync.wait_for_turn(count); + input_tx + .lock() + .unwrap() + .send((count, inputs[0].1 + 1, input)) + .unwrap(); + input_sync.advance_turn(); + count += 1; + + // If we're at the root of the tree, stop generating inputs. + if is_complete { + break; + } + + // If we were at the last input of a layer, we keep everything but the + // first input. Otherwise, we empty the batch. + if is_last { + batch = vec![batch[1].clone()]; + } else { + batch = Vec::new(); + } + } else { + break; + } + } + }) + }; + + // Wait for all the provers to finish. + drop(input_tx); + drop(record_and_trace_tx); + drop(proofs_tx); + for handle in prover_handles { + handle.join().unwrap(); + } + handle.join().unwrap(); + + let output = proofs_rx.lock().unwrap().recv().unwrap(); + output.2 + }); + + Ok(SP1ReduceProof { proof }) } /// Generate a proof with the compress machine. 
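The reworked `compress` above swaps the old rayon-based, layer-by-layer reduction for a pipeline of scoped threads (input feeding, record and trace generation, proving, and next-layer input assembly) coordinated through bounded channels and `TurnBasedSync`. A minimal, self-contained sketch of the tree-height calculation the pipeline relies on, with an illustrative input count (the value 7 is only an example):

fn expected_height(num_first_layer_inputs: usize, batch_size: usize) -> usize {
    // Mirrors the loop in `compress`: each layer shrinks the input count by half
    // (rounding up) until at most `batch_size` inputs remain for the root proof.
    let mut height = 1;
    let mut num_layer_inputs = num_first_layer_inputs;
    while num_layer_inputs > batch_size {
        num_layer_inputs = (num_layer_inputs + 1) / 2;
        height += 1;
    }
    height
}

fn main() {
    // With batch_size = 2 (as in `compress`) and 7 first-layer inputs, the layer
    // sizes shrink as 7 -> 4 -> 2 before the final root proof, so the height is 3.
    assert_eq!(expected_height(7, 2), 3);
}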
@@ -526,6 +747,9 @@ impl SP1Prover { self.compress_prover.config().perm.clone(), ); + let span = tracing::debug_span!("execute runtime"); + let guard = span.enter(); + let mut witness_stream = Vec::new(); witness_stream.extend(input.write()); @@ -535,6 +759,8 @@ impl SP1Prover { .map_err(|e| SP1RecursionProverError::RuntimeError(e.to_string()))?; runtime.print_stats(); + drop(guard); + let mut recursive_challenger = self.compress_prover.config().challenger(); let proof = self .compress_prover @@ -568,7 +794,7 @@ impl SP1Prover { // Run the compress program. let mut runtime = RecursionRuntime::, Challenge, _>::new( - &self.shrink_program, + self.shrink_program(), self.shrink_prover.config().perm.clone(), ); @@ -589,7 +815,7 @@ impl SP1Prover { let mut compress_proof = self .shrink_prover .prove( - &self.shrink_pk, + self.shrink_pk(), vec![runtime.record], &mut compress_challenger, opts.recursion_opts, @@ -616,7 +842,7 @@ impl SP1Prover { // Run the compress program. let mut runtime = RecursionRuntime::, Challenge, _>::new( - &self.wrap_program, + self.wrap_program(), self.shrink_prover.config().perm.clone(), ); @@ -638,7 +864,7 @@ impl SP1Prover { let mut wrap_proof = self .wrap_prover .prove( - &self.wrap_pk, + self.wrap_pk(), vec![runtime.record], &mut wrap_challenger, opts.recursion_opts, @@ -650,7 +876,7 @@ impl SP1Prover { let result = self.wrap_prover .machine() - .verify(&self.wrap_vk, &wrap_proof, &mut wrap_challenger); + .verify(self.wrap_vk(), &wrap_proof, &mut wrap_challenger); match result { Ok(_) => tracing::info!("Proof verified successfully"), Err(MachineVerificationError::NonZeroCumulativeSum) => { @@ -824,7 +1050,7 @@ pub mod tests { tracing::info!("generate plonk bn254 proof"); let artifacts_dir = - try_build_plonk_bn254_artifacts_dev(&prover.wrap_vk, &wrapped_bn254_proof.proof); + try_build_plonk_bn254_artifacts_dev(prover.wrap_vk(), &wrapped_bn254_proof.proof); let plonk_bn254_proof = prover.wrap_plonk_bn254(wrapped_bn254_proof, &artifacts_dir); println!("{:?}", plonk_bn254_proof); diff --git a/prover/src/types.rs b/prover/src/types.rs index 036c94b37a..6c05934c13 100644 --- a/prover/src/types.rs +++ b/prover/src/types.rs @@ -8,6 +8,7 @@ use p3_commit::{Pcs, TwoAdicMultiplicativeCoset}; use p3_field::PrimeField; use p3_field::{AbstractField, PrimeField32, TwoAdicField}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use sp1_core::stark::RiscvAir; use sp1_core::{ io::{SP1PublicValues, SP1Stdin}, stark::{ShardProof, StarkGenericConfig, StarkProvingKey, StarkVerifyingKey}, @@ -16,9 +17,13 @@ use sp1_core::{ use sp1_primitives::poseidon2_hash; use sp1_recursion_core::{air::RecursionPublicValues, stark::config::BabyBearPoseidon2Outer}; use sp1_recursion_gnark_ffi::plonk_bn254::PlonkBn254Proof; +use sp1_recursion_program::machine::{ + SP1CompressMemoryLayout, SP1DeferredMemoryLayout, SP1RecursionMemoryLayout, +}; use thiserror::Error; use crate::utils::words_to_bytes_be; +use crate::CompressAir; use crate::{utils::babybear_bytes_to_bn254, words_to_bytes}; use crate::{utils::babybears_to_bn254, CoreSC, InnerSC}; @@ -203,3 +208,10 @@ pub enum SP1RecursionProverError { #[error("Runtime error: {0}")] RuntimeError(String), } + +#[allow(clippy::large_enum_variant)] +pub enum SP1CompressMemoryLayouts<'a> { + Core(SP1RecursionMemoryLayout<'a, InnerSC, RiscvAir>), + Deferred(SP1DeferredMemoryLayout<'a, InnerSC, CompressAir>), + Compress(SP1CompressMemoryLayout<'a, InnerSC, CompressAir>), +} diff --git a/prover/src/verify.rs b/prover/src/verify.rs index 
5b1554144e..ed47cee3d3 100644 --- a/prover/src/verify.rs +++ b/prover/src/verify.rs @@ -283,7 +283,7 @@ impl SP1Prover { shard_proofs: vec![proof.proof.clone()], }; self.compress_prover.machine().verify( - &self.compress_vk, + self.compress_vk(), &machine_proof, &mut challenger, )?; @@ -308,7 +308,7 @@ impl SP1Prover { } // Verify that the reduce program is the one we are expecting. - let recursion_vkey_hash = self.compress_vk.hash_babybear(); + let recursion_vkey_hash = self.compress_vk().hash_babybear(); if public_values.compress_vk_digest != recursion_vkey_hash { return Err(MachineVerificationError::InvalidPublicValues( "recursion vk hash mismatch", @@ -330,7 +330,7 @@ impl SP1Prover { }; self.shrink_prover .machine() - .verify(&self.shrink_vk, &machine_proof, &mut challenger)?; + .verify(self.shrink_vk(), &machine_proof, &mut challenger)?; // Validate public values let public_values: &RecursionPublicValues<_> = @@ -366,7 +366,7 @@ impl SP1Prover { }; self.wrap_prover .machine() - .verify(&self.wrap_vk, &machine_proof, &mut challenger)?; + .verify(self.wrap_vk(), &machine_proof, &mut challenger)?; // Validate public values let public_values: &RecursionPublicValues<_> = diff --git a/recursion/circuit/src/constraints.rs b/recursion/circuit/src/constraints.rs index a2d365383e..8a6d93fb43 100644 --- a/recursion/circuit/src/constraints.rs +++ b/recursion/circuit/src/constraints.rs @@ -171,11 +171,10 @@ mod tests { use p3_baby_bear::DiffusionMatrixBabyBear; use p3_challenger::{CanObserve, FieldChallenger}; use p3_commit::{Pcs, PolynomialSpace}; - use serde::{de::DeserializeOwned, Serialize}; use sp1_core::{ stark::{ - Chip, Com, DefaultProver, Dom, MachineProver, OpeningProof, PcsProverData, - ShardCommitment, ShardMainData, ShardProof, StarkGenericConfig, StarkMachine, + Chip, Com, CpuProver, Dom, MachineProver, OpeningProof, PcsProverData, ShardCommitment, + ShardProof, StarkGenericConfig, StarkMachine, }, utils::SP1CoreOpts, }; @@ -212,7 +211,6 @@ mod tests { OpeningProof: Send + Sync, Com: Send + Sync, PcsProverData: Send + Sync, - ShardMainData: Serialize + DeserializeOwned, SC::Val: p3_field::PrimeField32, ::Val: p3_field::extension::BinomiallyExtendable<4>, @@ -300,7 +298,7 @@ mod tests { let mut runtime = Runtime::::new_no_perm(&program); runtime.run().unwrap(); let machine = A::machine(config); - let prover = DefaultProver::new(machine); + let prover = CpuProver::new(machine); let (pk, vk) = prover.setup(&program); let mut challenger = prover.config().challenger(); let proof = prover diff --git a/recursion/circuit/src/fri.rs b/recursion/circuit/src/fri.rs index 52ef9858bd..930068ad5a 100644 --- a/recursion/circuit/src/fri.rs +++ b/recursion/circuit/src/fri.rs @@ -79,7 +79,6 @@ pub fn verify_two_adic_pcs( .map(|(query_opening, &index)| { let mut ro: [Ext; 32] = [builder.eval(SymbolicExt::from_f(C::EF::zero())); 32]; - // An array of the current power for each log_height. let mut log_height_pow = [0usize; 32]; @@ -135,7 +134,9 @@ pub fn verify_two_adic_pcs( let pow = log_height_pow[log_height]; // Fill in any missing powers of alpha. 
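// Note: the hunk below calls `builder.reduce_e` on each freshly computed power of alpha.
// `ReduceE` forces a modular reduction of the value in the circuit (see `DslIr::ReduceE`),
// which appears intended to keep the tracked bit width of these repeatedly multiplied
// values bounded in the gnark backend.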
(alpha_pows.len()..pow + 1).for_each(|_| { - alpha_pows.push(builder.eval(*alpha_pows.last().unwrap() * alpha)); + let new_alpha = builder.eval(*alpha_pows.last().unwrap() * alpha); + builder.reduce_e(new_alpha); + alpha_pows.push(new_alpha); }); acc = builder.eval(acc + (alpha_pows[pow] * (p_at_z - p_at_x[0]))); log_height_pow[log_height] += 1; @@ -204,6 +205,7 @@ pub fn verify_query( let index_bits = builder.num2bits_v_circuit(index, 32); let rev_reduced_index = builder.reverse_bits_len_circuit(index_bits.clone(), log_max_height); let mut x = builder.exp_e_bits(two_adic_generator, rev_reduced_index); + builder.reduce_e(x); let mut offset = 0; for (log_folded_height, commit, step, beta) in izip!( @@ -248,6 +250,7 @@ pub fn verify_query( folded_eval = builder .eval(evals_ext[0] + (beta - xs[0]) * (evals_ext[1] - evals_ext[0]) / (xs[1] - xs[0])); x = builder.eval(x * x); + builder.reduce_e(x); offset += 1; } diff --git a/recursion/compiler/src/constraints/mod.rs b/recursion/compiler/src/constraints/mod.rs index 95e9a6ea58..ac7c65e765 100644 --- a/recursion/compiler/src/constraints/mod.rs +++ b/recursion/compiler/src/constraints/mod.rs @@ -361,6 +361,10 @@ impl ConstraintCompiler { // Ignore cycle tracker instruction. // It currently serves as a marker for calculation at compile time. DslIr::CycleTracker(_) => (), + DslIr::ReduceE(a) => constraints.push(Constraint { + opcode: ConstraintOpcode::ReduceE, + args: vec![vec![a.id()]], + }), _ => panic!("unsupported {:?}", instruction), }; } diff --git a/recursion/compiler/src/constraints/opcodes.rs b/recursion/compiler/src/constraints/opcodes.rs index 4911e0f108..02ed47eea5 100644 --- a/recursion/compiler/src/constraints/opcodes.rs +++ b/recursion/compiler/src/constraints/opcodes.rs @@ -47,4 +47,5 @@ pub enum ConstraintOpcode { CommitCommitedValuesDigest, CircuitFelts2Ext, PermuteBabyBear, + ReduceE, } diff --git a/recursion/compiler/src/ir/builder.rs b/recursion/compiler/src/ir/builder.rs index e6480d6632..f9236b8970 100644 --- a/recursion/compiler/src/ir/builder.rs +++ b/recursion/compiler/src/ir/builder.rs @@ -515,6 +515,10 @@ impl Builder { .push(DslIr::CircuitCommitCommitedValuesDigest(var)); } + pub fn reduce_e(&mut self, ext: Ext) { + self.operations.push(DslIr::ReduceE(ext)); + } + pub fn cycle_tracker(&mut self, name: &str) { self.operations.push(DslIr::CycleTracker(name.to_string())); } diff --git a/recursion/compiler/src/ir/instructions.rs b/recursion/compiler/src/ir/instructions.rs index 7080a008dd..d4d40bfbd9 100644 --- a/recursion/compiler/src/ir/instructions.rs +++ b/recursion/compiler/src/ir/instructions.rs @@ -185,6 +185,9 @@ pub enum DslIr { /// Store extension field at address StoreE(Ext, Ptr, MemIndex), + /// Force reduction of field elements in circuit. + ReduceE(Ext), + // Bits. /// Decompose a variable into size bits (bits = num2bits(var, size)). Should only be used when target is a gnark circuit. 
CircuitNum2BitsV(Var, usize, Vec>), diff --git a/recursion/core/Cargo.toml b/recursion/core/Cargo.toml index 3d8b0d486e..95073c58a2 100644 --- a/recursion/core/Cargo.toml +++ b/recursion/core/Cargo.toml @@ -35,7 +35,7 @@ ff = { version = "0.13", features = ["derive", "derive_bits"] } serde = { version = "1.0", features = ["derive", "rc"] } serde_with = "3.9.0" backtrace = { version = "0.3.71", features = ["serde"] } -arrayref = "0.3.7" +arrayref = "0.3.8" static_assertions = "1.1.0" num_cpus = "1.16.0" diff --git a/recursion/core/src/cpu/columns/opcode_specific.rs b/recursion/core/src/cpu/columns/opcode_specific.rs index 47a6d38acf..cea91801c1 100644 --- a/recursion/core/src/cpu/columns/opcode_specific.rs +++ b/recursion/core/src/cpu/columns/opcode_specific.rs @@ -1,6 +1,8 @@ use std::fmt::{Debug, Formatter}; use std::mem::{size_of, transmute}; +use static_assertions::const_assert; + use super::branch::BranchCols; use super::heap_expand::HeapExpandCols; use super::memory::MemoryCols; @@ -20,8 +22,11 @@ pub union OpcodeSpecificCols { impl Default for OpcodeSpecificCols { fn default() -> Self { + // We must use the largest field to avoid uninitialized padding bytes. + const_assert!(size_of::>() == size_of::>()); + OpcodeSpecificCols { - branch: BranchCols::::default(), + memory: MemoryCols::::default(), } } } diff --git a/recursion/gnark-ffi/Cargo.toml b/recursion/gnark-ffi/Cargo.toml index 1ef47e6ce1..83df0217fc 100644 --- a/recursion/gnark-ffi/Cargo.toml +++ b/recursion/gnark-ffi/Cargo.toml @@ -16,7 +16,7 @@ p3-baby-bear = { workspace = true } sp1-recursion-compiler = { workspace = true } sp1-core = { workspace = true } serde = "1.0.204" -serde_json = "1.0.120" +serde_json = "1.0.121" tempfile = "3.10.1" rand = "0.8" log = "0.4.22" diff --git a/recursion/gnark-ffi/go/sp1/babybear/babybear.go b/recursion/gnark-ffi/go/sp1/babybear/babybear.go index 066edd2653..8259d486c7 100644 --- a/recursion/gnark-ffi/go/sp1/babybear/babybear.go +++ b/recursion/gnark-ffi/go/sp1/babybear/babybear.go @@ -6,6 +6,8 @@ package babybear import "C" import ( + "fmt" + "math" "math/big" "github.com/consensys/gnark/constraint/solver" @@ -20,6 +22,7 @@ func init() { solver.RegisterHint(InvFHint) solver.RegisterHint(InvEHint) solver.RegisterHint(ReduceHint) + solver.RegisterHint(SplitLimbsHint) } type Variable struct { @@ -43,6 +46,13 @@ func NewChip(api frontend.API) *Chip { } } +func Zero() Variable { + return Variable{ + Value: frontend.Variable("0"), + NbBits: 0, + } +} + func NewF(value string) Variable { return Variable{ Value: frontend.Variable(value), @@ -98,8 +108,16 @@ func (c *Chip) negF(a Variable) Variable { if a.NbBits == 31 { return Variable{Value: c.api.Sub(modulus, a.Value), NbBits: 31} } - negOne := NewF("2013265920") - return c.MulF(a, negOne) + + ub := new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(a.NbBits)), big.NewInt(0)) + divisor := new(big.Int).Div(ub, modulus) + divisorPlusOne := new(big.Int).Add(divisor, big.NewInt(1)) + liftedModulus := new(big.Int).Mul(divisorPlusOne, modulus) + + return Variable{ + Value: c.api.Sub(liftedModulus, a.Value), + NbBits: a.NbBits, + } } func (c *Chip) invF(in Variable) Variable { @@ -184,10 +202,10 @@ func (c *Chip) SubEF(a ExtensionVariable, b Variable) ExtensionVariable { func (c *Chip) MulE(a, b ExtensionVariable) ExtensionVariable { v2 := [4]Variable{ - NewF("0"), - NewF("0"), - NewF("0"), - NewF("0"), + Zero(), + Zero(), + Zero(), + Zero(), } for i := 0; i < 4; i++ { @@ -255,7 +273,7 @@ func (c *Chip) ToBinary(in Variable) []frontend.Variable { } func 
(p *Chip) reduceFast(x Variable) Variable { - if x.NbBits >= uint(120) { + if x.NbBits >= uint(126) { return Variable{ Value: p.reduceWithMaxBits(x.Value, uint64(x.NbBits)), NbBits: 31, @@ -284,7 +302,40 @@ func (p *Chip) reduceWithMaxBits(x frontend.Variable, maxNbBits uint64) frontend p.rangeChecker.Check(quotient, int(maxNbBits-31)) remainder := result[1] - p.rangeChecker.Check(remainder, 31) + + // Check that the remainder has size less than the BabyBear modulus, by decomposing it into a 27 + // bit limb and a 4 bit limb. + new_result, new_err := p.api.Compiler().NewHint(SplitLimbsHint, 2, remainder) + if new_err != nil { + panic(new_err) + } + + lowLimb := new_result[0] + highLimb := new_result[1] + + // Check that the hint is correct. + p.api.AssertIsEqual( + p.api.Add( + p.api.Mul(highLimb, frontend.Variable(uint64(math.Pow(2, 27)))), + lowLimb, + ), + remainder, + ) + p.rangeChecker.Check(highLimb, 4) + p.rangeChecker.Check(lowLimb, 27) + + // If the most significant bits are all 1, then we need to check that the least significant bits + // are all zero in order for element to be less than the BabyBear modulus. Otherwise, we don't + // need to do any checks, since we already know that the element is less than the BabyBear modulus. + shouldCheck := p.api.IsZero(p.api.Sub(highLimb, uint64(math.Pow(2, 4))-1)) + p.api.AssertIsEqual( + p.api.Select( + shouldCheck, + lowLimb, + frontend.Variable(0), + ), + frontend.Variable(0), + ) p.api.AssertIsEqual(x, p.api.Add(p.api.Mul(quotient, modulus), result[1])) @@ -304,6 +355,13 @@ func ReduceHint(_ *big.Int, inputs []*big.Int, results []*big.Int) error { return nil } +func (p *Chip) ReduceE(x ExtensionVariable) ExtensionVariable { + for i := 0; i < 4; i++ { + x.Value[i] = p.ReduceSlow(x.Value[i]) + } + return x +} + func InvFHint(_ *big.Int, inputs []*big.Int, results []*big.Int) error { a := C.uint(inputs[0].Uint64()) ainv := C.babybearinv(a) @@ -311,6 +369,30 @@ func InvFHint(_ *big.Int, inputs []*big.Int, results []*big.Int) error { return nil } +// The hint used to split a BabyBear Variable into a 4 bit limb (the most significant bits) and a +// 27 bit limb. 
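// Since the BabyBear modulus is 2013265921 = 15*2^27 + 1, a remainder decomposed as
// highLimb*2^27 + lowLimb (with highLimb < 2^4 and lowLimb < 2^27) is below the modulus
// exactly when highLimb < 15, or highLimb == 15 and lowLimb == 0, which is the condition
// enforced in reduceWithMaxBits above.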
+func SplitLimbsHint(_ *big.Int, inputs []*big.Int, results []*big.Int) error { + if len(inputs) != 1 { + panic("SplitLimbsHint expects 1 input operand") + } + + // The BabyBear field element + input := inputs[0] + + if input.Cmp(modulus) == 0 || input.Cmp(modulus) == 1 { + return fmt.Errorf("input is not in the field") + } + + two_27 := big.NewInt(int64(math.Pow(2, 27))) + + // The least significant bits + results[0] = new(big.Int).Rem(input, two_27) + // The most significant bits + results[1] = new(big.Int).Quo(input, two_27) + + return nil +} + func InvEHint(_ *big.Int, inputs []*big.Int, results []*big.Int) error { a := C.uint(inputs[0].Uint64()) b := C.uint(inputs[1].Uint64()) diff --git a/recursion/gnark-ffi/go/sp1/sp1.go b/recursion/gnark-ffi/go/sp1/sp1.go index a7fe4b651c..5760a43d90 100644 --- a/recursion/gnark-ffi/go/sp1/sp1.go +++ b/recursion/gnark-ffi/go/sp1/sp1.go @@ -195,6 +195,8 @@ func (circuit *Circuit) Define(api frontend.API) error { api.AssertIsEqual(circuit.CommitedValuesDigest, element) case "CircuitFelts2Ext": exts[cs.Args[0][0]] = babybear.Felts2Ext(felts[cs.Args[1][0]], felts[cs.Args[2][0]], felts[cs.Args[3][0]], felts[cs.Args[4][0]]) + case "ReduceE": + exts[cs.Args[0][0]] = fieldAPI.ReduceE(exts[cs.Args[0][0]]) default: return fmt.Errorf("unhandled opcode: %s", cs.Opcode) } diff --git a/recursion/program/src/constraints.rs b/recursion/program/src/constraints.rs index 28824c16e4..ff71a2e641 100644 --- a/recursion/program/src/constraints.rs +++ b/recursion/program/src/constraints.rs @@ -159,14 +159,13 @@ where mod tests { use itertools::{izip, Itertools}; use rand::{thread_rng, Rng}; - use serde::{de::DeserializeOwned, Serialize}; - use sp1_core::stark::DefaultProver; + use sp1_core::stark::CpuProver; use sp1_core::{ io::SP1Stdin, runtime::Program, stark::{ - Chip, Com, Dom, OpeningProof, PcsProverData, RiscvAir, ShardCommitment, ShardMainData, - ShardProof, StarkGenericConfig, StarkMachine, + Chip, Com, Dom, OpeningProof, PcsProverData, RiscvAir, ShardCommitment, ShardProof, + StarkGenericConfig, StarkMachine, }, utils::{BabyBearPoseidon2, SP1CoreOpts}, }; @@ -199,7 +198,6 @@ mod tests { OpeningProof: Send + Sync, Com: Send + Sync, PcsProverData: Send + Sync, - ShardMainData: Serialize + DeserializeOwned, SC::Val: p3_field::PrimeField32, { let ShardProof { @@ -286,7 +284,7 @@ mod tests { let machine = A::machine(SC::default()); let (_, vk) = machine.setup(&Program::from(elf)); let mut challenger = machine.config().challenger(); - let (proof, _, _) = sp1_core::utils::prove::<_, DefaultProver<_, _>>( + let (proof, _, _) = sp1_core::utils::prove::<_, CpuProver<_, _>>( Program::from(elf), &SP1Stdin::new(), SC::default(), diff --git a/recursion/program/src/hints.rs b/recursion/program/src/hints.rs index fb5a52e48a..0163b0380d 100644 --- a/recursion/program/src/hints.rs +++ b/recursion/program/src/hints.rs @@ -548,8 +548,10 @@ impl<'a, A: MachineAir> Hintable } } -impl<'a, A: MachineAir> Hintable for SP1ReduceMemoryLayout<'a, BabyBearPoseidon2, A> { - type HintVariable = SP1ReduceMemoryLayoutVariable; +impl<'a, A: MachineAir> Hintable + for SP1CompressMemoryLayout<'a, BabyBearPoseidon2, A> +{ + type HintVariable = SP1CompressMemoryLayoutVariable; fn read(builder: &mut Builder) -> Self::HintVariable { let compress_vk = VerifyingKeyHint::<'a, BabyBearPoseidon2, A>::read(builder); @@ -557,7 +559,7 @@ impl<'a, A: MachineAir> Hintable for SP1ReduceMemoryLayout<'a, Baby let kinds = Vec::::read(builder); let is_complete = builder.hint_var(); - SP1ReduceMemoryLayoutVariable { + 
SP1CompressMemoryLayoutVariable { compress_vk, shard_proofs, kinds, diff --git a/recursion/program/src/machine/compress.rs b/recursion/program/src/machine/compress.rs index 4dec375be3..353b6e2fd8 100644 --- a/recursion/program/src/machine/compress.rs +++ b/recursion/program/src/machine/compress.rs @@ -43,7 +43,7 @@ pub struct SP1CompressVerifier { } /// The different types of programs that can be verified by the `SP1ReduceVerifier`. -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] pub enum ReduceProgramType { /// A batch of proofs that are all SP1 Core proofs. Core = 0, @@ -54,7 +54,7 @@ pub enum ReduceProgramType { } /// An input layout for the reduce verifier. -pub struct SP1ReduceMemoryLayout<'a, SC: StarkGenericConfig, A: MachineAir> { +pub struct SP1CompressMemoryLayout<'a, SC: StarkGenericConfig, A: MachineAir> { pub compress_vk: &'a StarkVerifyingKey, pub recursive_machine: &'a StarkMachine, pub shard_proofs: Vec>, @@ -63,7 +63,7 @@ pub struct SP1ReduceMemoryLayout<'a, SC: StarkGenericConfig, A: MachineAir { +pub struct SP1CompressMemoryLayoutVariable { pub compress_vk: VerifyingKeyVariable, pub shard_proofs: Array>, pub kinds: Array>, @@ -82,8 +82,8 @@ where ) -> RecursionProgram { let mut builder = Builder::::new(RecursionProgramType::Compress); - let input: SP1ReduceMemoryLayoutVariable<_> = builder.uninit(); - SP1ReduceMemoryLayout::::witness(&input, &mut builder); + let input: SP1CompressMemoryLayoutVariable<_> = builder.uninit(); + SP1CompressMemoryLayout::::witness(&input, &mut builder); let pcs = TwoAdicFriPcsVariable { config: const_fri_config(&mut builder, machine.config().pcs().fri_config()), @@ -130,11 +130,11 @@ where builder: &mut Builder, pcs: &TwoAdicFriPcsVariable, machine: &StarkMachine, - input: SP1ReduceMemoryLayoutVariable, + input: SP1CompressMemoryLayoutVariable, recursive_vk: &StarkVerifyingKey, deferred_vk: &StarkVerifyingKey, ) { - let SP1ReduceMemoryLayoutVariable { + let SP1CompressMemoryLayoutVariable { compress_vk, shard_proofs, kinds, diff --git a/recursion/program/src/stark.rs b/recursion/program/src/stark.rs index 63a7438c7d..dc16a2668d 100644 --- a/recursion/program/src/stark.rs +++ b/recursion/program/src/stark.rs @@ -394,7 +394,7 @@ pub(crate) mod tests { use sp1_core::air::POSEIDON_NUM_WORDS; use sp1_core::io::SP1Stdin; use sp1_core::runtime::Program; - use sp1_core::stark::DefaultProver; + use sp1_core::stark::CpuProver; use sp1_core::stark::MachineProver; use sp1_core::utils::setup_logger; use sp1_core::utils::InnerChallenge; @@ -441,7 +441,7 @@ pub(crate) mod tests { let machine = A::machine(SC::default()); let (_, vk) = machine.setup(&Program::from(elf)); let mut challenger_val = machine.config().challenger(); - let (proof, _, _) = sp1_core::utils::prove::<_, DefaultProver<_, _>>( + let (proof, _, _) = sp1_core::utils::prove::<_, CpuProver<_, _>>( Program::from(elf), &SP1Stdin::new(), SC::default(), @@ -549,7 +549,7 @@ pub(crate) mod tests { runtime.run().unwrap(); let machine = RecursionAir::<_, 3>::machine(SC::default()); - let prover = DefaultProver::new(machine); + let prover = CpuProver::new(machine); let (pk, vk) = prover.setup(&program); let record = runtime.record.clone(); diff --git a/rust-toolchain b/rust-toolchain index 3a306210c9..b3524b5240 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2024-04-17" -components = ["llvm-tools", "rustc-dev"] \ No newline at end of file +channel = "1.79.0" +components = 
["llvm-tools", "rustc-dev"] diff --git a/sdk/Cargo.toml b/sdk/Cargo.toml index 36da7fe893..adea6b1996 100644 --- a/sdk/Cargo.toml +++ b/sdk/Cargo.toml @@ -12,7 +12,7 @@ categories = { workspace = true } [dependencies] prost = "0.12" serde = { version = "1.0.204", features = ["derive"] } -serde_json = "1.0.120" +serde_json = "1.0.121" twirp = { package = "twirp-rs", version = "0.3.0-succinct" } async-trait = "0.1.81" reqwest-middleware = "0.3.2" @@ -26,7 +26,7 @@ sp1-prover = { workspace = true } sp1-core = { workspace = true } futures = "0.3.30" bincode = "1.3.3" -tokio = { version = "1.38.0", features = ["full"] } +tokio = { version = "1.39.2", features = ["full"] } p3-matrix = { workspace = true } p3-commit = { workspace = true } p3-field = { workspace = true } @@ -37,7 +37,7 @@ tracing = "0.1.40" hex = "0.4.3" log = "0.4.22" axum = "=0.7.4" -alloy-sol-types = { version = "0.7.6", optional = true } +alloy-sol-types = { version = "0.7.7", optional = true } sha2 = "0.10.8" dirs = "5.0.1" tempfile = "3.10.1" @@ -46,7 +46,7 @@ cfg-if = "1.0" ethers = { version = "2", default-features = false } strum_macros = "0.26.4" strum = "0.26.3" -thiserror = "1.0.61" +thiserror = "1.0.63" hashbrown = "0.14.5" sysinfo = "0.30.13" diff --git a/sdk/src/action.rs b/sdk/src/action.rs index 7c712b36c1..5701e5b92d 100644 --- a/sdk/src/action.rs +++ b/sdk/src/action.rs @@ -2,9 +2,7 @@ use sp1_core::{ runtime::{ExecutionReport, HookEnv, SP1ContextBuilder}, utils::{SP1CoreOpts, SP1ProverOpts}, }; -use sp1_prover::{ - components::DefaultProverComponents, SP1Prover, SP1ProvingKey, SP1PublicValues, SP1Stdin, -}; +use sp1_prover::{components::DefaultProverComponents, SP1ProvingKey, SP1PublicValues, SP1Stdin}; use anyhow::{Ok, Result}; @@ -12,8 +10,8 @@ use crate::{Prover, SP1ProofKind, SP1ProofWithPublicValues}; /// Builder to prepare and configure execution of a program on an input. /// May be run with [Self::run]. -#[derive(Default)] pub struct Execute<'a> { + prover: &'a dyn Prover, context_builder: SP1ContextBuilder<'a>, elf: &'a [u8], stdin: SP1Stdin, @@ -24,8 +22,13 @@ impl<'a> Execute<'a> { /// /// Prefer using [ProverClient::execute](super::ProverClient::execute). /// See there for more documentation. - pub fn new(elf: &'a [u8], stdin: SP1Stdin) -> Self { + pub fn new( + prover: &'a dyn Prover, + elf: &'a [u8], + stdin: SP1Stdin, + ) -> Self { Self { + prover, elf, stdin, context_builder: Default::default(), @@ -35,14 +38,13 @@ impl<'a> Execute<'a> { /// Execute the program on the input, consuming the built action `self`. pub fn run(self) -> Result<(SP1PublicValues, ExecutionReport)> { let Self { + prover, elf, stdin, mut context_builder, } = self; let context = context_builder.build(); - Ok(SP1Prover::::execute( - elf, &stdin, context, - )?) + Ok(prover.sp1_prover().execute(elf, &stdin, context)?) } /// Add a runtime [Hook](super::Hook) into the context. @@ -183,18 +185,6 @@ impl<'a> Prove<'a> { self } - /// Set the commit stream capacity for proving. - pub fn commit_stream_capacity(mut self, value: usize) -> Self { - self.core_opts.commit_stream_capacity = value; - self - } - - /// Set the prove stream capacity for proving. - pub fn prove_stream_capacity(mut self, value: usize) -> Self { - self.core_opts.prove_stream_capacity = value; - self - } - /// Set whether we should reconstruct commitments while proving. 
pub fn reconstruct_commitments(mut self, value: bool) -> Self { self.core_opts.reconstruct_commitments = value; diff --git a/sdk/src/lib.rs b/sdk/src/lib.rs index 8f8b01ecd7..27cda68733 100644 --- a/sdk/src/lib.rs +++ b/sdk/src/lib.rs @@ -173,8 +173,8 @@ impl ProverClient { /// // Execute the program on the inputs. /// let (public_values, report) = client.execute(elf, stdin).run().unwrap(); /// ``` - pub fn execute<'a>(&self, elf: &'a [u8], stdin: SP1Stdin) -> action::Execute<'a> { - action::Execute::new(elf, stdin) + pub fn execute<'a>(&'a self, elf: &'a [u8], stdin: SP1Stdin) -> action::Execute<'a> { + action::Execute::new(self.prover.as_ref(), elf, stdin) } /// Prepare to prove the execution of the given program with the given input in the default mode. diff --git a/sdk/src/network/prover.rs b/sdk/src/network/prover.rs index 5a9d4672ac..4d84fb47b8 100644 --- a/sdk/src/network/prover.rs +++ b/sdk/src/network/prover.rs @@ -58,7 +58,9 @@ impl NetworkProver { if !skip_simulation { let (_, report) = - SP1Prover::::execute(elf, &stdin, Default::default())?; + self.local_prover + .sp1_prover() + .execute(elf, &stdin, Default::default())?; log::info!( "Simulation complete, cycles: {}", report.total_instruction_count() diff --git a/sdk/src/provers/local.rs b/sdk/src/provers/local.rs index f192455e43..ed60265fa5 100644 --- a/sdk/src/provers/local.rs +++ b/sdk/src/provers/local.rs @@ -81,7 +81,7 @@ impl Prover for LocalProver { let plonk_bn254_aritfacts = if sp1_prover::build::sp1_dev_mode() { sp1_prover::build::try_build_plonk_bn254_artifacts_dev( - &self.prover.wrap_vk, + self.prover.wrap_vk(), &outer_proof.proof, ) } else { diff --git a/sdk/src/provers/mock.rs b/sdk/src/provers/mock.rs index 0b41471c1f..c3a53359ee 100644 --- a/sdk/src/provers/mock.rs +++ b/sdk/src/provers/mock.rs @@ -57,8 +57,7 @@ impl Prover for MockProver { ) -> Result { match kind { SP1ProofKind::Core => { - let (public_values, _) = - SP1Prover::::execute(&pk.elf, &stdin, context)?; + let (public_values, _) = self.prover.execute(&pk.elf, &stdin, context)?; Ok(SP1ProofWithPublicValues { proof: SP1Proof::Core(vec![]), stdin, @@ -67,8 +66,7 @@ impl Prover for MockProver { }) } SP1ProofKind::Compressed => { - let (public_values, _) = - SP1Prover::::execute(&pk.elf, &stdin, context)?; + let (public_values, _) = self.prover.execute(&pk.elf, &stdin, context)?; Ok(SP1ProofWithPublicValues { proof: SP1Proof::Compressed(ShardProof { commitment: ShardCommitment { @@ -95,8 +93,7 @@ impl Prover for MockProver { }) } SP1ProofKind::Plonk => { - let (public_values, _) = - SP1Prover::::execute(&pk.elf, &stdin, context)?; + let (public_values, _) = self.prover.execute(&pk.elf, &stdin, context)?; Ok(SP1ProofWithPublicValues { proof: SP1Proof::Plonk(PlonkBn254Proof { public_inputs: [ diff --git a/server/Cargo.toml b/server/Cargo.toml new file mode 100644 index 0000000000..07f28671fb --- /dev/null +++ b/server/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "sp1-server" +description = "SP1 is a performant, 100% open-source, contributor-friendly zkVM." 
+readme = "../README.md" +version = { workspace = true } +edition = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +keywords = { workspace = true } +categories = { workspace = true } + +[dependencies] +sp1-core = { workspace = true } +sp1-prover = { workspace = true } +prost = "0.13" +prost-types = "0.13" +bincode = "1.3.3" +serde = { version = "1.0.197", features = ["derive"] } +serde_json = "1.0.114" +tokio = { version = "^1.38.0", features = ["full"] } +tracing = "0.1.40" +tracing-subscriber = "0.3.18" +twirp = { git = "https://github.com/github/twirp-rs.git" } +ctrlc = "3.4.4" + +[build-dependencies] +prost-build = { version = "0.13", optional = true } +twirp-build = { git = "https://github.com/github/twirp-rs.git", optional = true } + +[dev-dependencies] +sp1-core = { workspace = true, features = ["programs"] } + +[features] +default = [] +protobuf = ["dep:prost-build", "dep:twirp-build"] diff --git a/server/build.rs b/server/build.rs new file mode 100644 index 0000000000..3dee9fc32c --- /dev/null +++ b/server/build.rs @@ -0,0 +1,12 @@ +fn main() { + // This is commented out because it requires for the protobuf-compiler to be installed. + // + // println!("cargo:rerun-if-changed=."); + // let mut config = prost_build::Config::new(); + // config + // .out_dir("src/proto") + // .type_attribute(".", "#[derive(serde::Serialize,serde::Deserialize)]") + // .service_generator(twirp_build::service_generator()) + // .compile_protos(&["./proto/api.proto"], &["./proto"]) + // .unwrap(); +} diff --git a/server/proto/api.proto b/server/proto/api.proto new file mode 100644 index 0000000000..220818beb4 --- /dev/null +++ b/server/proto/api.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package api; + +service ProverService { + rpc ProveCore(ProveCoreRequest) returns (ProveCoreResponse) {} + rpc Compress(CompressRequest) returns (CompressResponse) {} +} + +message ProveCoreRequest { + bytes data = 1; +} + +message ProveCoreResponse { + bytes result = 1; +} + +message CompressRequest { + bytes data = 1; +} + +message CompressResponse { + bytes result = 1; +} \ No newline at end of file diff --git a/server/src/lib.rs b/server/src/lib.rs new file mode 100644 index 0000000000..98a3dfaf19 --- /dev/null +++ b/server/src/lib.rs @@ -0,0 +1,277 @@ +#[rustfmt::skip] +pub mod proto { + pub mod api; +} + +use core::time::Duration; +use std::process::Command; +use std::process::Stdio; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; + +use crate::proto::api::ProverServiceClient; + +use serde::{Deserialize, Serialize}; +use sp1_core::io::SP1Stdin; +use sp1_core::stark::ShardProof; +use sp1_core::utils::SP1CoreProverError; +use sp1_prover::types::SP1ProvingKey; +use sp1_prover::InnerSC; +use sp1_prover::SP1CoreProof; +use sp1_prover::SP1RecursionProverError; +use sp1_prover::SP1ReduceProof; +use sp1_prover::SP1VerifyingKey; +use tokio::runtime::Runtime; +use twirp::url::Url; +use twirp::Client; + +/// A remote client to [sp1_prover::SP1Prover] that runs inside a container. +/// +/// This is currently used to provide experimental support for GPU hardware acceleration. +/// +/// **WARNING**: This is an experimental feature and may not work as expected. +pub struct SP1ProverServer { + /// The gRPC client to communicate with the container. + client: Client, + /// The name of the container. + container_name: String, + /// A flag to indicate whether the container has already been cleaned up. 
+ cleaned_up: Arc, +} + +/// The payload for the [sp1_prover::SP1Prover::prove_core] method. +/// +/// We use this object to serialize and deserialize the payload from the client to the server. +#[derive(Serialize, Deserialize)] +pub struct ProveCoreRequestPayload { + /// The proving key. + pub pk: SP1ProvingKey, + /// The input stream. + pub stdin: SP1Stdin, +} + +/// The payload for the [sp1_prover::SP1Prover::compress] method. +/// +/// We use this object to serialize and deserialize the payload from the client to the server. +#[derive(Serialize, Deserialize)] +pub struct CompressRequestPayload { + /// The verifying key. + pub vk: SP1VerifyingKey, + /// The core proof. + pub proof: SP1CoreProof, + /// The deferred proofs. + pub deferred_proofs: Vec>, +} + +impl SP1ProverServer { + /// Creates a new [SP1Prover] that runs inside a Docker container and returns a + /// [SP1ProverClient] that can be used to communicate with the container. + pub fn new() -> Self { + let container_name = "sp1-gpu"; + let image_name = "jtguibas/sp1-gpu:v1.1.0"; + + let cleaned_up = Arc::new(AtomicBool::new(false)); + let cleanup_name = container_name; + let cleanup_flag = cleaned_up.clone(); + + // Spawn a new thread to start the Docker container. + std::thread::spawn(move || { + Command::new("sudo") + .args([ + "docker", + "run", + "-e", + "RUST_LOG=debug", + "-p", + "3000:3000", + "--rm", + "--runtime=nvidia", + "--gpus", + "all", + "--name", + container_name, + image_name, + ]) + .stdin(Stdio::inherit()) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()) + .status() + .expect("failed to start Docker container"); + }); + + ctrlc::set_handler(move || { + tracing::debug!("received Ctrl+C, cleaning up..."); + if !cleanup_flag.load(Ordering::SeqCst) { + cleanup_container(cleanup_name); + cleanup_flag.store(true, Ordering::SeqCst); + } + std::process::exit(0); + }) + .unwrap(); + + tracing::debug!("sleeping for 10 seconds to allow server to start"); + std::thread::sleep(Duration::from_secs(10)); + + SP1ProverServer { + client: Client::from_base_url( + Url::parse("http://localhost:3000/twirp/").expect("failed to parse url"), + ) + .expect("failed to create client"), + container_name: container_name.to_string(), + cleaned_up: cleaned_up.clone(), + } + } + + /// Executes the [sp1_prover::SP1Prover::prove_core] method inside the container. + /// + /// TODO: We can probably create a trait to unify [sp1_prover::SP1Prover] and [SP1ProverClient]. + /// + /// **WARNING**: This is an experimental feature and may not work as expected. + pub fn prove_core( + &self, + pk: &SP1ProvingKey, + stdin: &SP1Stdin, + ) -> Result { + let payload = ProveCoreRequestPayload { + pk: pk.clone(), + stdin: stdin.clone(), + }; + let request = crate::proto::api::ProveCoreRequest { + data: bincode::serialize(&payload).unwrap(), + }; + let rt = Runtime::new().unwrap(); + let response = rt + .block_on(async { self.client.prove_core(request).await }) + .unwrap(); + let proof: SP1CoreProof = bincode::deserialize(&response.result).unwrap(); + Ok(proof) + } + + /// Executes the [sp1_prover::SP1Prover::compress] method inside the container. + /// + /// TODO: We can probably create a trait to unify [sp1_prover::SP1Prover] and [SP1ProverClient]. + /// + /// **WARNING**: This is an experimental feature and may not work as expected. 
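/// The payload is bincode-serialized into a `CompressRequest` and sent to the container over
/// twirp; the call blocks on a locally created tokio runtime until the compressed proof is
/// returned.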
+ pub fn compress( + &self, + vk: &SP1VerifyingKey, + proof: SP1CoreProof, + deferred_proofs: Vec>, + ) -> Result, SP1RecursionProverError> { + let payload = CompressRequestPayload { + vk: vk.clone(), + proof, + deferred_proofs, + }; + let request = crate::proto::api::CompressRequest { + data: bincode::serialize(&payload).unwrap(), + }; + + let rt = Runtime::new().unwrap(); + let response = rt + .block_on(async { self.client.compress(request).await }) + .unwrap(); + let proof: SP1ReduceProof = bincode::deserialize(&response.result).unwrap(); + Ok(proof) + } +} + +impl Default for SP1ProverServer { + fn default() -> Self { + Self::new() + } +} + +impl Drop for SP1ProverServer { + fn drop(&mut self) { + if !self.cleaned_up.load(Ordering::SeqCst) { + tracing::debug!("dropping SP1ProverClient, cleaning up..."); + cleanup_container(&self.container_name); + self.cleaned_up.store(true, Ordering::SeqCst); + } + } +} + +/// Cleans up the a docker container with the given name. +fn cleanup_container(container_name: &str) { + tracing::debug!("cleaning up container: {}", container_name); + if let Err(e) = Command::new("sudo") + .args(["docker", "rm", "-f", container_name]) + .status() + { + eprintln!("failed to remove container: {}", e); + } +} + +#[cfg(test)] +mod tests { + use sp1_core::utils; + use sp1_core::utils::tests::FIBONACCI_ELF; + use sp1_prover::components::DefaultProverComponents; + use sp1_prover::{InnerSC, SP1CoreProof, SP1Prover, SP1ReduceProof}; + use twirp::url::Url; + use twirp::Client; + + use crate::SP1Stdin; + use crate::{proto::api::ProverServiceClient, ProveCoreRequestPayload}; + use crate::{CompressRequestPayload, SP1ProverServer}; + + #[ignore] + #[test] + fn test_client() { + utils::setup_logger(); + + let client = SP1ProverServer::new(); + + let prover = SP1Prover::::new(); + let (pk, vk) = prover.setup(FIBONACCI_ELF); + + println!("proving core"); + let proof = client.prove_core(&pk, &SP1Stdin::new()).unwrap(); + + println!("verifying core"); + prover.verify(&proof.proof, &vk).unwrap(); + + println!("proving compress"); + let proof = client.compress(&vk, proof, vec![]).unwrap(); + + println!("verifying compress"); + prover.verify_compressed(&proof, &vk).unwrap(); + } + + #[ignore] + #[tokio::test] + async fn test_prove_core() { + let client = + Client::from_base_url(Url::parse("http://localhost:3000/twirp/").unwrap()).unwrap(); + + let prover = SP1Prover::::new(); + let (pk, vk) = prover.setup(FIBONACCI_ELF); + let payload = ProveCoreRequestPayload { + pk, + stdin: SP1Stdin::new(), + }; + let request = crate::proto::api::ProveCoreRequest { + data: bincode::serialize(&payload).unwrap(), + }; + let proof = client.prove_core(request).await.unwrap(); + let proof: SP1CoreProof = bincode::deserialize(&proof.result).unwrap(); + prover.verify(&proof.proof, &vk).unwrap(); + + tracing::info!("compress"); + let payload = CompressRequestPayload { + vk: vk.clone(), + proof, + deferred_proofs: vec![], + }; + let request = crate::proto::api::CompressRequest { + data: bincode::serialize(&payload).unwrap(), + }; + let compressed_proof = client.compress(request).await.unwrap(); + let compressed_proof: SP1ReduceProof = + bincode::deserialize(&compressed_proof.result).unwrap(); + + tracing::info!("verify compressed"); + prover.verify_compressed(&compressed_proof, &vk).unwrap(); + } +} diff --git a/server/src/proto/api.rs b/server/src/proto/api.rs new file mode 100644 index 0000000000..a3ee846b67 --- /dev/null +++ b/server/src/proto/api.rs @@ -0,0 +1,89 @@ +// This file is @generated by 
prost-build. +#[derive(serde::Serialize, serde::Deserialize)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ProveCoreRequest { + #[prost(bytes = "vec", tag = "1")] + pub data: ::prost::alloc::vec::Vec, +} +#[derive(serde::Serialize, serde::Deserialize)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ProveCoreResponse { + #[prost(bytes = "vec", tag = "1")] + pub result: ::prost::alloc::vec::Vec, +} +#[derive(serde::Serialize, serde::Deserialize)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CompressRequest { + #[prost(bytes = "vec", tag = "1")] + pub data: ::prost::alloc::vec::Vec, +} +#[derive(serde::Serialize, serde::Deserialize)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CompressResponse { + #[prost(bytes = "vec", tag = "1")] + pub result: ::prost::alloc::vec::Vec, +} +pub use twirp; +pub const SERVICE_FQN: &str = "/api.ProverService"; +#[twirp::async_trait::async_trait] +pub trait ProverService { + async fn prove_core( + &self, + ctx: twirp::Context, + req: ProveCoreRequest, + ) -> Result; + async fn compress( + &self, + ctx: twirp::Context, + req: CompressRequest, + ) -> Result; +} +pub fn router(api: std::sync::Arc) -> twirp::Router +where + T: ProverService + Send + Sync + 'static, +{ + twirp::details::TwirpRouterBuilder::new(api) + .route( + "/ProveCore", + |api: std::sync::Arc, ctx: twirp::Context, req: ProveCoreRequest| async move { + api.prove_core(ctx, req).await + }, + ) + .route( + "/Compress", + |api: std::sync::Arc, ctx: twirp::Context, req: CompressRequest| async move { + api.compress(ctx, req).await + }, + ) + .build() +} +#[twirp::async_trait::async_trait] +pub trait ProverServiceClient: Send + Sync + std::fmt::Debug { + async fn prove_core( + &self, + req: ProveCoreRequest, + ) -> Result; + async fn compress( + &self, + req: CompressRequest, + ) -> Result; +} +#[twirp::async_trait::async_trait] +impl ProverServiceClient for twirp::client::Client { + async fn prove_core( + &self, + req: ProveCoreRequest, + ) -> Result { + self.request("api.ProverService/ProveCore", req).await + } + async fn compress( + &self, + req: CompressRequest, + ) -> Result { + self.request("api.ProverService/Compress", req).await + } +} diff --git a/sp1up/install b/sp1up/install old mode 100644 new mode 100755 diff --git a/sp1up/sp1up b/sp1up/sp1up old mode 100644 new mode 100755 index d6ec849b75..1f139c75ad --- a/sp1up/sp1up +++ b/sp1up/sp1up @@ -27,6 +27,7 @@ main() { -p|--path) shift; SP1UP_LOCAL_REPO=$1;; -P|--pr) shift; SP1UP_PR=$1;; -C|--commit) shift; SP1UP_COMMIT=$1;; + -c|--c-toolchain) SP1UP_C_TOOLCHAIN=true;; --arch) shift; SP1UP_ARCH=$1;; --platform) shift; SP1UP_PLATFORM=$1;; -t|--token) shift; GITHUB_TOKEN=$1;; @@ -73,7 +74,7 @@ main() { ensure ln -s "$PWD/target/release/$bin" "$SP1_BIN_DIR/$bin" done - say "done" + say "done!" 
exit 0 fi @@ -90,39 +91,10 @@ main() { SP1UP_TAG="${SP1UP_VERSION}" fi - say "installing sp1 (version ${SP1UP_VERSION}, tag ${SP1UP_TAG})" + say "installing SP1 (version ${SP1UP_VERSION}, tag ${SP1UP_TAG})" - uname_s=$(uname -s) - PLATFORM=$(tolower "${SP1UP_PLATFORM:-$uname_s}") - EXT="tar.gz" - case $PLATFORM in - linux) ;; - darwin|mac*) - PLATFORM="darwin" - ;; - mingw*|win*) - EXT="zip" - PLATFORM="win32" - ;; - *) - err "unsupported platform: $PLATFORM" - ;; - esac - - uname_m=$(uname -m) - ARCHITECTURE=$(tolower "${SP1UP_ARCH:-$uname_m}") - if [ "${ARCHITECTURE}" = "x86_64" ]; then - # Redirect stderr to /dev/null to avoid printing errors if non Rosetta. - if [ "$(sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then - ARCHITECTURE="arm64" # Rosetta. - else - ARCHITECTURE="amd64" # Intel. - fi - elif [ "${ARCHITECTURE}" = "arm64" ] ||[ "${ARCHITECTURE}" = "aarch64" ] ; then - ARCHITECTURE="arm64" # Arm. - else - ARCHITECTURE="amd64" # Amd. - fi + determine_platform + determine_architecture # Compute the URL of the release tarball in the sp1 repository. RELEASE_URL="https://github.com/${SP1UP_REPO}/releases/download/${SP1UP_TAG}/" @@ -164,11 +136,8 @@ EOF say "installing rust toolchain" ensure "$bin_path" prove install-toolchain say "installed rust toolchain" - - say "done!" - - # Install by cloning the repo with the provided branch/tag else + # Install by cloning the repo with the provided branch/tag need_cmd cargo SP1UP_BRANCH=${SP1UP_BRANCH:-main} REPO_PATH="$SP1_DIR/$SP1UP_REPO" @@ -202,8 +171,113 @@ EOF fi done done + fi + + if [ "$SP1UP_C_TOOLCHAIN" = true ]; then + install_c_toolchain + fi + + say "done!" +} + +# Downloads the C++ toolchain for RISC-V and creates a symlink to it at SP1_BIN_DIR/riscv32-unknown-elf-gcc +install_c_toolchain() { + say "installing C++ toolchain for RISC-V" + + determine_platform + determine_architecture + + case $PLATFORM in + linux) + case $ARCHITECTURE in + amd64) TOOLCHAIN_URL="https://github.com/risc0/toolchain/releases/download/2022.03.25/riscv32im-linux-x86_64.tar.xz" ;; + *) err "unsupported architecture for Linux: $ARCHITECTURE" ;; + esac + ;; + darwin) + case $ARCHITECTURE in + arm64) TOOLCHAIN_URL="https://github.com/risc0/toolchain/releases/download/2022.03.25/riscv32im-osx-arm64.tar.xz" ;; + amd64) TOOLCHAIN_URL="https://github.com/risc0/toolchain/releases/download/2022.03.25/riscv32im-osx-x86_64.tar.xz" ;; + *) err "unsupported architecture for macOS: $ARCHITECTURE" ;; + esac + ;; + win32) + case $ARCHITECTURE in + amd64) TOOLCHAIN_URL="https://github.com/risc0/toolchain/releases/download/2022.03.25/riscv32im-windows-x86_64.tar.xz" ;; + *) err "unsupported architecture for Windows: $ARCHITECTURE" ;; + esac + ;; + *) + err "unsupported platform: $PLATFORM" + ;; + esac + + TOOLCHAIN_ARCHIVE="/tmp/$(basename $TOOLCHAIN_URL)" + TOOLCHAIN_INSTALL_DIR="$SP1_DIR/riscv" + + # Always re-download the RISC-V GCC prebuilt binary archive + ensure download "$TOOLCHAIN_URL" "$TOOLCHAIN_ARCHIVE" + + # Create the ~/.sp1 and TOOLCHAIN_INSTALL_DIR directories if they don't exist + mkdir -p "$TOOLCHAIN_INSTALL_DIR" + + # Extract the downloaded archive into the target directory + if [ "$PLATFORM" = "win32" ]; then + ensure unzip "$TOOLCHAIN_ARCHIVE" -d "$TOOLCHAIN_INSTALL_DIR" >/dev/null 2>&1 + else + ensure tar -xvf "$TOOLCHAIN_ARCHIVE" -C "$TOOLCHAIN_INSTALL_DIR" >/dev/null 2>&1 + fi + rm -f "$TOOLCHAIN_ARCHIVE" - say "done" + # Find the actual location of the gcc binary and create a symlink to it in SP1_BIN_DIR + GCC_PATH=$(find 
"$TOOLCHAIN_INSTALL_DIR" -name 'riscv32-unknown-elf-gcc' -type f | head -n 1) + if [ -z "$GCC_PATH" ]; then + err "riscv32-unknown-elf-gcc not found in extracted archive" + fi + ln -sf "$GCC_PATH" "$SP1_BIN_DIR/riscv32-unknown-elf-gcc" + + # Set environment variables for the RISC-V toolchain + export PATH="$SP1_BIN_DIR:$PATH" + export CC_riscv32im_succinct_zkvm_elf="$SP1_BIN_DIR/riscv32-unknown-elf-gcc" + + say "installed C++ toolchain for RISC-V and set environment variables" +} + +determine_platform() { + uname_s=$(uname -s) + PLATFORM=$(tolower "${SP1UP_PLATFORM:-$uname_s}") + case $PLATFORM in + linux) ;; + darwin|mac*) + PLATFORM="darwin" + ;; + mingw*|win*) + PLATFORM="win32" + ;; + *) + err "unsupported platform: $PLATFORM" + ;; + esac + EXT="tar.gz" + if [ "$PLATFORM" = "win32" ]; then + EXT="zip" + fi +} + +determine_architecture() { + uname_m=$(uname -m) + ARCHITECTURE=$(tolower "${SP1UP_ARCH:-$uname_m}") + if [ "${ARCHITECTURE}" = "x86_64" ]; then + # Redirect stderr to /dev/null to avoid printing errors if non Rosetta. + if [ "$(sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then + ARCHITECTURE="arm64" # Rosetta. + else + ARCHITECTURE="amd64" # Intel. + fi + elif [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then + ARCHITECTURE="arm64" # Arm. + else + ARCHITECTURE="amd64" # Amd. fi } @@ -211,21 +285,23 @@ usage() { cat 1>&2 < OPTIONS: - -h, --help Print help information - -v, --version Install a specific version - -b, --branch Install a specific branch - -P, --pr Install a specific Pull Request - -C, --commit Install a specific commit - -r, --repo Install from a remote GitHub repo (uses default branch if no other options are set) - -p, --path Install a local repository - --arch Install a specific architecture (supports amd64 and arm64) - --platform Install a specific platform (supports win32, linux, and darwin) + -h, --help Print help information + -v, --version Install a specific version + -b, --branch Install a specific branch + -P, --pr Install a specific Pull Request + -C, --commit Install a specific commit + -r, --repo Install from a remote GitHub repo (uses default branch if no other options are set) + -p, --path Install a local repository + -c, --c-toolchain Install a C++ toolchain for RISC-V (needed building programs that bind to C code) + --arch Install a specific architecture (supports amd64 and arm64) + --platform Install a specific platform (supports win32, linux, and darwin) + -t, --token GitHub token to use for avoiding API rate limits EOF } @@ -296,7 +372,7 @@ banner() { Repo : https://github.com/succinctlabs/sp1 Book : https://succinctlabs.github.io/sp1 -Telegram : https://t.me/succinct_sp1 +Telegram : https://t.me/+AzG4ws-kD24yMGYx ._______ ._______ ._______ ._______ ._______ ._______ ._______ ._______ ._______ diff --git a/tests/bls12381-add/Cargo.toml b/tests/bls12381-add/Cargo.toml index ff3fbc99f8..edb1eb3171 100644 --- a/tests/bls12381-add/Cargo.toml +++ b/tests/bls12381-add/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bls12381-add-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/bls12381-decompress/Cargo.toml b/tests/bls12381-decompress/Cargo.toml index 2b837fce72..b76c3c8d02 100644 --- a/tests/bls12381-decompress/Cargo.toml +++ b/tests/bls12381-decompress/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bls-decompress-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/bls12381-double/Cargo.toml 
b/tests/bls12381-double/Cargo.toml index cb43319a31..72ddc804fc 100644 --- a/tests/bls12381-double/Cargo.toml +++ b/tests/bls12381-double/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bls12381-double-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/bls12381-mul/Cargo.toml b/tests/bls12381-mul/Cargo.toml index b9b519bae1..42131b2613 100644 --- a/tests/bls12381-mul/Cargo.toml +++ b/tests/bls12381-mul/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bls12381-mul-test" -version = "1.0.0-rc.1" +version = "1.0.1" publish = false diff --git a/tests/bn254-add/Cargo.toml b/tests/bn254-add/Cargo.toml index f9dd6654a8..4cddb64d22 100644 --- a/tests/bn254-add/Cargo.toml +++ b/tests/bn254-add/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bn254-add-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/bn254-double/Cargo.toml b/tests/bn254-double/Cargo.toml index c95972dda2..f51eff7840 100644 --- a/tests/bn254-double/Cargo.toml +++ b/tests/bn254-double/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bn254-double-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/bn254-mul/Cargo.toml b/tests/bn254-mul/Cargo.toml index b891e5fd8a..e3d12710b9 100644 --- a/tests/bn254-mul/Cargo.toml +++ b/tests/bn254-mul/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "bn254-mul-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/cycle-tracker/Cargo.toml b/tests/cycle-tracker/Cargo.toml index fb04b2465c..b3ce0e21d8 100644 --- a/tests/cycle-tracker/Cargo.toml +++ b/tests/cycle-tracker/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "cycle-tracker-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/ecrecover/Cargo.toml b/tests/ecrecover/Cargo.toml index 1c74f8af73..30a3774b79 100644 --- a/tests/ecrecover/Cargo.toml +++ b/tests/ecrecover/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "ecrecover-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/ed-add/Cargo.toml b/tests/ed-add/Cargo.toml index b8323c2e8f..7347c6c67e 100644 --- a/tests/ed-add/Cargo.toml +++ b/tests/ed-add/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "ed-add-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/ed-decompress/Cargo.toml b/tests/ed-decompress/Cargo.toml index 31cc9802e4..cc88095513 100644 --- a/tests/ed-decompress/Cargo.toml +++ b/tests/ed-decompress/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "ed-decompress-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/ed25519/Cargo.toml b/tests/ed25519/Cargo.toml index cdf64839d8..7f36187ce7 100644 --- a/tests/ed25519/Cargo.toml +++ b/tests/ed25519/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "ed25519-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/fibonacci/Cargo.toml b/tests/fibonacci/Cargo.toml index bbe0fde81a..eba3e580ba 100644 --- a/tests/fibonacci/Cargo.toml +++ b/tests/fibonacci/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "fibonacci-program-tests" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/hint-io/Cargo.toml b/tests/hint-io/Cargo.toml index 1e8caf9ef3..a4e3853eb2 100644 --- a/tests/hint-io/Cargo.toml 
+++ b/tests/hint-io/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "hint-io-test" -version = "1.0.0-rc.1" +version = "1.0.1" publish = false [dependencies] diff --git a/tests/keccak-permute/Cargo.toml b/tests/keccak-permute/Cargo.toml index 1836a61e6a..1d0a2f950f 100644 --- a/tests/keccak-permute/Cargo.toml +++ b/tests/keccak-permute/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "keccak-permute-test" -version = "1.0.0-rc.1" +version = "1.0.1" publish = false [dependencies] diff --git a/tests/keccak256/Cargo.toml b/tests/keccak256/Cargo.toml index 4d6b9d1f40..c76f041294 100644 --- a/tests/keccak256/Cargo.toml +++ b/tests/keccak256/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "keccak256-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/panic/Cargo.toml b/tests/panic/Cargo.toml index 73408f84e5..26bc8983db 100644 --- a/tests/panic/Cargo.toml +++ b/tests/panic/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "panic-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/rand/Cargo.toml b/tests/rand/Cargo.toml index d37f14d2e7..13bf348b94 100644 --- a/tests/rand/Cargo.toml +++ b/tests/rand/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "rand-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/secp256k1-add/Cargo.toml b/tests/secp256k1-add/Cargo.toml index c1e56e3421..1105a6854f 100644 --- a/tests/secp256k1-add/Cargo.toml +++ b/tests/secp256k1-add/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "secp256k1-add-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/secp256k1-decompress/Cargo.toml b/tests/secp256k1-decompress/Cargo.toml index 5abf03a12b..4501a60676 100644 --- a/tests/secp256k1-decompress/Cargo.toml +++ b/tests/secp256k1-decompress/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "secp256k1-decompress-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/secp256k1-double/Cargo.toml b/tests/secp256k1-double/Cargo.toml index ab451635bd..0e730b634a 100644 --- a/tests/secp256k1-double/Cargo.toml +++ b/tests/secp256k1-double/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "secp256k1-double-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/secp256k1-mul/Cargo.toml b/tests/secp256k1-mul/Cargo.toml index 451cc9cdee..20069bcd86 100644 --- a/tests/secp256k1-mul/Cargo.toml +++ b/tests/secp256k1-mul/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "secp256k1-mul-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/sha-compress/Cargo.toml b/tests/sha-compress/Cargo.toml index a7459e5e39..9952188518 100644 --- a/tests/sha-compress/Cargo.toml +++ b/tests/sha-compress/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "sha-compress-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/sha-extend/Cargo.toml b/tests/sha-extend/Cargo.toml index ee2b76c606..2f4c8e6c1f 100644 --- a/tests/sha-extend/Cargo.toml +++ b/tests/sha-extend/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "sha-extend-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/sha2/Cargo.toml b/tests/sha2/Cargo.toml index d58f48b4a2..959c6d20f0 100644 --- a/tests/sha2/Cargo.toml +++ b/tests/sha2/Cargo.toml @@ -1,7 +1,7 @@ 
[workspace] [package] name = "sha2-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/tendermint-benchmark/Cargo.toml b/tests/tendermint-benchmark/Cargo.toml index 2f4871e764..e819697907 100644 --- a/tests/tendermint-benchmark/Cargo.toml +++ b/tests/tendermint-benchmark/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "tendermint-benchmark-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/uint256-arith/Cargo.toml b/tests/uint256-arith/Cargo.toml index 566e0326a3..295f4559ee 100644 --- a/tests/uint256-arith/Cargo.toml +++ b/tests/uint256-arith/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "uint256-arith-program" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/uint256-mul/Cargo.toml b/tests/uint256-mul/Cargo.toml index b566692a4d..841a631b7f 100644 --- a/tests/uint256-mul/Cargo.toml +++ b/tests/uint256-mul/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "biguint-mul-test" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/tests/verify-proof/Cargo.toml b/tests/verify-proof/Cargo.toml index aafde1fe9a..fff214e0b5 100644 --- a/tests/verify-proof/Cargo.toml +++ b/tests/verify-proof/Cargo.toml @@ -1,7 +1,7 @@ [workspace] [package] name = "verify-proof" -version = "1.0.0-rc.1" +version = "1.0.1" edition = "2021" publish = false diff --git a/zkvm/entrypoint/src/syscalls/halt.rs b/zkvm/entrypoint/src/syscalls/halt.rs index 930685b588..3691bf981a 100644 --- a/zkvm/entrypoint/src/syscalls/halt.rs +++ b/zkvm/entrypoint/src/syscalls/halt.rs @@ -22,9 +22,10 @@ pub extern "C" fn syscall_halt(exit_code: u8) -> ! { unsafe { // When we halt, we retrieve the public values finalized digest. This is the hash of all // the bytes written to the public values fd. - let pv_digest_bytes = core::mem::take(&mut zkvm::PUBLIC_VALUES_HASHER) - .unwrap() - .finalize(); + let pv_digest_bytes = + core::mem::take(&mut *core::ptr::addr_of_mut!(zkvm::PUBLIC_VALUES_HASHER)) + .unwrap() + .finalize(); // For each digest word, call COMMIT ecall. In the runtime, this will store the digest words // into the runtime's execution record's public values digest. In the AIR, it will be used