From c87fab32755d1ef458549fcb13b8ce0668808f8a Mon Sep 17 00:00:00 2001 From: John Guibas Date: Wed, 3 Jul 2024 14:44:44 -0700 Subject: [PATCH] hm --- .github/workflows/docs.yml | 51 ++++++++ book/SUMMARY.md | 4 +- book/generating-proofs/basics.md | 2 +- book/getting-started/hardware-requirements.md | 33 ++++++ book/onchain-verification/getting-started.md | 4 +- book/writing-programs/patched-crates.md | 75 +++++------- book/writing-programs/precompiles.md | 112 ++---------------- zkvm/lib/src/lib.rs | 64 ++++++++-- 8 files changed, 179 insertions(+), 166 deletions(-) create mode 100644 .github/workflows/docs.yml create mode 100644 book/getting-started/hardware-requirements.md diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000000..27b1b0cd8e --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,51 @@ +name: Docs +on: + push: + branches: [main] + pull_request: + branches: + - "**" +permissions: + contents: read + pages: write + id-token: write +concurrency: + group: deploy + cancel-in-progress: false +jobs: + build: + name: Build + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Setup Rust + uses: dtolnay/rust-toolchain@stable + - name: Configure cache + uses: Swatinem/rust-cache@v2 + - name: Setup pages + id: pages + uses: actions/configure-pages@v4 + - name: Clean docs folder + run: cargo clean --doc + - name: Build docs + run: cargo doc --no-deps + - name: Add redirect + run: echo '' > target/doc/index.html + - name: Remove lock file + run: rm target/doc/.lock + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: target/doc + deploy: + name: Deploy + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 \ No newline at end of file diff --git a/book/SUMMARY.md 
b/book/SUMMARY.md index 30cccffdd8..9b416c0ad5 100644 --- a/book/SUMMARY.md +++ b/book/SUMMARY.md @@ -10,6 +10,8 @@ - [Project Template](./getting-started/project-template.md) +- [Hardware Requirements](./getting-started/hardware-requirements.md) + # Writing Programs - [Setup](./writing-programs/setup.md) @@ -36,7 +38,7 @@ # Onchain Verification -- [Getting Started](./onchain-verification/getting-started.md) +- [Setup](./onchain-verification/getting-started.md) - [Solidity SDK](./onchain-verification/solidity-sdk.md) diff --git a/book/generating-proofs/basics.md b/book/generating-proofs/basics.md index 9cd7a1b43b..3d9f4c0ecf 100644 --- a/book/generating-proofs/basics.md +++ b/book/generating-proofs/basics.md @@ -9,7 +9,7 @@ An end-to-end flow of proving `f(x) = y` with the SP1 zkVM involves the followin To make this more concrete, let's walk through a simple example of generating a proof for a Fiboancci program inside the zkVM. -## Fibonacci +## Example: Fibonacci ```rust,noplayground {{#include ../../examples/fibonacci/script/src/main.rs}} ``` diff --git a/book/getting-started/hardware-requirements.md b/book/getting-started/hardware-requirements.md new file mode 100644 index 0000000000..759596f7d2 --- /dev/null +++ b/book/getting-started/hardware-requirements.md @@ -0,0 +1,33 @@ +# Hardware Requirements + +The hardware requirements for SP1 depend on what features you want to use. These requirements can also +change over time as the design of the zkVM evolves. 
+ +**The most important requirement is CPU for performance/latency and RAM to prevent running out of memory.** + +| | Mock / Network | Compress / Shrink / Wrap | PLONK (EVM) | +|--------|------------------------------|------------------------------------|----------------------------| +| CPU | 1+, single-core perf matters | 16+, more is better | 32+, more is better | +| Memory | 8GB+, more is better | 32GB+, more if you have more cores | 128GB+ (for PLONK) | +| Disk | 20GB+ | 20GB+ | 100GB+ (for trusted setup) | + +### CPU + +The execution & trace generation of the zkVM is mostly CPU bound, so having a high single-core +performance is recommended to accelerate these steps. The rest of the prover is mostly bound by hashing/field operations +which can be parallelized with multiple cores. + +### Memory + +Our prover requires keeping large matrices (i.e., traces) in memory to generate the proofs. Certain steps of the prover +have a minimum memory requirement, meaning that if you have less than this amount of memory, the process will OOM. + +This effect is most noticeable when using the PLONK prover, which requires around 128GB of RAM to generate a proof. We use PLONK to avoid +having to perform a trusted setup, which other SNARK provers like Groth16 require. We have future optimizations planned to reduce +the memory requirements of the PLONK prover substantially. + +### Disk + +Disk is required to install the SP1 zkVM toolchain and to install the trusted setup artifacts, if you plan to locally build the PLONK prover. + +Furthermore, disk is used to checkpoint the state of the program execution, which is required to generate the proofs. 
\ No newline at end of file diff --git a/book/onchain-verification/getting-started.md b/book/onchain-verification/getting-started.md index de8266ec23..918a6705e5 100644 --- a/book/onchain-verification/getting-started.md +++ b/book/onchain-verification/getting-started.md @@ -1,4 +1,4 @@ -# Onchain Verification +# Onchain Verification: Setup The best way to get started with verifying SP1 proofs on-chain is to refer to the [SP1 Project Template](https://github.com/succinctlabs/sp1-project-template/tree/main). @@ -8,7 +8,7 @@ The best way to get started with verifying SP1 proofs on-chain is to refer to th Refer to the section on [Contract Addresses](./contract-addresses.md) for the addresses of the deployed verifiers. -## Generating SP1 Proof for Onchain Verification +## Generating SP1 Proofs for Onchain Verification By default, the proofs generated by SP1 are not verifiable onchain, as they are non-constant size and STARK verification on Ethereum is very expensive. To generate a proof that can be verified onchain, we use performant STARK recursion to combine SP1 shard proofs into a single STARK proof and then wrap that in a SNARK proof. Our `ProverClient` has a prover option for this called `plonk`. Behind the scenes, this function will first generate a normal SP1 proof, then recursively combine all of them into a single proof using the STARK recursion protocol. Finally, the proof is wrapped in a SNARK proof using PLONK. diff --git a/book/writing-programs/patched-crates.md b/book/writing-programs/patched-crates.md index 84f0fbb036..1fd72d64b5 100644 --- a/book/writing-programs/patched-crates.md +++ b/book/writing-programs/patched-crates.md @@ -34,67 +34,52 @@ revm = { git = "https://github.com/sp1-patches/revm", branch = "patch-v5.0.0" } reth-primitives = { git = "https://github.com/sp1-patches/reth", default-features = false, branch = "sp1-reth" } ``` -You may also need to update your `Cargo.lock` file. 
For example: +An example of using patched crates is available in our [Tendermint Example](https://github.com/succinctlabs/sp1/blob/main/examples/tendermint/program/Cargo.toml#L22-L25). + +### Verifying Patch Usage: Cargo + +You can check if the patch was applied by using cargo's tree command to print the dependencies of the crate you patched. ```bash -cargo update -p ed25519-consensus +cargo tree -p sha2 +cargo tree -p sha2@0.9.8 ``` -If you encounter issues relating to cargo / git, you can try setting `CARGO_NET_GIT_FETCH_WITH_CLI`: +Next to the package name, it should have a link to the GitHub repository that you patched with. -``` -CARGO_NET_GIT_FETCH_WITH_CLI=true cargo update -p ed25519-consensus -``` +### Verifying Patch Usage: SP1 -You can permanently set this value in `~/.cargo/config`: +To check if a precompile is used by your program, you can observe SP1's log output. Make sure to set up the logger with `sp1_sdk::utils::setup_logger()` and run your program with `RUST_LOG=info`. -```toml -[net] -git-fetch-with-cli = true -``` +In the example below, note how the `sha256_extend` precompile was reported as being used eight times. -### Sanity Checks +```bash +2024-07-03T04:46:33.753527Z INFO prove_core: execution report (syscall counts): +2024-07-03T04:46:33.753550Z INFO prove_core: 8 sha256_extend +2024-07-03T04:46:33.753550Z INFO prove_core: 8 commit +2024-07-03T04:46:33.753553Z INFO prove_core: 8 commit_deferred_proofs +2024-07-03T04:46:33.753554Z INFO prove_core: 4 write +2024-07-03T04:46:33.753555Z INFO prove_core: 1 halt +``` -**You must make sure your patch is in the workspace root, otherwise it will not be applied.** +### Troubleshooting -You can check if the patch was applied by running a command like the following: +You may also need to update your `Cargo.lock` file. 
For example: ```bash -cargo tree -p sha2 -cargo tree -p sha2@0.9.8 +cargo update -p ed25519-consensus ``` -Next to the package name, it should have a link to the Github repository that you patched with. +If you encounter issues relating to cargo / git, you can try setting `CARGO_NET_GIT_FETCH_WITH_CLI`: -**Checking whether a precompile is used** +``` +CARGO_NET_GIT_FETCH_WITH_CLI=true cargo update -p ed25519-consensus +``` -To check if a precompile is used by your program, when running the script to generate a proof, make sure to use the `RUST_LOG=info` environment variable and set up `utils::setup_logger()` in your script. Then, when you run the script, you should see a log message like the following: +You can permanently set this value in `~/.cargo/config`: -```bash -2024-03-02T19:10:39.570244Z INFO runtime.run(...): ... -2024-03-02T19:10:39.570244Z INFO runtime.run(...): ... -2024-03-02T19:10:40.003907Z INFO runtime.prove(...): Sharding the execution record. -2024-03-02T19:10:40.003916Z INFO runtime.prove(...): Generating trace for each chip. -2024-03-02T19:10:40.003918Z INFO runtime.prove(...): Record stats before generate_trace (incomplete): ShardStats { - nb_cpu_events: 7476561, - nb_add_events: 2126546, - nb_mul_events: 11116, - nb_sub_events: 54075, - nb_bitwise_events: 646940, - nb_shift_left_events: 142595, - nb_shift_right_events: 274016, - nb_divrem_events: 0, - nb_lt_events: 81862, - nb_field_events: 0, - nb_sha_extend_events: 0, - nb_sha_compress_events: 0, - nb_keccak_permute_events: 2916, - nb_ed_add_events: 0, - nb_ed_decompress_events: 0, - nb_weierstrass_add_events: 0, - nb_weierstrass_double_events: 0, - nb_k256_decompress_events: 0, -} +```toml +[net] +git-fetch-with-cli = true ``` -The `ShardStats` struct contains the number of events for each "table" from the execution of the program, including precompile tables. 
In the example above, the `nb_keccak_permute_events` field is `2916`, indicating that the precompile for the Keccak permutation was used. diff --git a/book/writing-programs/precompiles.md b/book/writing-programs/precompiles.md index f8709fcdfa..8812a80665 100644 --- a/book/writing-programs/precompiles.md +++ b/book/writing-programs/precompiles.md @@ -10,113 +10,15 @@ Each precompile has a unique system call number and implements an interface for SP1 also has been designed specifically to make it easy for external contributors to create and extend the zkVM with their own precompiles. To learn more about this, you can look at implementations of existing precompiles in the [precompiles](https://github.com/succinctlabs/sp1/tree/main/core/src/syscall/precompiles) folder. More documentation on this will be coming soon. -## Supported Precompiles +**To use precompiles, we typically recommend you interact with them through [patches](./patched-crates.md), which are crates modified +to use these precompiles under the hood.** -Typically, we recommend you interact with precompiles through [patches](./patched-crates.md), which are crates patched -to use these precompiles under the hood. However, if you are an advanced user you can interact -with the precompiles directly using extern system calls. +## Specification -Here is a list of extern system calls that use precompiles. +If you are an advanced user you can interact with the precompiles directly using extern system calls. -### SHA256 Extend - -Executes the SHA256 extend operation on a word array. - -```rust,noplayground -pub extern "C" fn syscall_sha256_extend(w: *mut u32); -``` - -### SHA256 Compress - -Executes the SHA256 compress operation on a word array and a given state. - -```rust,noplayground -pub extern "C" fn syscall_sha256_compress(w: *mut u32, state: *mut u32); -``` - -### Keccak256 Permute - -Executes the Keccak256 permutation function on the given state. 
- -```rust,noplayground -pub extern "C" fn syscall_keccak_permute(state: *mut u64); -``` - -#### Ed25519 Add - -Adds two points on the ed25519 curve. The result is stored in the first point. - -```rust,noplayground -pub extern "C" fn syscall_ed_add(p: *mut u32, q: *mut u32); -``` - -#### Ed25519 Decompress. - -Decompresses a compressed Ed25519 point. - -The second half of the input array should contain the compressed Y point with the final bit as -the sign bit. The first half of the input array will be overwritten with the decompressed point, -and the sign bit will be removed. - -```rust,noplayground -pub extern "C" fn syscall_ed_decompress(point: &mut [u8; 64]) -``` - -#### Secp256k1 Add - -Adds two Secp256k1 points. The result is stored in the first point. - -```rust,noplayground -pub extern "C" fn syscall_secp256k1_add(p: *mut u32, q: *mut u32) -``` - -#### Secp256k1 Double - -Doubles a Secp256k1 point in place. - -```rust,noplayground -pub extern "C" fn syscall_secp256k1_double(p: *mut u32) -``` - -#### Secp256k1 Decompress - -Decompess a Secp256k1 point. - -The input array should be 32 bytes long, with the first 16 bytes containing the X coordinate in -big-endian format. The second half of the input will be overwritten with the decompressed point. - -```rust,noplayground -pub extern "C" fn syscall_secp256k1_decompress(point: &mut [u8; 64], is_odd: bool); -``` - -#### Bn254 Add - -Adds two Bn254 points. The result is stored in the first point. - -```rust,noplayground -pub extern "C" fn syscall_bn254_add(p: *mut u32, q: *mut u32) -``` - -#### Bn254 Double - -Doubles a Bn254 point in place. - -```rust,noplayground -pub extern "C" fn syscall_bn254_double(p: *mut u32) -``` - -#### Bls12-381 Add - -Adds two Bls12-381 points. The result is stored in the first point. - -```rust,noplayground -pub extern "C" fn syscall_bls12381_add(p: *mut u32, q: *mut u32) -``` - -#### Bls12-381 Double - -Doubles a Bls12-381 point in place. 
+Here is a list of all available system calls & precompiles. ```rust,noplayground -pub extern "C" fn syscall_bls12381_double(p: *mut u32) -``` +{{#include ../../zkvm/lib/src/lib.rs}} +``` \ No newline at end of file diff --git a/zkvm/lib/src/lib.rs b/zkvm/lib/src/lib.rs index f4e4b81e8d..6f12a36807 100644 --- a/zkvm/lib/src/lib.rs +++ b/zkvm/lib/src/lib.rs @@ -1,9 +1,4 @@ -//! Precompiles for the SP1 zkVM. -//! -//! Specifically, this crate contains user-friendly functions that call SP1 syscalls. Syscalls are -//! also declared here for convenience. In order to avoid duplicate symbol errors, the syscall -//! function impls must live in sp1-zkvm, which is only imported into the end user program crate. -//! In contrast, sp1-precompiles can be imported into any crate in the dependency tree. +//! System calls for the SP1 zkVM. #[cfg(feature = "bls12381")] pub mod bls12381; @@ -17,37 +12,82 @@ pub mod utils; #[cfg(feature = "verify")] pub mod verify; -pub const BIGINT_WIDTH_WORDS: usize = 8; - extern "C" { + /// Halts the program with the given exit code. pub fn syscall_halt(exit_code: u8) -> !; + + /// Writes the bytes in the given buffer to the given file descriptor. pub fn syscall_write(fd: u32, write_buf: *const u8, nbytes: usize); + + /// Reads the bytes from the given file descriptor into the given buffer. pub fn syscall_read(fd: u32, read_buf: *mut u8, nbytes: usize); + + /// Executes the SHA-256 extend operation on the given word array. pub fn syscall_sha256_extend(w: *mut u32); + + /// Executes the SHA-256 compress operation on the given word array and a given state. pub fn syscall_sha256_compress(w: *mut u32, state: *mut u32); + + /// Executes an Ed25519 curve addition on the given points. pub fn syscall_ed_add(p: *mut u32, q: *mut u32); + + /// Executes an Ed25519 curve decompression on the given point. pub fn syscall_ed_decompress(point: &mut [u8; 64]); + + /// Executes a Secp256k1 curve addition on the given points. 
pub fn syscall_secp256k1_add(p: *mut u32, q: *const u32); + + /// Executes a Secp256k1 curve doubling on the given point. pub fn syscall_secp256k1_double(p: *mut u32); + + /// Executes a Secp256k1 curve decompression on the given point. pub fn syscall_secp256k1_decompress(point: &mut [u8; 64], is_odd: bool); + + /// Executes a Bn254 curve addition on the given points. pub fn syscall_bn254_add(p: *mut u32, q: *const u32); + + /// Executes a Bn254 curve doubling on the given point. pub fn syscall_bn254_double(p: *mut u32); + + /// Executes a BLS12-381 curve addition on the given points. pub fn syscall_bls12381_add(p: *mut u32, q: *const u32); + + /// Executes a BLS12-381 curve doubling on the given point. pub fn syscall_bls12381_double(p: *mut u32); + + /// Executes the Keccak-256 permutation on the given state. pub fn syscall_keccak_permute(state: *mut u64); + + /// Executes a uint256 multiplication on the given inputs. pub fn syscall_uint256_mulmod(x: *mut u32, y: *const u32); + + /// Enters unconstrained mode. pub fn syscall_enter_unconstrained() -> bool; + + /// Exits unconstrained mode. pub fn syscall_exit_unconstrained(); + + /// Defers the verification of a valid SP1 zkVM proof. pub fn syscall_verify_sp1_proof(vkey: &[u32; 8], pv_digest: &[u8; 32]); + + /// Returns the length of the next element in the hint stream. pub fn syscall_hint_len() -> usize; + + /// Reads the next element in the hint stream into the given buffer. pub fn syscall_hint_read(ptr: *mut u8, len: usize); + + /// Allocates a buffer aligned to the given alignment. pub fn sys_alloc_aligned(bytes: usize, align: usize) -> *mut u8; + + /// Decompresses a BLS12-381 point. pub fn syscall_bls12381_decompress(point: &mut [u8; 96], is_odd: bool); + + /// Computes a big integer operation with a modulus. 
pub fn sys_bigint( - result: *mut [u32; BIGINT_WIDTH_WORDS], + result: *mut [u32; 8], op: u32, - x: *const [u32; BIGINT_WIDTH_WORDS], - y: *const [u32; BIGINT_WIDTH_WORDS], - modulus: *const [u32; BIGINT_WIDTH_WORDS], + x: *const [u32; 8], + y: *const [u32; 8], + modulus: *const [u32; 8], ); }