Changes from all commits (55 commits)
a3a6f30
Add SPARK
batmendbar Mar 18, 2026
c53d3d2
Merge main into adds-spark-squashed
batmendbar Mar 18, 2026
5d9fef0
Remove comments
batmendbar Mar 18, 2026
6108fc8
Format
batmendbar Mar 18, 2026
fb9f3fc
Cleanup
batmendbar Mar 18, 2026
0fb0d8e
split commitments
batmendbar Mar 20, 2026
3cdbf0d
Merge branch 'main' into adds-spark-squashed
batmendbar Mar 24, 2026
f9bcf09
combine files
batmendbar Mar 24, 2026
ff5c3bd
format
batmendbar Mar 24, 2026
2a65bae
recompute timestamps on prover
batmendbar Mar 24, 2026
6e94a18
change file write
batmendbar Mar 25, 2026
44390ac
file write and ci change
batmendbar Mar 25, 2026
af2245b
fix ci
batmendbar Mar 25, 2026
abba4c4
format
batmendbar Mar 25, 2026
7c7d47d
update tracing
batmendbar Mar 25, 2026
c69eab7
combines prepare and spark-prove into a server
batmendbar Mar 26, 2026
5e15c6e
parallelize and update end-to-end.yml
batmendbar Mar 26, 2026
c7d174a
cleanup
batmendbar Mar 26, 2026
d96ad02
cleanup
batmendbar Mar 26, 2026
69ab9c7
cleanup
batmendbar Mar 26, 2026
941d799
format
batmendbar Mar 26, 2026
09056f9
organize tracing
batmendbar Apr 8, 2026
8bbeb5d
Format and organize
batmendbar Apr 8, 2026
db411d5
merge
batmendbar Apr 8, 2026
1062d1d
Adds temporary transcript draining
batmendbar Apr 8, 2026
c5cbc91
format
batmendbar Apr 27, 2026
70111ef
Separate spark query from noir proof
batmendbar Apr 27, 2026
ce1c677
limit message size
batmendbar Apr 27, 2026
76cbf24
stop user-given server write
batmendbar Apr 27, 2026
19f6ab9
zero check
batmendbar Apr 28, 2026
0afba35
write spark commitment to a file
batmendbar Apr 29, 2026
96530fa
cli command to natively verify spark proofs
batmendbar Apr 29, 2026
58e01e7
move spark commitments to setup
batmendbar Apr 29, 2026
83d95eb
use request in DS instance
batmendbar Apr 29, 2026
9c3eae6
format
batmendbar Apr 29, 2026
a2d65c1
Merge branch 'main' into adds-spark-squashed
batmendbar Apr 29, 2026
dcec2d0
fix CI errors
batmendbar Apr 29, 2026
b0ddd0d
remove spark server
batmendbar Apr 30, 2026
6a7f053
merge whir proofs
batmendbar May 4, 2026
e7cf195
use reference to optimize
batmendbar May 4, 2026
9762059
optimize gpa4 vector building
batmendbar May 4, 2026
1b3f44b
initialize once optimization
batmendbar May 4, 2026
a4fd935
gpa base layer refactor
batmendbar May 4, 2026
dc07d98
simplify rayon
batmendbar May 5, 2026
7bbbd5c
adds hash engine to spark
batmendbar May 5, 2026
d233b5c
adds cli flag for spark
batmendbar May 5, 2026
52f8b94
Removes clone
batmendbar May 5, 2026
1b2bdce
format
batmendbar May 5, 2026
98c6a08
fix docs
batmendbar May 5, 2026
e7839be
Decouple from batching randomness
batmendbar May 7, 2026
d45bb7c
Spark batch request
batmendbar May 7, 2026
3d31fca
separate path for single request
batmendbar May 7, 2026
aadb89f
format
batmendbar May 7, 2026
7a2ca2a
update spark.md
batmendbar May 7, 2026
14cc661
update ci
batmendbar May 7, 2026
57 changes: 43 additions & 14 deletions .github/workflows/end-to-end.yml
@@ -57,48 +57,77 @@ jobs:
- name: Clean stale benchmark artifacts
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
rm -f ./benchmark-inputs/*.pkp ./benchmark-inputs/*.pkv ./benchmark-inputs/*.np
rm -f ./benchmark-inputs/*.pkp ./benchmark-inputs/*.pkv ./benchmark-inputs/*.np ./benchmark-inputs/*.sp ./benchmark-inputs/*.spc
rm -rf ./benchmark-inputs/*-spark
echo "Cleaned stale benchmark artifacts"

- name: Prepare circuits
- name: Prepare circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "Preparing $circuit"
cargo run --release --bin provekit-cli prepare ./target/$circuit.json \
--pkp ./benchmark-inputs/$circuit-prover.pkp \
--pkv ./benchmark-inputs/$circuit-verifier.pkv
../../../target/release/provekit-cli prepare \
./target/$circuit.json \
--pkp ./benchmark-inputs/$circuit.pkp \
--pkv ./benchmark-inputs/$circuit.pkv \
--spark \
--spc ./benchmark-inputs/$circuit.spc
echo "Prepared $circuit"
done

- name: Generate proofs for all circuits
- name: Prove all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "Proving $circuit"
cargo run --release --bin provekit-cli prove \
./benchmark-inputs/$circuit-prover.pkp \
../../../target/release/provekit-cli prove \
./benchmark-inputs/$circuit.pkp \
./benchmark-inputs/tbs_720/$circuit.toml \
-o ./benchmark-inputs/$circuit-proof.np
-o ./benchmark-inputs/$circuit-proof.np \
--spark-queries-dir ./benchmark-inputs/$circuit-spark \
--produce-spark-query
echo "Proved $circuit"
done

- name: Verify proofs for all circuits
- name: Generate SPARK proofs for all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "SPARK proving $circuit"
../../../target/release/provekit-cli prove-spark \
./benchmark-inputs/$circuit.pkp \
--spark-dir ./benchmark-inputs/$circuit-spark
echo "SPARK proved $circuit"
done

- name: Natively verify Noir proofs for all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "Verifying $circuit"
cargo run --release --bin provekit-cli verify \
./benchmark-inputs/$circuit-verifier.pkv \
../../../target/release/provekit-cli verify \
./benchmark-inputs/$circuit.pkv \
./benchmark-inputs/$circuit-proof.np
echo "Verified $circuit"
done

- name: Verify SPARK proofs for all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "SPARK verifying $circuit"
../../../target/release/provekit-cli verify-spark \
./benchmark-inputs/$circuit-spark/spark_proof.sp \
./benchmark-inputs/$circuit.spc \
./benchmark-inputs/$circuit-spark/spark_query_*.json
echo "SPARK verified $circuit"
done

- name: Generate Gnark inputs
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
cargo run --release --bin provekit-cli generate-gnark-inputs \
./benchmark-inputs/t_attest-verifier.pkv \
../../../target/release/provekit-cli generate-gnark-inputs \
./benchmark-inputs/t_attest.pkv \
./benchmark-inputs/t_attest-proof.np


4 changes: 4 additions & 0 deletions .gitignore
@@ -14,11 +14,15 @@
*.pkp
*.pkv
*.np
*.sp
*.spc
spark_proofs/
params_for_recursive_verifier
params
artifacts/
spartan_vm_debug/
mavros_debug/
mavros/

# Don't ignore benchmarking artifacts
!tooling/provekit-bench/benches/*
17 changes: 17 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -17,6 +17,7 @@ members = [
"tooling/provekit-wasm",
"tooling/verifier-server",
"ntt",
"provekit/spark",
"poseidon2",
"playground/passport-input-gen",
]
@@ -102,6 +103,7 @@ provekit-ffi = { path = "tooling/provekit-ffi" }
provekit-gnark = { path = "tooling/provekit-gnark" }
provekit-prover = { path = "provekit/prover", default-features = false }
provekit-r1cs-compiler = { path = "provekit/r1cs-compiler" }
provekit-spark = { path = "provekit/spark" }
provekit-verifier = { path = "provekit/verifier" }
provekit-verifier-server = { path = "tooling/verifier-server" }
provekit-wasm = { path = "tooling/provekit-wasm" }
2 changes: 1 addition & 1 deletion noir-examples/power/Nargo.toml
@@ -1,5 +1,5 @@
[package]
name = "basic"
name = "power"
type = "bin"
authors = [""]
compiler_version = ">=0.22.0"
Binary file added noir-examples/power/benchmark-inputs/power.spc
2 changes: 1 addition & 1 deletion noir-examples/power/src/main.nr
@@ -1,6 +1,6 @@
fn main(mut x: Field, y: pub Field) {
let mut r = 1;
for i in 0..10 {
for _ in 0..1000 {
r *= x;
}
assert(r == y);
2 changes: 1 addition & 1 deletion playground/passport-input-gen/src/bin/passport_cli/main.rs
@@ -267,7 +267,7 @@ fn prove_circuit<T: serde::Serialize>(
.map_err(|e| anyhow::anyhow!("ABI parse error for {circuit_name}: {e}"))?;

tee_println!(" [{circuit_name}] Generating proof...");
let proof = prover
let (proof, _) = prover
.prove(input_map)
.with_context(|| format!("Proving {circuit_name}"))?;

8 changes: 7 additions & 1 deletion provekit/common/src/file/binary_format.rs
@@ -24,4 +24,10 @@ pub const NOIR_PROOF_SCHEME_FORMAT: [u8; 8] = *b"NrProScm";
pub const NOIR_PROOF_SCHEME_VERSION: (u16, u16) = (1, 2);

pub const NOIR_PROOF_FORMAT: [u8; 8] = *b"NPSProof";
pub const NOIR_PROOF_VERSION: (u16, u16) = (1, 1);
pub const NOIR_PROOF_VERSION: (u16, u16) = (1, 2);

pub const SPARK_PROOF_FORMAT: [u8; 8] = *b"SparkPrf";
pub const SPARK_PROOF_VERSION: (u16, u16) = (1, 0);

pub const SPARK_SETUP_FORMAT: [u8; 8] = *b"SparkStp";
pub const SPARK_SETUP_VERSION: (u16, u16) = (1, 0);
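For context, the new SPARK constants follow the same pattern as the existing formats above: an 8-byte magic tag plus a (major, minor) version pair. The sketch below is illustrative only, showing the kind of tag-and-version check these constants support; the actual on-disk layout is owned by ProveKit's bin read/write helpers and may differ.

const SPARK_PROOF_FORMAT: [u8; 8] = *b"SparkPrf";
const SPARK_PROOF_VERSION: (u16, u16) = (1, 0);

fn check_header(bytes: &[u8]) -> Result<(), String> {
    // Expect: 8-byte magic, then little-endian major and minor versions.
    if bytes.len() < 12 {
        return Err("truncated header".into());
    }
    if bytes[..8] != SPARK_PROOF_FORMAT {
        return Err("unexpected format tag".into());
    }
    let version = (
        u16::from_le_bytes([bytes[8], bytes[9]]),
        u16::from_le_bytes([bytes[10], bytes[11]]),
    );
    if version != SPARK_PROOF_VERSION {
        return Err(format!("unsupported version {}.{}", version.0, version.1));
    }
    Ok(())
}

fn main() {
    let mut header = Vec::new();
    header.extend_from_slice(&SPARK_PROOF_FORMAT);
    header.extend_from_slice(&SPARK_PROOF_VERSION.0.to_le_bytes());
    header.extend_from_slice(&SPARK_PROOF_VERSION.1.to_le_bytes());
    assert!(check_header(&header).is_ok());
}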
5 changes: 3 additions & 2 deletions provekit/common/src/file/io/mod.rs
@@ -3,11 +3,12 @@ mod buf_ext;
mod counting_writer;
mod json;

pub use self::bin::Compression;
use {
self::{
bin::{
deserialize_from_bytes, read_bin, read_hash_config as read_hash_config_bin,
serialize_to_bytes, write_bin, Compression,
serialize_to_bytes, write_bin,
},
buf_ext::BufExt,
counting_writer::CountingWriter,
@@ -29,7 +30,7 @@ pub trait FileFormat: Serialize + for<'a> Deserialize<'a> {
}

/// Helper trait to optionally extract hash config.
pub(crate) trait MaybeHashAware {
pub trait MaybeHashAware {
fn maybe_hash_config(&self) -> Option<HashConfig>;
}

3 changes: 2 additions & 1 deletion provekit/common/src/lib.rs
@@ -1,7 +1,7 @@
pub mod file;
pub use file::binary_format;
pub mod hash_config;
mod interner;
pub mod interner;
mod mavros;
mod noir_proof_scheme;
pub mod ntt;
@@ -11,6 +11,7 @@ pub mod prefix_covector;
mod prover;
mod r1cs;
pub mod skyscraper;
pub mod spark;
pub mod sparse_matrix;
mod transcript_sponge;
pub mod u256_arith;
18 changes: 10 additions & 8 deletions provekit/common/src/mavros.rs
@@ -11,14 +11,16 @@ use {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MavrosProver {
#[serde(with = "crate::utils::serde_jsonify")]
pub abi: Abi,
pub num_public_inputs: usize,
pub whir_for_witness: WhirR1CSScheme,
pub witgen_binary: Vec<u64>,
pub ad_binary: Vec<u64>,
pub constraints_layout: ConstraintsLayout,
pub witness_layout: WitnessLayout,
pub hash_config: HashConfig,
pub abi: Abi,
pub num_public_inputs: usize,
pub whir_for_witness: WhirR1CSScheme,
pub witgen_binary: Vec<u64>,
pub ad_binary: Vec<u64>,
pub constraints_layout: ConstraintsLayout,
pub witness_layout: WitnessLayout,
pub hash_config: HashConfig,
#[serde(skip, default)]
pub produce_spark_query: bool,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
27 changes: 19 additions & 8 deletions provekit/common/src/prover.rs
@@ -18,6 +18,8 @@ pub struct NoirProver {
pub split_witness_builders: SplitWitnessBuilders,
pub witness_generator: NoirWitnessGenerator,
pub whir_for_witness: WhirR1CSScheme,
#[serde(skip, default)]
pub produce_spark_query: bool,
}

// INVARIANT: Variant order is wire-format-critical (postcard uses positional
@@ -40,20 +42,29 @@ impl Prover {
split_witness_builders: d.split_witness_builders,
witness_generator: d.witness_generator,
whir_for_witness: d.whir_for_witness,
produce_spark_query: false,
}),
NoirProofScheme::Mavros(d) => Prover::Mavros(MavrosProver {
abi: d.abi,
num_public_inputs: d.num_public_inputs,
whir_for_witness: d.whir_for_witness,
witgen_binary: d.witgen_binary,
ad_binary: d.ad_binary,
constraints_layout: d.constraints_layout,
witness_layout: d.witness_layout,
hash_config: d.hash_config,
abi: d.abi,
num_public_inputs: d.num_public_inputs,
whir_for_witness: d.whir_for_witness,
witgen_binary: d.witgen_binary,
ad_binary: d.ad_binary,
constraints_layout: d.constraints_layout,
witness_layout: d.witness_layout,
hash_config: d.hash_config,
produce_spark_query: false,
}),
}
}

pub fn set_produce_spark_query(&mut self, value: bool) {
match self {
Prover::Noir(p) => p.produce_spark_query = value,
Prover::Mavros(p) => p.produce_spark_query = value,
}
}

pub fn abi(&self) -> &Abi {
match self {
Prover::Noir(p) => p.witness_generator.abi(),
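The produce_spark_query flag is marked #[serde(skip, default)] on both prover variants, so toggling it via set_produce_spark_query is a runtime-only choice that never changes the serialized prover key. A toy round-trip (standalone types, not the PR's) illustrating that serde behavior:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct ToyProver {
    name: String,
    #[serde(skip, default)]
    produce_spark_query: bool,
}

fn main() {
    let mut prover = ToyProver {
        name: "noir".into(),
        produce_spark_query: false,
    };
    // Runtime-only toggle, analogous to Prover::set_produce_spark_query.
    prover.produce_spark_query = true;

    // The skipped field is never written, and comes back as its Default
    // (false) when the data is read back.
    let bytes = postcard::to_allocvec(&prover).unwrap();
    let restored: ToyProver = postcard::from_bytes(&bytes).unwrap();
    assert_eq!(restored.name, "noir");
    assert!(!restored.produce_spark_query);
}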
35 changes: 35 additions & 0 deletions provekit/common/src/spark.rs
@@ -0,0 +1,35 @@
use {
crate::{utils::serde_ark, FieldElement},
serde::{Deserialize, Serialize},
sha3::{Digest, Sha3_256},
};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Point {
#[serde(with = "serde_ark")]
pub row: Vec<FieldElement>,
#[serde(with = "serde_ark")]
pub col: Vec<FieldElement>,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct R1CSSparkQuery {
pub point_to_evaluate: Point,
#[serde(with = "serde_ark")]
pub claimed_a: FieldElement,
#[serde(with = "serde_ark")]
pub claimed_b: FieldElement,
#[serde(with = "serde_ark")]
pub claimed_c: FieldElement,
}

impl R1CSSparkQuery {
pub fn hash_bytes(&self) -> [u8; 32] {
hash_query_set(std::slice::from_ref(self))
}
}

pub fn hash_query_set(queries: &[R1CSSparkQuery]) -> [u8; 32] {
let bytes = postcard::to_allocvec(queries).expect("serializing R1CSSparkQuery set");
Sha3_256::digest(&bytes).into()
}
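hash_query_set commits to a whole batch of queries by postcard-serializing the slice and hashing the bytes with SHA3-256; hash_bytes is the single-query case via std::slice::from_ref. A standalone sketch of the same serialize-then-hash pattern, with plain u64s standing in for FieldElement so it runs without the provekit crates:

use {
    serde::{Deserialize, Serialize},
    sha3::{Digest, Sha3_256},
};

#[derive(Serialize, Deserialize)]
struct ToyQuery {
    row: Vec<u64>,
    col: Vec<u64>,
    claimed_a: u64,
}

fn hash_toy_query_set(queries: &[ToyQuery]) -> [u8; 32] {
    // Serialize the whole slice, then hash the resulting bytes.
    let bytes = postcard::to_allocvec(queries).expect("serializing toy query set");
    Sha3_256::digest(&bytes).into()
}

fn main() {
    let query = ToyQuery { row: vec![1, 2], col: vec![3, 4], claimed_a: 42 };
    // Hashing a single query is just the one-element-slice case.
    let digest = hash_toy_query_set(std::slice::from_ref(&query));
    println!("{:02x?}", digest);
}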
5 changes: 5 additions & 0 deletions provekit/common/src/utils/sumcheck.rs
@@ -152,6 +152,11 @@ fn eval_eq(
}
}

/// Evaluates a quadratic polynomial on a value
pub fn eval_quadratic_poly(poly: [FieldElement; 3], point: FieldElement) -> FieldElement {
poly[0] + point * (poly[1] + point * poly[2])
}

/// Evaluates a cubic polynomial on a value
pub fn eval_cubic_poly(poly: [FieldElement; 4], point: FieldElement) -> FieldElement {
poly[0] + point * (poly[1] + point * (poly[2] + point * poly[3]))
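The new eval_quadratic_poly evaluates the coefficient array [c0, c1, c2] via Horner's rule, mirroring the existing cubic helper. A self-contained check (plain u64 arithmetic standing in for FieldElement) that the nested form equals the direct c0 + c1*x + c2*x^2:

fn eval_quadratic_direct(poly: [u64; 3], point: u64) -> u64 {
    poly[0] + poly[1] * point + poly[2] * point * point
}

fn eval_quadratic_horner(poly: [u64; 3], point: u64) -> u64 {
    // Same nesting as eval_quadratic_poly: c0 + x * (c1 + x * c2).
    poly[0] + point * (poly[1] + point * poly[2])
}

fn main() {
    let poly = [3, 5, 7]; // 3 + 5x + 7x^2
    for x in 0..20 {
        assert_eq!(eval_quadratic_direct(poly, x), eval_quadratic_horner(poly, x));
    }
    println!("Horner and direct evaluation agree");
}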