Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
96 commits
Select commit Hold shift + click to select a range
b8bff46
first version of the sqrt PST without the MIPP
Dec 22, 2022
a29888d
snarkpack integration
maramihali Jan 19, 2023
1d7b27d
snarkpack integration
maramihali Jan 19, 2023
ef18b7b
Merge branch 'snarkpack-integration' of github.com:cryptonetlab/testu…
nikkolasg Feb 3, 2023
7a7707e
adding mipp as submodule directly
nikkolasg Feb 6, 2023
59b2371
snarkpack integration
maramihali Jan 19, 2023
50ba972
finalizing
nikkolasg Feb 6, 2023
d44ffd0
building
nikkolasg Feb 6, 2023
4d68831
snarkpack integration
maramihali Jan 19, 2023
0d13d9c
merging
nikkolasg Feb 7, 2023
60c8524
Merge branch 'snarkpack-integration' into feat/mipp-module
nikkolasg Feb 7, 2023
dc5bcc8
update mipp with latest optimisations and add preliminary
maramihali Feb 6, 2023
ef27dc2
Merge branch 'feat/mipp-module' of github.com:cryptonetlab/testudo in…
nikkolasg Feb 7, 2023
3641aa6
Merge pull request #13 from cryptonetlab/feat/mipp-module
nikkolasg Feb 7, 2023
8fc381e
improve codebase documentation
maramihali Feb 7, 2023
ab2c7cd
Merge branch 'master' into snarkpack-integration
maramihali Feb 7, 2023
688cfff
remove unused imports and apply cargo fix changes
maramihali Feb 7, 2023
93e0c24
passing v0.4
nikkolasg Feb 7, 2023
4117964
tests passing
nikkolasg Feb 7, 2023
598b92a
adding gh action
nikkolasg Feb 7, 2023
edf077b
correct workflow item
nikkolasg Feb 7, 2023
3835371
correct working dir and msrv
nikkolasg Feb 7, 2023
71e6762
Merge pull request #14 from cryptonetlab/feat/v0.4
nikkolasg Feb 7, 2023
cdf8bd8
remove unnecessary stuff
maramihali Feb 7, 2023
1a3c209
Merge branch 'snarkpack-integration' into feat/remove-useless
maramihali Feb 7, 2023
71122ec
wip
nikkolasg Feb 7, 2023
b67c384
Merge pull request #15 from cryptonetlab/feat/remove-useless
nikkolasg Feb 7, 2023
db9213c
wip
nikkolasg Feb 7, 2023
8fedd99
wip
nikkolasg Feb 7, 2023
86ae3be
remove circuit in fq as it's not needed now
maramihali Feb 7, 2023
6600432
done for tonight
nikkolasg Feb 7, 2023
2f3be26
wip
nikkolasg Feb 8, 2023
fdb890f
Simplify circuit by removing outer Groth16 in the meantime
nikkolasg Feb 8, 2023
3b65325
merged without outer groth16
nikkolasg Feb 8, 2023
0884de2
wip
nikkolasg Feb 8, 2023
f2ed1a8
sip
nikkolasg Feb 8, 2023
1afb856
parallelise commitment and groth16 verification
maramihali Feb 7, 2023
393cf49
finalise comments for mipp
maramihali Feb 8, 2023
55af3ad
wip
nikkolasg Feb 8, 2023
79feba1
finalise comments
maramihali Feb 8, 2023
ed5fa97
Merge branch 'snarkpack-integration' of https://github.com/maramihali…
maramihali Feb 8, 2023
23231d4
Merge pull request #19 from cryptonetlab/experiment
nikkolasg Feb 8, 2023
0e071e0
wip
nikkolasg Feb 8, 2023
48f0794
merged from upstream
nikkolasg Feb 8, 2023
627ecfb
compiling but test failing
nikkolasg Feb 8, 2023
3106e83
putting back non random blinds
nikkolasg Feb 8, 2023
f94a450
using absorb when we can
nikkolasg Feb 8, 2023
f2c2ae9
absorbing scalar
nikkolasg Feb 8, 2023
a4c0793
with bls12-381
nikkolasg Feb 9, 2023
9691306
Merge pull request #21 from cryptonetlab/feat/generics-absorb
nikkolasg Feb 9, 2023
bd4c32a
stuff
maramihali Feb 9, 2023
c64004f
trying to bring ark-blst to testudo
nikkolasg Feb 9, 2023
d2e3136
correcting random implementation
nikkolasg Feb 9, 2023
853799e
with square in place
nikkolasg Feb 9, 2023
d5cbe1b
works with blst
nikkolasg Feb 9, 2023
4a85c78
works with blst
nikkolasg Feb 9, 2023
503c4f9
Merge branch 'snarkpack-integration' of https://github.com/maramihali…
maramihali Feb 10, 2023
ab9a413
fix: don't require nightly Rust
vmx Feb 13, 2023
0163083
using ark-blst main branch
nikkolasg Feb 14, 2023
a36093c
Merge pull request #24 from cryptonetlab/fix/ark-blst
nikkolasg Feb 14, 2023
e7c8432
started cleanup and added testudo benchmark
maramihali Feb 13, 2023
dd973b6
Merge branch 'snarkpack-integration' of https://github.com/maramihali…
maramihali Feb 14, 2023
07ab741
Merge branch 'snarkpack-integration' into prover-refactor
maramihali Feb 14, 2023
6767d90
Merge pull request #23 from vmx/stable-rust
nikkolasg Feb 15, 2023
6d35ef8
add testudo snark and nizk in separate files
maramihali Feb 15, 2023
6f30e37
Merge branch 'snarkpack-integration' into prover-refactor
maramihali Feb 15, 2023
7484321
rename functions that perform setups and add comments
maramihali Feb 16, 2023
be30c34
Merge pull request #29 from cryptonetlab/prover-refactor
maramihali Feb 16, 2023
4e9bcf8
prototyping
maramihali Mar 6, 2023
13f6efb
Merge branch 'snarkpack-integration' into feat/odd-case
maramihali Mar 6, 2023
571f54f
explain testudo-nizk
maramihali Mar 6, 2023
7d52852
add support for odd case in sqrt_pst
maramihali Mar 10, 2023
8fa04bb
add missing constraints and correct proof size for benchmarks
maramihali Mar 10, 2023
df9e890
add support for odd case in sqrt_pst
maramihali Mar 10, 2023
a49d598
Merge branch 'feat/odd-case' into benchmarks
maramihali Mar 10, 2023
84b3ce7
Merge pull request #33 from cryptonetlab/benchmarks
maramihali Mar 10, 2023
11d9ba0
Merge pull request #32 from cryptonetlab/feat/odd-case
maramihali Mar 10, 2023
a26b9d0
fix typo in comment
maramihali Mar 13, 2023
2164b54
Documentation #31
maramihali Mar 22, 2023
661214d
fix typo in comment
maramihali Mar 13, 2023
425c274
Fix Cargo.toml and add benchmark for sqrt pst (#34)
maramihali Mar 22, 2023
af03d8f
add README
maramihali Mar 22, 2023
64c368f
Merge branch 'snarkpack-integration' of https://github.com/cryptonetl…
maramihali Mar 22, 2023
8e45a8f
comment from readme not executing
nikkolasg Mar 22, 2023
ffad1c8
Merge branch 'master' into snarkpack-integration
nikkolasg Mar 22, 2023
e492b90
Merge branch 'master' of github.com:cryptonetlab/testudo
nikkolasg Apr 17, 2023
c820325
adding groth16 comparison
nikkolasg Jun 5, 2023
f2b224c
reduced benchmark size
nikkolasg Jun 6, 2023
a374ea8
do not verify for blst because of zero MSM bug
nikkolasg Jun 6, 2023
8312fd1
adding pst comparison
nikkolasg Jun 6, 2023
23ac14e
adding blst
nikkolasg Jun 6, 2023
6ececf5
right sizes for bench
nikkolasg Jun 6, 2023
d8a2e69
adding pst size
nikkolasg Jun 6, 2023
0c05f20
adding groth16 comparison separately
nikkolasg Jun 6, 2023
e4a41a0
add file groth16
nikkolasg Jun 6, 2023
7c71b8f
putting rightfields in benchmark
nikkolasg Jun 6, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,11 @@ name = "testudo"
harness = false

[[bench]]
name = "pst"
name = "commitment"
harness = false

[[bench]]
name = "groth16"
harness = false

[features]
Expand Down
63 changes: 54 additions & 9 deletions benches/pst.rs → benches/commitment.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
use std::time::Instant;

use ark_bls12_377::Bls12_377;
use ark_ec::pairing::Pairing;
use ark_poly_commit::multilinear_pc::MultilinearPC;
use ark_serialize::CanonicalSerialize;
use ark_std::UniformRand;
use libtestudo::{
parameters::PoseidonConfiguration, poseidon_transcript::PoseidonTranscript, sqrt_pst::Polynomial,
};
use serde::Serialize;
type F = ark_bls12_377::Fr;
type E = ark_bls12_377::Bls12_377;
use ark_std::UniformRand;

#[derive(Default, Clone, Serialize)]
struct BenchmarkResults {
Expand All @@ -18,22 +18,37 @@ struct BenchmarkResults {
verification_time: u128,
proof_size: usize,
commiter_key_size: usize,
pst_commit: u128,
pst_opening: u128,
pst_verification: u128,
pst_proof_size: u128,
}
fn main() {
let params = ark_bls12_377::Fr::poseidon_params();
testudo_commitment_benchmark::<Bls12_377>("testudo_commitment_bls12377.csv");
testudo_commitment_benchmark::<ark_blst::Bls12>("testudo_commitment_bls12381.csv");
}

let mut writer = csv::Writer::from_path("sqrt_pst.csv").expect("unable to open csv writer");
for &s in [4, 5, 20, 27].iter() {
fn testudo_commitment_benchmark<E: Pairing>(fname: &str)
where
E::ScalarField: PoseidonConfiguration,
{
let params = E::ScalarField::poseidon_params();
let mut writer = csv::Writer::from_path(fname).expect("unable to open csv writer");
for &s in [4, 5, 15, 20, 25].iter() {
println!("Running for {} inputs", s);
let mut rng = ark_std::test_rng();
let mut br = BenchmarkResults::default();
br.power = s;
let num_vars = s;
let len = 2_usize.pow(num_vars as u32);
let z: Vec<F> = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect();
let r: Vec<F> = (0..num_vars)
bench_pst::<E>(num_vars, &mut br);
let z: Vec<E::ScalarField> = (0..len)
.into_iter()
.map(|_| F::rand(&mut rng))
.map(|_| E::ScalarField::rand(&mut rng))
.collect();
let r: Vec<E::ScalarField> = (0..num_vars)
.into_iter()
.map(|_| E::ScalarField::rand(&mut rng))
.collect();

let setup_vars = (num_vars as f32 / 2.0).ceil() as usize;
Expand Down Expand Up @@ -96,3 +111,33 @@ fn main() {
writer.flush().expect("wasn't able to flush");
}
}

/// Benchmarks the plain (non-sqrt) PST multilinear commitment for a
/// polynomial over `num_vars` variables, recording commit / open / verify
/// times (ms) and the compressed proof size into `res`.
fn bench_pst<E: Pairing>(num_vars: usize, res: &mut BenchmarkResults) {
    use ark_poly::{DenseMultilinearExtension, MultilinearExtension};
    use ark_poly_commit::multilinear_pc::MultilinearPC;

    // Single handle to the thread-local RNG for every sampling step below.
    let mut rng = rand::thread_rng();

    // Generate PST public parameters and split them into commit/verify keys.
    let srs = MultilinearPC::<E>::setup(num_vars, &mut rng);
    let (ck, vk) = MultilinearPC::trim(&srs, num_vars);

    // Random dense multilinear polynomial to commit to.
    let poly = DenseMultilinearExtension::rand(num_vars, &mut rng);

    // Time the commitment.
    let timer = Instant::now();
    let commitment = MultilinearPC::commit(&ck, &poly);
    res.pst_commit = timer.elapsed().as_millis();

    // Random evaluation point and the claimed evaluation at that point.
    let point: Vec<E::ScalarField> = (0..num_vars)
        .map(|_| E::ScalarField::rand(&mut rng))
        .collect();
    let eval = poly.evaluate(&point).unwrap();

    // Time the opening proof.
    let timer = Instant::now();
    let proof = MultilinearPC::open(&ck, &poly, &point);
    res.pst_opening = timer.elapsed().as_millis();

    // Time verification; the result is asserted after sizes are recorded.
    let timer = Instant::now();
    let ok = MultilinearPC::check(&vk, &commitment, &point, eval, &proof);
    res.pst_verification = timer.elapsed().as_millis();

    // Record the compressed proof size in bytes.
    let mut bytes = Vec::new();
    proof.serialize_compressed(&mut bytes).unwrap();
    res.pst_proof_size = bytes.len() as u128;

    assert!(ok);
}
86 changes: 86 additions & 0 deletions benches/groth16.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
use ark_ec::pairing::Pairing;
use ark_ff::PrimeField;
use ark_groth16::prepare_verifying_key;
use ark_groth16::Groth16;
use ark_r1cs_std::alloc::AllocVar;
use ark_r1cs_std::fields::fp::FpVar;
use ark_relations::r1cs::ConstraintSynthesizer;
use ark_relations::r1cs::ConstraintSystem;
use ark_std::marker::PhantomData;
use ark_std::time::Instant;
use serde::Serialize;
use std::ops::Mul;
/// One CSV row of Groth16 benchmark results (serialized to `groth16.csv`).
#[derive(Default, Clone, Serialize)]
struct BenchmarkResults {
    /// Exponent used for the run: the circuit has 2^power constraints.
    power: usize,
    /// Actual number of constraints (2^power).
    input_constraints: usize,
    /// Groth16 proving time in milliseconds.
    g16_proving_time: u128,
}

fn main() {
let n = 10;
let nconstraints = (2_usize).pow(n as u32);
let mut res = BenchmarkResults::default();
res.power = n;
res.input_constraints = nconstraints;
groth16_bench::<ark_bls12_377::Bls12_377>(nconstraints, &mut res);
let mut writer = csv::Writer::from_path("groth16.csv").expect("unable to open csv writer");
writer
.serialize(res)
.expect("unable to write results to csv");
writer.flush().expect("wasn't able to flush");
}
/// Synthetic benchmark circuit: emits a fixed number of multiplication
/// constraints and exposes no public inputs.
struct GrothCircuit<F: PrimeField> {
    /// Number of R1CS constraints `generate_constraints` will emit.
    n_constraints: usize,
    /// Ties the circuit to its scalar field; stores no data.
    _p: PhantomData<F>,
}

impl<F: PrimeField> GrothCircuit<F> {
    /// Creates a circuit that will generate `n_constraints` constraints.
    pub fn new(n_constraints: usize) -> Self {
        Self {
            n_constraints,
            _p: PhantomData,
        }
    }
}

impl<F: PrimeField> ConstraintSynthesizer<F> for GrothCircuit<F> {
    /// Allocates one random witness and multiplies it by itself
    /// `n_constraints` times; each witness-by-witness multiplication emits
    /// one R1CS constraint (the caller's `assert_eq!` on
    /// `cs.num_constraints()` relies on this).
    fn generate_constraints(
        self,
        cs: ark_relations::r1cs::ConstraintSystemRef<F>,
    ) -> ark_relations::r1cs::Result<()> {
        let a = F::rand(&mut rand::thread_rng());
        // The original had a dead `mut` here and shadowed `av` inside the
        // loop with an unused binding (two compiler warnings). The product
        // only matters for the constraint it adds, so discard it into `_`.
        let av = FpVar::new_witness(cs, || Ok(a))?;
        for _ in 0..self.n_constraints {
            let _ = av.clone().mul(av.clone());
        }
        Ok(())
    }
}
/// Benchmarks Groth16 proving for a synthetic circuit with exactly
/// `n_constraints` multiplication constraints, storing the proving time (ms)
/// in `res`, then sanity-checks the proof.
fn groth16_bench<E: Pairing>(n_constraints: usize, res: &mut BenchmarkResults) {
    // Trusted setup for this circuit shape (benchmark-only, RNG-based).
    let params = {
        let c = GrothCircuit::<E::ScalarField>::new(n_constraints);
        Groth16::<E>::generate_random_parameters_with_reduction(c, &mut rand::thread_rng()).unwrap()
    };
    let pvk = prepare_verifying_key(&params.vk);
    println!("Running G16 proving for {} constraints", n_constraints);
    // Synthesize once outside the timed section to confirm the circuit
    // really emits one constraint per requested multiplication.
    let number_constraints = {
        let circuit = GrothCircuit::<E::ScalarField>::new(n_constraints);
        let cs = ConstraintSystem::<E::ScalarField>::new_ref();
        circuit.generate_constraints(cs.clone()).unwrap();
        cs.num_constraints() as u64
    };
    assert_eq!(number_constraints as usize, n_constraints);
    // Timed section: proving only (setup and verification are excluded).
    let start = Instant::now();
    let proof = Groth16::<E>::create_random_proof_with_reduction(
        GrothCircuit::<E::ScalarField>::new(n_constraints),
        &params,
        &mut rand::thread_rng(),
    )
    .expect("proof creation failed");
    let proving_time = start.elapsed().as_millis();
    res.g16_proving_time = proving_time;

    // The circuit has no public inputs, hence the empty slice.
    let r = Groth16::<E>::verify_proof(&pvk, &proof, &[]).unwrap();
    assert!(r);
}
114 changes: 91 additions & 23 deletions benches/testudo.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,16 @@
use std::marker::PhantomData;
use std::time::Instant;

use ark_crypto_primitives::sponge::poseidon::PoseidonConfig;
use ark_crypto_primitives::sponge::Absorb;
use ark_ec::pairing::Pairing;
use ark_ff::PrimeField;
use ark_groth16::prepare_verifying_key;
use ark_groth16::Groth16;
use ark_r1cs_std::fields::fp::FpVar;
use ark_r1cs_std::prelude::AllocVar;
use ark_relations::r1cs::ConstraintSynthesizer;
use ark_relations::r1cs::ConstraintSystem;
use ark_serialize::*;
use libtestudo::parameters::PoseidonConfiguration;
use libtestudo::{
Expand All @@ -12,6 +19,40 @@ use libtestudo::{
Instance,
};
use serde::Serialize;
use std::ops::Mul;

fn main() {
    // Only the ark-blst backend is currently enabled; the other curves are
    // kept commented out for quick switching between backends.
    // bench_with_bls12_377();
    // bench_with_bls12_381();
    bench_with_ark_blst();
}
/// Synthetic circuit used for the Groth16 comparison benchmark: emits a
/// fixed number of multiplication constraints and has no public inputs.
struct GrothCircuit<F: PrimeField> {
    /// Number of R1CS constraints `generate_constraints` will emit.
    n_constraints: usize,
    /// Ties the circuit to its scalar field; stores no data.
    _p: PhantomData<F>,
}

impl<F: PrimeField> GrothCircuit<F> {
    /// Creates a circuit that will generate `n_constraints` constraints.
    pub fn new(n_constraints: usize) -> Self {
        Self {
            n_constraints,
            _p: PhantomData,
        }
    }
}

impl<F: PrimeField> ConstraintSynthesizer<F> for GrothCircuit<F> {
    /// Allocates one random witness and multiplies it by itself
    /// `n_constraints` times; each witness-by-witness multiplication emits
    /// one R1CS constraint (the caller's `assert_eq!` on
    /// `cs.num_constraints()` relies on this).
    fn generate_constraints(
        self,
        cs: ark_relations::r1cs::ConstraintSystemRef<F>,
    ) -> ark_relations::r1cs::Result<()> {
        let a = F::rand(&mut rand::thread_rng());
        // The original had a dead `mut` here and shadowed `av` inside the
        // loop with an unused binding (two compiler warnings). The product
        // only matters for the constraint it adds, so discard it into `_`.
        let av = FpVar::new_witness(cs, || Ok(a))?;
        for _ in 0..self.n_constraints {
            let _ = av.clone().mul(av.clone());
        }
        Ok(())
    }
}

#[derive(Default, Clone, Serialize)]
struct BenchmarkResults {
Expand All @@ -22,38 +63,34 @@ struct BenchmarkResults {
sat_proof_size: usize,
eval_proof_size: usize,
total_proof_size: usize,
}

fn main() {
bench_with_bls12_377();
// bench_with_bls12_381();
// bench_with_ark_blst();
g16_proving_time: u128,
}

fn bench_with_ark_blst() {
let params = ark_blst::Scalar::poseidon_params();
testudo_snark_bench::<ark_blst::Bls12>(params, "testudo_blst");
testudo_snark_bench::<ark_blst::Bls12>(params, "testudo_blst", false);
}

fn bench_with_bls12_377() {
let params = ark_bls12_377::Fr::poseidon_params();
testudo_snark_bench::<ark_bls12_377::Bls12_377>(params, "testudo_bls12_377");
testudo_snark_bench::<ark_bls12_377::Bls12_377>(params, "testudo_bls12_377", true);
}

fn bench_with_bls12_381() {
let params = ark_bls12_381::Fr::poseidon_params();
testudo_snark_bench::<ark_bls12_381::Bls12_381>(params, "testudo_bls12_381");
testudo_snark_bench::<ark_bls12_381::Bls12_381>(params, "testudo_bls12_381", true);
}

fn testudo_snark_bench<E>(params: PoseidonConfig<E::ScalarField>, file_name: &str)
fn testudo_snark_bench<E>(params: PoseidonConfig<E::ScalarField>, file_name: &str, verify: bool)
where
E: Pairing,
E::ScalarField: PrimeField,
E::ScalarField: Absorb,
{
let mut writer = csv::Writer::from_path(file_name).expect("unable to open csv writer");
for &s in [4, 5, 10, 12, 14, 16, 18, 20, 22, 24, 26].iter() {
println!("Running for {} inputs", s);
for &s in [5, 10, 15, 20, 24].iter() {
//for &s in [4].iter() {
println!("Running for {} constraints", s);
let mut br = BenchmarkResults::default();
let num_vars = (2_usize).pow(s as u32);
let num_cons = num_vars;
Expand Down Expand Up @@ -108,20 +145,51 @@ where
let mut verifier_transcript = PoseidonTranscript::new(&params.clone());
let start = Instant::now();

let res = proof.verify(
&gens,
&comm,
&inputs,
&mut verifier_transcript,
params.clone(),
);
assert!(res.is_ok());
let duration = start.elapsed().as_millis();
br.testudo_verification_time = duration;

if verify {
let res = proof.verify(
&gens,
&comm,
&inputs,
&mut verifier_transcript,
params.clone(),
);
assert!(res.is_ok());
let duration = start.elapsed().as_millis();
br.testudo_verification_time = duration;
}

groth16_bench::<E>(num_cons, &mut br);
writer
.serialize(br)
.expect("unable to write results to csv");
writer.flush().expect("wasn't able to flush");
}
}

/// Benchmarks Groth16 proving for a synthetic circuit with exactly
/// `n_constraints` multiplication constraints, storing the proving time (ms)
/// in `res`, then sanity-checks the proof.
fn groth16_bench<E: Pairing>(n_constraints: usize, res: &mut BenchmarkResults) {
    // Trusted setup for this circuit shape (benchmark-only, RNG-based).
    let params = {
        let c = GrothCircuit::<E::ScalarField>::new(n_constraints);
        Groth16::<E>::generate_random_parameters_with_reduction(c, &mut rand::thread_rng()).unwrap()
    };
    let pvk = prepare_verifying_key(&params.vk);
    println!("Running G16 proving for {} constraints", n_constraints);
    // Synthesize once outside the timed section to confirm the circuit
    // really emits one constraint per requested multiplication.
    let number_constraints = {
        let circuit = GrothCircuit::<E::ScalarField>::new(n_constraints);
        let cs = ConstraintSystem::<E::ScalarField>::new_ref();
        circuit.generate_constraints(cs.clone()).unwrap();
        cs.num_constraints() as u64
    };
    assert_eq!(number_constraints as usize, n_constraints);
    // Timed section: proving only (setup and verification are excluded).
    let start = Instant::now();
    let proof = Groth16::<E>::create_random_proof_with_reduction(
        GrothCircuit::<E::ScalarField>::new(n_constraints),
        &params,
        &mut rand::thread_rng(),
    )
    .expect("proof creation failed");
    let proving_time = start.elapsed().as_millis();
    res.g16_proving_time = proving_time;

    // The circuit has no public inputs, hence the empty slice.
    let r = Groth16::<E>::verify_proof(&pvk, &proof, &[]).unwrap();
    assert!(r);
}
11 changes: 6 additions & 5 deletions src/r1csproof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -569,26 +569,27 @@ mod tests {
#[test]
fn check_r1cs_proof_ark_blst() {
let params = ark_blst::Scalar::poseidon_params();
check_r1cs_proof::<ark_blst::Bls12>(params);
check_r1cs_proof::<ark_blst::Bls12>(10, params);
}
#[test]
fn check_r1cs_proof_bls12_377() {
let params = ark_bls12_377::Fr::poseidon_params();
check_r1cs_proof::<ark_bls12_377::Bls12_377>(params);
check_r1cs_proof::<ark_bls12_377::Bls12_377>(10, params);
}

#[test]
fn check_r1cs_proof_bls12_381() {
let params = ark_bls12_381::Fr::poseidon_params();
check_r1cs_proof::<ark_bls12_381::Bls12_381>(params);
check_r1cs_proof::<ark_bls12_381::Bls12_381>(10, params);
}
fn check_r1cs_proof<P>(params: PoseidonConfig<P::ScalarField>)
fn check_r1cs_proof<P>(size: usize, params: PoseidonConfig<P::ScalarField>)
where
P: Pairing,
P::ScalarField: PrimeField,
P::ScalarField: Absorb,
{
let num_vars = 1024;

let num_vars = (2_usize).pow(size as u32);
let num_cons = num_vars;
let num_inputs = 3;
let (inst, vars, input) =
Expand Down