diff --git a/backend/Cargo.toml b/backend/Cargo.toml
index 4a88084e2..1d1798b0d 100644
--- a/backend/Cargo.toml
+++ b/backend/Cargo.toml
@@ -39,6 +39,7 @@ strum = { version = "0.24.1", features = ["derive"] }
 log = "0.4.17"
 serde = "1.0"
 serde_json = "1.0"
+bincode = "1.3.3"
 hex = "0.4"
 thiserror = "1.0.43"
 mktemp = "0.5.0"
diff --git a/backend/src/composite/mod.rs b/backend/src/composite/mod.rs
index d5f61e719..0ccbe20d9 100644
--- a/backend/src/composite/mod.rs
+++ b/backend/src/composite/mod.rs
@@ -1,5 +1,12 @@
-use std::{collections::BTreeMap, io, marker::PhantomData, path::PathBuf, sync::Arc};
-
+use std::{
+    collections::BTreeMap,
+    io::{self, Cursor, Read},
+    marker::PhantomData,
+    path::PathBuf,
+    sync::Arc,
+};
+
+use itertools::Itertools;
 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::witgen::WitgenCallback;
 use powdr_number::{DegreeType, FieldElement};
@@ -10,11 +17,18 @@ use crate::{Backend, BackendFactory, BackendOptions, Error, Proof};
 
 mod split;
 
-/// A composite proof that contains a proof for each machine separately.
+/// A composite verification key that contains a verification key for each machine separately.
+#[derive(Serialize, Deserialize)]
+struct CompositeVerificationKey {
+    /// Verification key for each machine (if available, otherwise None), sorted by machine name.
+    verification_keys: Vec<Option<Vec<u8>>>,
+}
+
+/// A composite proof that contains a proof for each machine separately, sorted by machine name.
 #[derive(Serialize, Deserialize)]
 struct CompositeProof {
-    /// Map from machine name to proof
-    proofs: BTreeMap<String, Vec<u8>>,
+    /// Machine proofs, sorted by machine name.
+    proofs: Vec<Vec<u8>>,
 }
 
 pub(crate) struct CompositeBackendFactory<F: FieldElement, B: BackendFactory<F>> {
@@ -42,13 +56,40 @@ impl<F: FieldElement, B: BackendFactory<F>> BackendFactory<F> for CompositeBacke
         verification_app_key: Option<&mut dyn std::io::Read>,
         backend_options: BackendOptions,
     ) -> Result<Box<dyn crate::Backend<'a, F> + 'a>, Error> {
-        if setup.is_some() || verification_key.is_some() || verification_app_key.is_some() {
+        if verification_app_key.is_some() {
             unimplemented!();
         }
 
-        let per_machine_data = split::split_pil((*pil).clone())
+        let pils = split::split_pil((*pil).clone());
+
+        // Read the setup once (if any) to pass to all backends.
+        let setup_bytes = setup.map(|setup| {
+            let mut setup_data = Vec::new();
+            setup.read_to_end(&mut setup_data).unwrap();
+            setup_data
+        });
+
+        // Read all provided verification keys
+        let verification_keys = verification_key
+            .map(|verification_key| bincode::deserialize_from(verification_key).unwrap())
+            .unwrap_or(CompositeVerificationKey {
+                verification_keys: vec![None; pils.len()],
+            })
+            .verification_keys;
+
+        let machine_data = pils
             .into_iter()
-            .map(|(machine_name, pil)| {
+            .zip_eq(verification_keys.into_iter())
+            .map(|((machine_name, pil), verification_key)| {
+                // Set up readers for the setup and verification key
+                let mut setup_cursor = setup_bytes.as_ref().map(Cursor::new);
+                let setup = setup_cursor.as_mut().map(|cursor| cursor as &mut dyn Read);
+
+                let mut verification_key_cursor = verification_key.as_ref().map(Cursor::new);
+                let verification_key = verification_key_cursor
+                    .as_mut()
+                    .map(|cursor| cursor as &mut dyn Read);
+
                 let pil = Arc::new(pil);
                 let output_dir = output_dir
                     .clone()
@@ -61,22 +102,20 @@ impl<F: FieldElement, B: BackendFactory<F>> BackendFactory<F> for CompositeBacke
                     pil.clone(),
                     fixed,
                     output_dir,
-                    // TODO: Handle setup, verification_key, verification_app_key
-                    None,
-                    None,
+                    setup,
+                    verification_key,
+                    // TODO: Handle verification_app_key
                     None,
                     backend_options.clone(),
                 );
                 backend.map(|backend| (machine_name.to_string(), MachineData { pil, backend }))
             })
-            .collect::<Result<BTreeMap<_, _>, _>>()?;
-        Ok(Box::new(CompositeBackend {
-            machine_data: per_machine_data,
-        }))
+            .collect::<Result<_, _>>()?;
+        Ok(Box::new(CompositeBackend { machine_data }))
     }
 
-    fn generate_setup(&self, _size: DegreeType, _output: &mut dyn io::Write) -> Result<(), Error> {
-        Err(Error::NoSetupAvailable)
+    fn generate_setup(&self, size: DegreeType, output: &mut dyn io::Write) -> Result<(), Error> {
+        self.factory.generate_setup(size, output)
     }
 }
 
@@ -86,6 +125,9 @@ struct MachineData<'a, F> {
 }
 
 pub(crate) struct CompositeBackend<'a, F> {
+    /// Maps each machine name to the corresponding machine data
+    /// Note that it is essential that we use BTreeMap here to ensure that the machines are
+    /// deterministically ordered.
     machine_data: BTreeMap<String, MachineData<'a, F>>,
 }
 
@@ -117,33 +159,48 @@ impl<'a, F: FieldElement> Backend<'a, F> for CompositeBackend<'a, F> {
                 let witness = machine_witness_columns(witness, pil, machine);
 
-                backend
-                    .prove(&witness, None, witgen_callback)
-                    .map(|proof| (machine.clone(), proof))
+                backend.prove(&witness, None, witgen_callback)
             })
             .collect::<Result<_, _>>()?,
         };
 
-        Ok(serde_json::to_vec(&proof).unwrap())
+        Ok(bincode::serialize(&proof).unwrap())
     }
 
     fn verify(&self, proof: &[u8], instances: &[Vec<F>]) -> Result<(), Error> {
-        let proof: CompositeProof = serde_json::from_slice(proof).unwrap();
-        for (machine, machine_proof) in proof.proofs {
-            let machine_data = self
-                .machine_data
-                .get(&machine)
-                .ok_or_else(|| Error::BackendError(format!("Unknown machine: {machine}")))?;
+        let proof: CompositeProof = bincode::deserialize(proof).unwrap();
+        for (machine_data, machine_proof) in self.machine_data.values().zip_eq(proof.proofs) {
             machine_data.backend.verify(&machine_proof, instances)?;
         }
         Ok(())
     }
 
-    fn export_setup(&self, _output: &mut dyn io::Write) -> Result<(), Error> {
-        unimplemented!()
+    fn export_setup(&self, output: &mut dyn io::Write) -> Result<(), Error> {
+        // All backends are the same, so just pick the first one.
+        self.machine_data
+            .values()
+            .next()
+            .unwrap()
+            .backend
+            .export_setup(output)
     }
 
-    fn export_verification_key(&self, _output: &mut dyn io::Write) -> Result<(), Error> {
-        unimplemented!();
+    fn verification_key_bytes(&self) -> Result<Vec<u8>, Error> {
+        let verification_key = CompositeVerificationKey {
+            verification_keys: self
+                .machine_data
+                .values()
+                .map(|machine_data| {
+                    let backend = machine_data.backend.as_ref();
+                    let vk_bytes = backend.verification_key_bytes();
+                    match vk_bytes {
+                        Ok(vk_bytes) => Ok(Some(vk_bytes)),
+                        Err(Error::NoVerificationAvailable) => Ok(None),
+                        Err(e) => Err(e),
+                    }
+                })
+                .collect::<Result<_, _>>()?,
+        };
+        Ok(bincode::serialize(&verification_key).unwrap())
     }
 
     fn export_ethereum_verifier(&self, _output: &mut dyn io::Write) -> Result<(), Error> {
diff --git a/backend/src/estark/starky_wrapper.rs b/backend/src/estark/starky_wrapper.rs
index ab0a0bd6b..9d57aa3c7 100644
--- a/backend/src/estark/starky_wrapper.rs
+++ b/backend/src/estark/starky_wrapper.rs
@@ -1,4 +1,3 @@
-use std::io;
 use std::path::PathBuf;
 use std::sync::Arc;
 use std::time::Instant;
@@ -212,13 +211,9 @@ impl<'a, F: FieldElement> Backend<'a, F> for EStark {
         }
     }
 
-    fn export_verification_key(&self, output: &mut dyn io::Write) -> Result<(), Error> {
-        match serde_json::to_writer(output, &self.setup) {
-            Ok(_) => Ok(()),
-            Err(_) => Err(Error::BackendError(
-                "Could not export verification key".to_string(),
-            )),
-        }
+    fn verification_key_bytes(&self) -> Result<Vec<u8>, Error> {
+        serde_json::to_vec(&self.setup)
+            .map_err(|_| Error::BackendError("Could not serialize verification key".to_string()))
     }
 }
 
diff --git a/backend/src/halo2/mod.rs b/backend/src/halo2/mod.rs
index 0abd07ea9..4305468a8 100644
--- a/backend/src/halo2/mod.rs
+++ b/backend/src/halo2/mod.rs
@@ -159,11 +159,9 @@ impl<'a, T: FieldElement> Backend<'a, T> for Halo2Prover {
         Ok(self.write_setup(&mut output)?)
     }
 
-    fn export_verification_key(&self, mut output: &mut dyn io::Write) -> Result<(), Error> {
+    fn verification_key_bytes(&self) -> Result<Vec<u8>, Error> {
         let vk = self.verification_key()?;
-        vk.write(&mut output, SerdeFormat::Processed)?;
-
-        Ok(())
+        Ok(vk.to_bytes(SerdeFormat::Processed))
     }
 
     fn export_ethereum_verifier(&self, output: &mut dyn io::Write) -> Result<(), Error> {
diff --git a/backend/src/lib.rs b/backend/src/lib.rs
index a61478de7..e490fa9bb 100644
--- a/backend/src/lib.rs
+++ b/backend/src/lib.rs
@@ -176,7 +176,16 @@ pub trait Backend<'a, F: FieldElement> {
 
     /// Exports the verification key in a backend specific format. Can be used
     /// to create a new backend object of the same kind.
-    fn export_verification_key(&self, _output: &mut dyn io::Write) -> Result<(), Error> {
+    fn export_verification_key(&self, output: &mut dyn io::Write) -> Result<(), Error> {
+        let v = self.verification_key_bytes()?;
+        log::info!("Verification key size: {} bytes", v.len());
+        output
+            .write_all(&v)
+            .map_err(|_| Error::BackendError("Could not write verification key".to_string()))?;
+        Ok(())
+    }
+
+    fn verification_key_bytes(&self) -> Result<Vec<u8>, Error> {
         Err(Error::NoVerificationAvailable)
     }
 
diff --git a/executor/src/constant_evaluator/mod.rs b/executor/src/constant_evaluator/mod.rs
index 09e360671..9fa007bc9 100644
--- a/executor/src/constant_evaluator/mod.rs
+++ b/executor/src/constant_evaluator/mod.rs
@@ -609,4 +609,24 @@ mod test {
             ("F.a".to_string(), convert([14, 15, 16, 17].to_vec()))
         );
     }
+
+    #[test]
+    fn do_not_add_constraint_for_empty_tuple() {
+        let input = r#"namespace N(4);
+            let f: -> () = || ();
+            let g: col = |i| {
+                // This returns an empty tuple, we check that this does not lead to
+                // a call to add_constraints()
+                f();
+                i
+            };
+        "#;
+        let analyzed = analyze_string::<GoldilocksField>(input);
+        assert_eq!(analyzed.degree(), 4);
+        let constants = generate(&analyzed);
+        assert_eq!(
+            constants[0],
+            ("N.g".to_string(), convert([0, 1, 2, 3].to_vec()))
+        );
+    }
 }
diff --git a/pil-analyzer/src/evaluator.rs b/pil-analyzer/src/evaluator.rs
index a7bbff651..c989f28ff 100644
--- a/pil-analyzer/src/evaluator.rs
+++ b/pil-analyzer/src/evaluator.rs
@@ -658,7 +658,10 @@ impl<'a, 'b, T: FieldElement, S: SymbolLookup<'a, T>> Evaluator<'a, 'b, T, S> {
                 }
                 Operation::AddConstraint => {
                     let result = self.value_stack.pop().unwrap();
-                    self.symbols.add_constraints(result, SourceRef::unknown())?;
+                    match result.as_ref() {
+                        Value::Tuple(t) if t.is_empty() => {}
+                        _ => self.symbols.add_constraints(result, SourceRef::unknown())?,
+                    }
                 }
             };
         }
diff --git a/pipeline/src/pipeline.rs b/pipeline/src/pipeline.rs
index c204fae1f..8e4a282cb 100644
--- a/pipeline/src/pipeline.rs
+++ b/pipeline/src/pipeline.rs
@@ -5,11 +5,13 @@ use std::{
     io::{self, BufReader},
     marker::Send,
     path::{Path, PathBuf},
+    rc::Rc,
     sync::Arc,
     time::Instant,
 };
 
 use log::Level;
+use mktemp::Temp;
 use powdr_ast::{
     analyzed::Analyzed,
     asm_analysis::AnalysisASMFile,
@@ -114,6 +116,10 @@ pub struct Pipeline<T: FieldElement> {
     artifact: Artifacts<T>,
     /// Output directory for intermediate files. If None, no files are written.
     output_dir: Option<PathBuf>,
+    /// The temporary directory, owned by the pipeline (or any copies of it).
+    /// This object is not used directly, but keeping it here ensures that the directory
+    /// is not deleted until the pipeline is dropped.
+    _tmp_dir: Option<Rc<Temp>>,
    /// The name of the pipeline. Used to name output files.
     name: Option<String>,
     /// Whether to overwrite existing files. If false, an error is returned if a file
@@ -140,6 +146,7 @@ where
         Pipeline {
             artifact: Default::default(),
             output_dir: None,
+            _tmp_dir: None,
             log_level: Level::Info,
             name: None,
             force_overwrite: false,
@@ -190,12 +197,14 @@
 /// let proof = pipeline.compute_proof().unwrap();
 /// ```
 impl<T: FieldElement> Pipeline<T> {
-    /// Initializes the output directory to a temporary directory.
-    /// Note that the user is responsible for keeping the temporary directory alive.
-    pub fn with_tmp_output(self, tmp_dir: &mktemp::Temp) -> Self {
+    /// Initializes the output directory to a temporary directory which lives as long
+    /// as the pipeline does.
+    pub fn with_tmp_output(self) -> Self {
+        let tmp_dir = Rc::new(mktemp::Temp::new_dir().unwrap());
         Pipeline {
             output_dir: Some(tmp_dir.to_path_buf()),
             force_overwrite: true,
+            _tmp_dir: Some(tmp_dir),
             ..self
         }
     }
@@ -795,8 +804,11 @@ impl<T: FieldElement> Pipeline<T> {
         let start = Instant::now();
         let fixed_cols = constant_evaluator::generate(&pil);
+        self.log(&format!(
+            "Fixed column generation took {}s",
+            start.elapsed().as_secs_f32()
+        ));
         self.maybe_write_constants(&fixed_cols)?;
-        self.log(&format!("Took {}", start.elapsed().as_secs_f32()));
 
         self.artifact.fixed_cols = Some(Arc::new(fixed_cols));
@@ -830,7 +842,10 @@ impl<T: FieldElement> Pipeline<T> {
             .with_external_witness_values(&external_witness_values)
             .generate();
 
-        self.log(&format!("Took {}", start.elapsed().as_secs_f32()));
+        self.log(&format!(
+            "Witness generation took {}s",
+            start.elapsed().as_secs_f32()
+        ));
 
         self.maybe_write_witness(&fixed_cols, &witness)?;
@@ -914,6 +929,7 @@ impl<T: FieldElement> Pipeline<T> {
             .as_ref()
             .map(|path| fs::read(path).unwrap());
 
+        let start = Instant::now();
         let proof = match backend.prove(&witness, existing_proof, witgen_callback) {
             Ok(proof) => proof,
             Err(powdr_backend::Error::BackendError(e)) => {
@@ -921,6 +937,11 @@ impl<T: FieldElement> Pipeline<T> {
             }
             Err(e) => panic!("{}", e),
         };
+        self.log(&format!(
+            "Proof generation took {}s",
+            start.elapsed().as_secs_f32()
+        ));
+        self.log(&format!("Proof size: {} bytes", proof.len()));
 
         drop(backend);
@@ -1096,8 +1117,15 @@ impl<T: FieldElement> Pipeline<T> {
         )
         .unwrap();
 
+        let start = Instant::now();
         match backend.verify(proof, instances) {
-            Ok(_) => Ok(()),
+            Ok(_) => {
+                self.log(&format!(
+                    "Verification took {}s",
+                    start.elapsed().as_secs_f32()
+                ));
+                Ok(())
+            }
             Err(powdr_backend::Error::BackendError(e)) => Err(vec![e]),
             _ => panic!(),
         }
diff --git a/pipeline/src/test_util.rs b/pipeline/src/test_util.rs
index a9a7c9dea..5116dade5 100644
--- a/pipeline/src/test_util.rs
+++ b/pipeline/src/test_util.rs
@@ -74,9 +74,8 @@ pub fn verify_pipeline<T: FieldElement>(
     // TODO: Also test Composite variants
     let mut pipeline = pipeline.with_backend(backend, None);
 
-    let tmp_dir = mktemp::Temp::new_dir().unwrap();
     if pipeline.output_dir().is_none() {
-        pipeline = pipeline.with_tmp_output(&tmp_dir);
+        pipeline = pipeline.with_tmp_output();
     }
 
     pipeline.compute_proof().unwrap();
@@ -84,27 +83,41 @@ pub fn verify_pipeline<T: FieldElement>(
     verify(pipeline.output_dir().as_ref().unwrap())
 }
 
-pub fn gen_estark_proof(file_name: &str, inputs: Vec<GoldilocksField>) {
-    let tmp_dir = mktemp::Temp::new_dir().unwrap();
+/// Makes a new pipeline for the given file and inputs. All steps until witness generation are
+/// already computed, so that the test can branch off from there, without having to re-compute
+/// these steps.
+pub fn make_prepared_pipeline<T: FieldElement>(file_name: &str, inputs: Vec<T>) -> Pipeline<T> {
     let mut pipeline = Pipeline::default()
-        .with_tmp_output(&tmp_dir)
+        .with_tmp_output()
         .from_file(resolve_test_file(file_name))
-        .with_prover_inputs(inputs)
-        .with_backend(powdr_backend::BackendType::EStarkStarky, None);
+        .with_prover_inputs(inputs);
+    pipeline.compute_witness().unwrap();
+    pipeline
+}
 
-    pipeline.clone().compute_proof().unwrap();
+pub fn gen_estark_proof(file_name: &str, inputs: Vec<GoldilocksField>) {
+    let pipeline = make_prepared_pipeline(file_name, inputs);
+    gen_estark_proof_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
+    gen_estark_proof_with_backend_variant(pipeline, BackendVariant::Composite);
+}
 
-    // Also test composite backend:
-    pipeline
-        .clone()
-        .with_backend(powdr_backend::BackendType::EStarkStarkyComposite, None)
-        .compute_proof()
-        .unwrap();
+pub fn gen_estark_proof_with_backend_variant(
+    pipeline: Pipeline<GoldilocksField>,
+    backend_variant: BackendVariant,
+) {
+    let backend = match backend_variant {
+        BackendVariant::Monolithic => BackendType::EStarkStarky,
+        BackendVariant::Composite => BackendType::EStarkStarkyComposite,
+    };
+    let mut pipeline = pipeline.with_backend(backend, None);
+
+    pipeline.clone().compute_proof().unwrap();
 
     // Repeat the proof generation, but with an externally generated verification key
 
     // Verification Key
-    let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
+    let output_dir = pipeline.output_dir().as_ref().unwrap();
+    let vkey_file_path = output_dir.join("verification_key.bin");
     buffered_write_file(&vkey_file_path, |writer| {
         pipeline.export_verification_key(writer).unwrap()
     })
@@ -126,23 +139,34 @@ pub fn gen_estark_proof(file_name: &str, inputs: Vec<GoldilocksField>) {
     pipeline.verify(&proof, &[publics]).unwrap();
 }
 
-#[cfg(feature = "halo2")]
 pub fn test_halo2(file_name: &str, inputs: Vec<Bn254Field>) {
+    let pipeline = make_prepared_pipeline(file_name, inputs);
+    test_halo2_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
+    test_halo2_with_backend_variant(pipeline, BackendVariant::Composite);
+}
+
+/// Whether to compute a monolithic or composite proof.
+pub enum BackendVariant {
+    Monolithic,
+    Composite,
+}
+
+#[cfg(feature = "halo2")]
+pub fn test_halo2_with_backend_variant(
+    pipeline: Pipeline<Bn254Field>,
+    backend_variant: BackendVariant,
+) {
     use std::env;
 
-    // Generate a mock proof (fast and has good error messages)
-    Pipeline::default()
-        .from_file(resolve_test_file(file_name))
-        .with_prover_inputs(inputs.clone())
-        .with_backend(powdr_backend::BackendType::Halo2Mock, None)
-        .compute_proof()
-        .unwrap();
+    let backend = match backend_variant {
+        BackendVariant::Monolithic => BackendType::Halo2Mock,
+        BackendVariant::Composite => BackendType::Halo2MockComposite,
+    };
 
-    // Also generate a proof with the composite backend
-    Pipeline::default()
-        .from_file(resolve_test_file(file_name))
-        .with_prover_inputs(inputs.clone())
-        .with_backend(powdr_backend::BackendType::Halo2MockComposite, None)
+    // Generate a mock proof (fast and has good error messages)
+    pipeline
+        .clone()
+        .with_backend(backend, None)
         .compute_proof()
         .unwrap();
 
@@ -152,22 +176,25 @@ pub fn test_halo2(file_name: &str, inputs: Vec<Bn254Field>) {
         .map(|v| v == "true")
         .unwrap_or(false);
     if is_nightly_test {
-        gen_halo2_proof(file_name, inputs.clone());
-        gen_halo2_composite_proof(file_name, inputs);
+        gen_halo2_proof(pipeline, backend_variant);
     }
 }
 
 #[cfg(not(feature = "halo2"))]
-pub fn test_halo2(_file_name: &str, _inputs: Vec<Bn254Field>) {}
+pub fn test_halo2_with_backend_variant(
+    _pipeline: Pipeline<Bn254Field>,
+    _backend_variant: BackendVariant,
+) {
+}
 
 #[cfg(feature = "halo2")]
-pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>) {
-    let tmp_dir = mktemp::Temp::new_dir().unwrap();
-    let mut pipeline = Pipeline::default()
-        .with_tmp_output(&tmp_dir)
-        .from_file(resolve_test_file(file_name))
-        .with_prover_inputs(inputs)
-        .with_backend(powdr_backend::BackendType::Halo2, None);
+pub fn gen_halo2_proof(pipeline: Pipeline<Bn254Field>, backend: BackendVariant) {
+    let backend = match backend {
+        BackendVariant::Monolithic => BackendType::Halo2,
+        BackendVariant::Composite => BackendType::Halo2Composite,
+    };
+
+    let mut pipeline = pipeline.clone().with_backend(backend, None);
 
     // Generate a proof with the setup and verification key generated on the fly
     pipeline.clone().compute_proof().unwrap();
 
@@ -176,7 +203,8 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>) {
     let pil = pipeline.compute_optimized_pil().unwrap();
 
     // Setup
-    let setup_file_path = tmp_dir.as_path().join("params.bin");
+    let output_dir = pipeline.output_dir().clone().unwrap();
+    let setup_file_path = output_dir.join("params.bin");
     buffered_write_file(&setup_file_path, |writer| {
         powdr_backend::BackendType::Halo2
             .factory::<Bn254Field>()
             .generate_setup(pil.degree(), writer)
             .unwrap()
     })
@@ -187,7 +215,7 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>) {
     let mut pipeline = pipeline.with_setup_file(Some(setup_file_path));
 
     // Verification Key
-    let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
+    let vkey_file_path = output_dir.join("verification_key.bin");
     buffered_write_file(&vkey_file_path, |writer| {
         pipeline.export_verification_key(writer).unwrap()
     })
@@ -210,28 +238,12 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>) {
 }
 
 #[cfg(not(feature = "halo2"))]
-pub fn gen_halo2_proof(_file_name: &str, _inputs: Vec<Bn254Field>) {}
-
-#[cfg(feature = "halo2")]
-pub fn gen_halo2_composite_proof(file_name: &str, inputs: Vec<Bn254Field>) {
-    let tmp_dir = mktemp::Temp::new_dir().unwrap();
-    Pipeline::default()
-        .with_tmp_output(&tmp_dir)
-        .from_file(resolve_test_file(file_name))
-        .with_prover_inputs(inputs)
-        .with_backend(powdr_backend::BackendType::Halo2Composite, None)
-        .compute_proof()
-        .unwrap();
-}
-
-#[cfg(not(feature = "halo2"))]
-pub fn gen_halo2_composite_proof(_file_name: &str, _inputs: Vec<Bn254Field>) {}
+pub fn gen_halo2_proof(_pipeline: Pipeline<Bn254Field>, _backend: BackendVariant) {}
 
 #[cfg(feature = "plonky3")]
 pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {
-    let tmp_dir = mktemp::Temp::new_dir().unwrap();
     let mut pipeline = Pipeline::default()
-        .with_tmp_output(&tmp_dir)
+        .with_tmp_output()
         .from_file(resolve_test_file(file_name))
         .with_prover_inputs(inputs)
         .with_backend(powdr_backend::BackendType::Plonky3, None);
@@ -251,7 +263,8 @@ pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {
 
     if pipeline.optimized_pil().unwrap().constant_count() > 0 {
         // Export verification Key
-        let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
+        let output_dir = pipeline.output_dir().as_ref().unwrap();
+        let vkey_file_path = output_dir.join("verification_key.bin");
         buffered_write_file(&vkey_file_path, |writer| {
             pipeline.export_verification_key(writer).unwrap()
         })
@@ -318,9 +331,8 @@ pub fn assert_proofs_fail_for_invalid_witnesses_pilcom(
     file_name: &str,
     witness: &[(String, Vec<u64>)],
 ) {
-    let tmp_dir = mktemp::Temp::new_dir().unwrap();
     let pipeline = Pipeline::<GoldilocksField>::default()
-        .with_tmp_output(&tmp_dir)
+        .with_tmp_output()
         .from_file(resolve_test_file(file_name))
         .set_witness(convert_witness(witness));
 
diff --git a/pipeline/tests/asm.rs b/pipeline/tests/asm.rs
index dbb9c434e..377e49234 100644
--- a/pipeline/tests/asm.rs
+++ b/pipeline/tests/asm.rs
@@ -2,8 +2,9 @@ use powdr_backend::BackendType;
 use powdr_number::{Bn254Field, FieldElement, GoldilocksField};
 use powdr_pipeline::{
     test_util::{
-        asm_string_to_pil, gen_estark_proof, gen_halo2_composite_proof, resolve_test_file,
-        test_halo2, verify_test_file,
+        asm_string_to_pil, gen_estark_proof, gen_estark_proof_with_backend_variant,
+        make_prepared_pipeline, resolve_test_file, test_halo2, test_halo2_with_backend_variant,
+        verify_test_file, BackendVariant,
     },
     util::{read_poly_set, FixedPolySet, WitnessPolySet},
     Pipeline,
@@ -227,7 +228,11 @@ fn vm_to_block_different_length() {
     let f = "asm/vm_to_block_different_length.asm";
     // Because machines have different lengths, this can only be proven
     // with a composite proof.
-    gen_halo2_composite_proof(f, vec![]);
+    test_halo2_with_backend_variant(make_prepared_pipeline(f, vec![]), BackendVariant::Composite);
+    gen_estark_proof_with_backend_variant(
+        make_prepared_pipeline(f, vec![]),
+        BackendVariant::Composite,
+    );
 }
 
 #[test]
diff --git a/pipeline/tests/pil.rs b/pipeline/tests/pil.rs
index e9d8d9bbe..073739705 100644
--- a/pipeline/tests/pil.rs
+++ b/pipeline/tests/pil.rs
@@ -4,8 +4,9 @@ use powdr_number::GoldilocksField;
 use powdr_pipeline::test_util::{
     assert_proofs_fail_for_invalid_witnesses, assert_proofs_fail_for_invalid_witnesses_estark,
     assert_proofs_fail_for_invalid_witnesses_halo2,
-    assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof, gen_halo2_composite_proof,
-    test_halo2, test_plonky3, verify_test_file,
+    assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof,
+    gen_estark_proof_with_backend_variant, make_prepared_pipeline, test_halo2,
+    test_halo2_with_backend_variant, test_plonky3, verify_test_file, BackendVariant,
 };
 use test_log::test;
 
@@ -312,7 +313,11 @@ fn different_degrees() {
     let f = "pil/different_degrees.pil";
     // Because machines have different lengths, this can only be proven
     // with a composite proof.
-    gen_halo2_composite_proof(f, vec![]);
+    test_halo2_with_backend_variant(make_prepared_pipeline(f, vec![]), BackendVariant::Composite);
+    gen_estark_proof_with_backend_variant(
+        make_prepared_pipeline(f, vec![]),
+        BackendVariant::Composite,
+    );
 }
 
 #[test]
diff --git a/pipeline/tests/powdr_std.rs b/pipeline/tests/powdr_std.rs
index a227dd015..f241f5053 100644
--- a/pipeline/tests/powdr_std.rs
+++ b/pipeline/tests/powdr_std.rs
@@ -6,8 +6,8 @@ use powdr_pil_analyzer::evaluator::Value;
 use powdr_pipeline::{
     test_util::{
         evaluate_function, evaluate_integer_function, execute_test_file, gen_estark_proof,
-        gen_halo2_composite_proof, gen_halo2_proof, resolve_test_file, std_analyzed, test_halo2,
-        verify_test_file,
+        gen_halo2_proof, make_prepared_pipeline, resolve_test_file, std_analyzed, test_halo2,
+        verify_test_file, BackendVariant,
     },
     Pipeline,
 };
@@ -21,8 +21,11 @@ fn poseidon_bn254_test() {
     // `test_halo2` only does a mock proof in the PR tests.
     // This makes sure we test the whole proof generation for one example
     // file even in the PR tests.
-    gen_halo2_proof(f, Default::default());
-    gen_halo2_composite_proof(f, Default::default());
+    gen_halo2_proof(
+        make_prepared_pipeline(f, vec![]),
+        BackendVariant::Monolithic,
+    );
+    gen_halo2_proof(make_prepared_pipeline(f, vec![]), BackendVariant::Composite);
 }
 
 #[test]
diff --git a/std/math/fp2.asm b/std/math/fp2.asm
index 4c1029fc7..c12d37b7f 100644
--- a/std/math/fp2.asm
+++ b/std/math/fp2.asm
@@ -1,7 +1,13 @@
+use std::array::len;
+use std::array::fold;
+use std::check::assert;
+use std::check::panic;
 use std::convert::fe;
 use std::convert::int;
 use std::convert::expr;
 use std::field::modulus;
+use std::field::known_field;
+use std::field::KnownField;
 use std::prover::eval;
 
 /// An element of the extension field over the implied base field (which has to be either
@@ -90,6 +96,23 @@ let unpack_ext: Fp2<T> -> (T, T) = |a| match a {
     Fp2::Fp2(a0, a1) => (a0, a1)
 };
 
+/// Whether we need to operate on the F_{p^2} extension field (because the current field is too small).
+let needs_extension: -> bool = || match known_field() {
+    Option::Some(KnownField::Goldilocks) => true,
+    Option::Some(KnownField::BN254) => false,
+    None => panic("The permutation/lookup argument is not implemented for the current field!")
+};
+
+/// Whether the given array of length 1 or 2 encodes an extension field element.
+let is_extension = |arr| match len(arr) {
+    1 => false,
+    2 => true,
+    _ => panic("Expected 1 or 2 accumulator columns!")
+};
+
+/// Constructs an extension field element `a0 + a1 * X` from either `[a0, a1]` or `[a0]` (setting `a1` to zero in that case)
+let fp2_from_array = |arr| if is_extension(arr) { Fp2::Fp2(arr[0], arr[1]) } else { from_base(arr[0]) };
+
 mod test {
     use super::Fp2;
     use super::from_base;
diff --git a/std/protocols/fingerprint.asm b/std/protocols/fingerprint.asm
new file mode 100644
index 000000000..cff54175e
--- /dev/null
+++ b/std/protocols/fingerprint.asm
@@ -0,0 +1,12 @@
+use std::array::fold;
+use std::math::fp2::Fp2;
+use std::math::fp2::add_ext;
+use std::math::fp2::mul_ext;
+use std::math::fp2::from_base;
+
+/// Maps [x_1, x_2, ..., x_n] to its Reed-Solomon fingerprint, using a challenge alpha: $\sum_{i=1}^n \alpha^{n - i} \cdot x_i$
+let fingerprint: T[], Fp2<T> -> Fp2<T> = |expr_array, alpha| fold(
+    expr_array,
+    from_base(0),
+    |sum_acc, el| add_ext(mul_ext(alpha, sum_acc), from_base(el))
+);
\ No newline at end of file
diff --git a/std/protocols/lookup.asm b/std/protocols/lookup.asm
index 069f03736..354cca2a6 100644
--- a/std/protocols/lookup.asm
+++ b/std/protocols/lookup.asm
@@ -1,11 +1,8 @@
 use std::array::fold;
-use std::utils::unwrap_or_else;
 use std::array::len;
 use std::array::map;
 use std::check::assert;
 use std::check::panic;
-use std::field::known_field;
-use std::field::KnownField;
 use std::math::fp2::Fp2;
 use std::math::fp2::add_ext;
 use std::math::fp2::sub_ext;
@@ -15,7 +12,12 @@ use std::math::fp2::next_ext;
 use std::math::fp2::inv_ext;
 use std::math::fp2::eval_ext;
 use std::math::fp2::from_base;
+use std::math::fp2::is_extension;
+use std::math::fp2::fp2_from_array;
+use std::math::fp2::needs_extension;
 use std::math::fp2::constrain_eq_ext;
+use std::protocols::fingerprint::fingerprint;
+use std::utils::unwrap_or_else;
 
 let unpack_lookup_constraint: Constr -> (expr, expr[], expr, expr[]) = |lookup_constraint| match lookup_constraint {
     Constr::Lookup((lhs_selector, rhs_selector), values) => (
@@ -27,27 +29,12 @@ let unpack_lookup_constraint: Constr -> (expr, expr[], expr, expr[]) = |lookup_c
     _ => panic("Expected lookup constraint")
 };
 
-/// Whether we need to operate on the F_{p^2} extension field (because the current field is too small).
-let needs_extension: -> bool = || match known_field() {
-    Option::Some(KnownField::Goldilocks) => true,
-    Option::Some(KnownField::BN254) => false,
-    None => panic("The lookup argument is not implemented for the current field!")
-};
-
-//* Generic for both permutation and lookup arguments
-/// Maps [x_1, x_2, ..., x_n] to alpha**(n - 1) * x_1 + alpha ** (n - 2) * x_2 + ... + x_n
-let compress_expression_array: T[], Fp2<T> -> Fp2<T> = |expr_array, alpha| fold(
-    expr_array,
-    from_base(0),
-    |sum_acc, el| add_ext(mul_ext(alpha, sum_acc), from_base(el))
-);
-
 // Compute z' = z + 1/(beta-a_i) * lhs_selector - m_i/(beta-b_i) * rhs_selector, using extension field arithmetic
 let compute_next_z: Fp2<expr>, Fp2<expr>, Fp2<expr>, Constr, expr -> fe[] = query |acc, alpha, beta, lookup_constraint, multiplicities| {
     let (lhs_selector, lhs, rhs_selector, rhs) = unpack_lookup_constraint(lookup_constraint);
 
-    let lhs_denom = sub_ext(beta, compress_expression_array(lhs, alpha));
-    let rhs_denom = sub_ext(beta, compress_expression_array(rhs, alpha));
+    let lhs_denom = sub_ext(beta, fingerprint(lhs, alpha));
+    let rhs_denom = sub_ext(beta, fingerprint(rhs, alpha));
     let m_ext = from_base(multiplicities);
 
     // acc' = acc + 1/(beta-a_i) * lhs_selector - m_i/(beta-b_i) * rhs_selector
@@ -81,28 +68,20 @@ let lookup: expr, expr[], Fp2<expr>, Fp2<expr>, Constr, expr -> Constr[] = |is_f
 
     let (lhs_selector, lhs, rhs_selector, rhs) = unpack_lookup_constraint(lookup_constraint);
 
     let _ = assert(len(lhs) == len(rhs), || "LHS and RHS should have equal length");
-
-    let with_extension = match len(acc) {
-        1 => false,
-        2 => true,
-        _ => panic("Expected 1 or 2 accumulator columns!")
-    };
-
-    let _ = if !with_extension {
+    let _ = if !is_extension(acc) {
         assert(!needs_extension(), || "The Goldilocks field is too small and needs to move to the extension field. Pass two accumulators instead!")
-    } else { () };
+    } else { };
 
     // On the extension field, we'll need two field elements to represent the challenge.
     // If we don't need an extension field, we can simply set the second component to 0,
     // in which case the operations below effectively only operate on the first component.
-    let fp2_from_array = |arr| if with_extension { Fp2::Fp2(arr[0], arr[1]) } else { from_base(arr[0]) };
     let acc_ext = fp2_from_array(acc);
 
-    let lhs_denom = sub_ext(beta, compress_expression_array(lhs, alpha));
-    let rhs_denom = sub_ext(beta, compress_expression_array(rhs, alpha));
+    let lhs_denom = sub_ext(beta, fingerprint(lhs, alpha));
+    let rhs_denom = sub_ext(beta, fingerprint(rhs, alpha));
     let m_ext = from_base(multiplicities);
 
-    let next_acc = if with_extension {
+    let next_acc = if is_extension(acc) {
         next_ext(acc_ext)
     } else {
         // The second component is 0, but the next operator is not defined on it...
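As background for the accumulator update constrained by `lookup` above: summing the per-row update `z' - z = lhs_selector/(beta - a_i) - m_i/(beta - b_i)` over all N rows telescopes, and because the accumulator is constrained to wrap around to its starting value, the two rational sums must cancel. The identity below is a sketch of the standard logUp-style reasoning behind this design (the indexing is illustrative; `a_i` and `b_i` denote the Reed-Solomon fingerprints of the LHS and RHS tuples):

```latex
\[
  \sum_{i=1}^{N} \frac{\mathrm{lhs\_selector}_i}{\beta - a_i}
  \;=\;
  \sum_{i=1}^{N} \frac{m_i}{\beta - b_i}.
\]
```

For a uniformly random challenge beta, this equality holds (except with negligible probability) only if every selected LHS value occurs among the RHS values with the claimed multiplicities m_i, which is exactly the lookup relation.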
diff --git a/std/protocols/mod.asm b/std/protocols/mod.asm
index 1ef638a24..90d583ae0 100644
--- a/std/protocols/mod.asm
+++ b/std/protocols/mod.asm
@@ -1,2 +1,3 @@
-mod permutation;
-mod lookup;
\ No newline at end of file
+mod fingerprint;
+mod lookup;
+mod permutation;
\ No newline at end of file
diff --git a/std/protocols/permutation.asm b/std/protocols/permutation.asm
index 86159d70c..274469b58 100644
--- a/std/protocols/permutation.asm
+++ b/std/protocols/permutation.asm
@@ -1,11 +1,7 @@
-use std::array::fold;
 use std::array::map;
-use std::utils::unwrap_or_else;
 use std::array::len;
 use std::check::assert;
 use std::check::panic;
-use std::field::known_field;
-use std::field::KnownField;
 use std::math::fp2::Fp2;
 use std::math::fp2::add_ext;
 use std::math::fp2::sub_ext;
@@ -15,7 +11,12 @@ use std::math::fp2::next_ext;
 use std::math::fp2::inv_ext;
 use std::math::fp2::eval_ext;
 use std::math::fp2::from_base;
+use std::math::fp2::needs_extension;
+use std::math::fp2::is_extension;
+use std::math::fp2::fp2_from_array;
 use std::math::fp2::constrain_eq_ext;
+use std::protocols::fingerprint::fingerprint;
+use std::utils::unwrap_or_else;
 
 let unpack_permutation_constraint: Constr -> (expr, expr[], expr, expr[]) = |permutation_constraint| match permutation_constraint {
     Constr::Permutation((lhs_selector, rhs_selector), values) => (
@@ -27,20 +28,6 @@ let unpack_permutation_constraint: Constr -> (expr, expr[], expr, expr[]) = |per
     _ => panic("Expected permutation constraint")
 };
 
-/// Whether we need to operate on the F_{p^2} extension field (because the current field is too small).
-let needs_extension: -> bool = || match known_field() {
-    Option::Some(KnownField::Goldilocks) => true,
-    Option::Some(KnownField::BN254) => false,
-    None => panic("The permutation argument is not implemented for the current field!")
-};
-
-/// Maps [x_1, x_2, ..., x_n] to its Read-Solomon fingerprint, using challenge alpha: $\sum_{i=1}^n alpha**{(n - i)} * x_i$
-let compress_expression_array: T[], Fp2<T> -> Fp2<T> = |expr_array, alpha| fold(
-    expr_array,
-    from_base(0),
-    |sum_acc, el| add_ext(mul_ext(alpha, sum_acc), from_base(el))
-);
-
 /// Takes a boolean selector (0/1) and a value, returns equivalent of `if selector { value } else { 1 }`
 /// Implemented as: selector * (value - 1) + 1
 let selected_or_one: T, Fp2<T> -> Fp2<T> = |selector, value| add_ext(mul_ext(from_base(selector), sub_ext(value, from_base(1))), from_base(1));
@@ -54,8 +41,8 @@ let compute_next_z: Fp2<expr>, Fp2<expr>, Fp2<expr>, Constr -> fe[] = query |acc
 
     let (lhs_selector, lhs, rhs_selector, rhs) = unpack_permutation_constraint(permutation_constraint);
 
-    let lhs_folded = selected_or_one(lhs_selector, sub_ext(beta, compress_expression_array(lhs, alpha)));
-    let rhs_folded = selected_or_one(rhs_selector, sub_ext(beta, compress_expression_array(rhs, alpha)));
+    let lhs_folded = selected_or_one(lhs_selector, sub_ext(beta, fingerprint(lhs, alpha)));
+    let rhs_folded = selected_or_one(rhs_selector, sub_ext(beta, fingerprint(rhs, alpha)));
 
     // acc' = acc * lhs_folded / rhs_folded
     let res = mul_ext(
@@ -84,7 +71,7 @@ let compute_next_z: Fp2<expr>, Fp2<expr>, Fp2<expr>, Constr -> fe[] = query |acc
 /// page 99, paragraph "Multiset equality checking (a.k.a. permutation checking)
 /// via fingerprinting". In short:
 /// 1. The LHS and RHS are Reed-Solomon fingerprinted using challenge $\alpha$
-///    (see `compress_expression_array`).
+///    (see `std::protocols::fingerprint::fingerprint`).
 /// 2. If the selector is one, the accumulator is updated as:
 ///    `acc' = acc * (beta - lhs) / (beta - rhs)`.
///    This iteratively evaluates the fraction of polynomials $\prod_i (X - lhs_i)$
@@ -98,29 +85,22 @@ let permutation: expr, expr[], Fp2<expr>, Fp2<expr>, Constr -> Constr[] = |is_fi
     let (lhs_selector, lhs, rhs_selector, rhs) = unpack_permutation_constraint(permutation_constraint);
     let _ = assert(len(lhs) == len(rhs), || "LHS and RHS should have equal length");
 
-    let with_extension = match len(acc) {
-        1 => false,
-        2 => true,
-        _ => panic("Expected 1 or 2 accumulator columns!")
-    };
-
-    let _ = if !with_extension {
+    let _ = if !is_extension(acc) {
         assert(!needs_extension(), || "The Goldilocks field is too small and needs to move to the extension field. Pass two accumulators instead!")
-    } else { () };
-
+    } else { };
+
     // On the extension field, we'll need two field elements to represent the challenge.
     // If we don't need an extension field, we can simply set the second component to 0,
     // in which case the operations below effectively only operate on the first component.
-    let fp2_from_array = |arr| if with_extension { Fp2::Fp2(arr[0], arr[1]) } else { from_base(arr[0]) };
     let acc_ext = fp2_from_array(acc);
 
-    // If the selector is 1, contribute a factor of `beta - compress_expression_array(lhs)` to accumulator.
+    // If the selector is 1, contribute a factor of `beta - fingerprint(lhs)` to the accumulator.
     // If the selector is 0, contribute a factor of 1 to the accumulator.
-    // Implemented as: folded = selector * (beta - compress_expression_array(values) - 1) + 1;
-    let lhs_folded = selected_or_one(lhs_selector, sub_ext(beta, compress_expression_array(lhs, alpha)));
-    let rhs_folded = selected_or_one(rhs_selector, sub_ext(beta, compress_expression_array(rhs, alpha)));
+    // Implemented as: folded = selector * (beta - fingerprint(values) - 1) + 1;
+    let lhs_folded = selected_or_one(lhs_selector, sub_ext(beta, fingerprint(lhs, alpha)));
+    let rhs_folded = selected_or_one(rhs_selector, sub_ext(beta, fingerprint(rhs, alpha)));
 
-    let next_acc = if with_extension {
+    let next_acc = if is_extension(acc) {
         next_ext(acc_ext)
     } else {
         // The second component is 0, but the next operator is not defined on it...
@@ -130,7 +110,7 @@ let permutation: expr, expr[], Fp2<expr>, Fp2<expr>, Constr -> Constr[] = |is_fi
     // Update rule:
     // acc' = acc * lhs_folded / rhs_folded
     // => rhs_folded * acc' - lhs_folded * acc = 0
-    let diff_from_expected = sub_ext(
+    let update_expr = sub_ext(
         mul_ext(rhs_folded, next_acc),
         mul_ext(lhs_folded, acc_ext)
     );
@@ -145,5 +125,5 @@ let permutation: expr, expr[], Fp2<expr>, Fp2<expr>, Constr -> Constr[] = |is_fi
-        // Note that if with_extension is false, this generates 0 = 0 and is removed
+        // Note that if is_extension(acc) is false, this generates 0 = 0 and is removed
         // by the optimizer.
         is_first * acc_2 = 0
-    ] + constrain_eq_ext(diff_from_expected, from_base(0))
+    ] + constrain_eq_ext(update_expr, from_base(0))
 };
\ No newline at end of file
diff --git a/std/utils.asm b/std/utils.asm
index 4ad6fde5c..e53f5314d 100644
--- a/std/utils.asm
+++ b/std/utils.asm
@@ -1,4 +1,3 @@
-
 /// Evaluates to folder(...folder(folder(initial, f(0)), f(1)) ..., f(length - 1)),
 /// i.e. calls f(0), f(1), ..., f(length - 1) and combines the results
 /// using the function `folder`, starting with the value `initial`.
diff --git a/std/well_known.asm b/std/well_known.asm
index 3ce7ac0c3..99e74bd0a 100644
--- a/std/well_known.asm
+++ b/std/well_known.asm
@@ -1 +1,3 @@
+/// Evaluates to 1 on the first row and 0 on all other rows.
+/// Useful to define a fixed column of that property.
 let is_first: int -> int = |i| if i == 0 { 1 } else { 0 };
\ No newline at end of file
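For reference, the `fingerprint` function introduced in `std/protocols/fingerprint.asm` computes the Reed-Solomon fingerprint $\sum_{i=1}^n \alpha^{n-i} \cdot x_i$ as a Horner-style fold. Below is a minimal Rust sketch of the same recurrence over a prime field represented as `u64` values; the standalone `fingerprint` function and the `MODULUS` constant are illustrative only (the actual implementation operates on `Fp2` extension-field elements and symbolic expressions):

```rust
/// Goldilocks prime, used here only as an example modulus.
const MODULUS: u64 = 0xffff_ffff_0000_0001;

/// Reed-Solomon fingerprint via Horner's rule: folding over [x_1, ..., x_n]
/// yields alpha^(n-1)*x_1 + ... + alpha*x_{n-1} + x_n (mod MODULUS).
fn fingerprint(xs: &[u64], alpha: u64) -> u64 {
    xs.iter().fold(0u64, |acc, &x| {
        ((acc as u128 * alpha as u128 + x as u128) % MODULUS as u128) as u64
    })
}

fn main() {
    // For [x1, x2, x3], the result is alpha^2*x1 + alpha*x2 + x3 (mod MODULUS).
    let (x1, x2, x3, alpha) = (7u64, 11u64, 13u64, 3u64);
    let expected = (alpha * alpha * x1 + alpha * x2 + x3) % MODULUS;
    assert_eq!(fingerprint(&[x1, x2, x3], alpha), expected);
    println!("fingerprint = {}", fingerprint(&[x1, x2, x3], alpha));
}
```

Two distinct tuples of length n collide under this map with probability at most (n-1)/|F| over a random `alpha`, which is why both the lookup and permutation arguments draw `alpha` as a verifier challenge.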