From 3c681a3f61999db6fb52c152bb15c7dfcf24f74d Mon Sep 17 00:00:00 2001
From: Pablo Deymonnaz
Date: Thu, 2 Mar 2023 13:21:30 -0300
Subject: [PATCH] Fibonacci Stark Prover (#59)

* prover sub crate created
* working on fold function
* merge
* test working
* fold test completed
* next_fri_layer function
* Dependencies removed
* using iterator step_by
* fmt
* reordering fri functions
* fri_decommit init
* evaluate_vec in polynomial and reference in evaluate
* using evaluate_vec
* evaluate_vec changed to evaluate_slice
* evaluate_slice changed
* fri_commitment
* fri continuation
* comment moved
* fri_decommit_layers
* comments added
* polynomial.rs merge conflict
* adapting to the new code
* conflicts solved
* append in transcript
* insert last_evaluation in transcript
* beta from transcript.challenge()
* test: generating subgroups
* prover sub crate created
* Save work in progress
* Add first iteration of function to get composition polynomials from trace and air
* Add test for get_composition_poly
* Add get_coefficients function
* Tidy up code
* Add docs
* Fix tests
* Add u128_prime field and make get_composition_poly return a Polynomial data structure
* Fixes from rebasing
* Apply clippy suggestions
* Make functions pub crate
* Tidy up code
* Tidy up code
* Minor fixes
* Use U384 instead of U128
* Tidy up code and remove unnecessary u128 field element module
* generate_vec_roots
* generate_vec_roots in lib
* Return trace polynomial from get_composition_poly
* coset_factor
* Add coset evaluation and fri commitment steps
* Add result to get_cp_and_tp
* Change error description and module name
* Add decommitment step
* Start filling the stark proof struct
* Small comments
* Add first verifier step
* Switch to hardcoded fibonacci trace
* Start FRI verification step
* More progress
* Improve code, change field to 17 for testing purposes
* Fix FRI operation
* Go back to fibonacci example with test passing
* Refactor functions that use fiat shamir to take in a transcript
* Add TODO
* Add comments
* Moved field definition to lib, removed duplicated definitions
* Renamed types
* Simplified operations
* Refactor roots of unity generator
* Small refactor
* Refactor roots of unity generator
* Update comment
* Extracted FRI
* Refactor verify
* Refactor clippy
* Reordered prover
* cargo fmt
* fix roots of unity
* Remove air
* Prover -> Stark
* Move folders
* Uncomment tests, remove unused code
* Fix fri_functions tests
* Remove fri_merkle_tree module, move to mod.rs
* Clippy
* Remove TODOs

---------

Co-authored-by: Pablo Deymonnaz
Co-authored-by: Mariano Nicolini
Co-authored-by: Javier Chatruc
Co-authored-by: MauroFab
---
 Cargo.toml | 1 +
 crypto/src/fiat_shamir/mod.rs | 2 +-
 crypto/src/fiat_shamir/transcript.rs | 10 +-
 crypto/src/merkle_tree/mod.rs | 2 +-
 crypto/src/merkle_tree/proof.rs | 17 +
 math/src/field/element.rs | 5 +
 math/src/field/extensions/cubic.rs | 4 +
 math/src/field/extensions/quadratic.rs | 4 +
 math/src/field/fields/u384_prime_field.rs | 5 +
 math/src/field/fields/u64_prime_field.rs | 4 +
 math/src/field/test_fields/u64_test_field.rs | 4 +
 math/src/field/traits.rs | 3 +
 math/src/polynomial.rs | 53 ++-
 math/src/unsigned_integer/element.rs | 1 +
 proving-system/stark/Cargo.toml | 12 +
 .../stark/src/fri/fri_commitment.rs | 10 +
 proving-system/stark/src/fri/fri_decommit.rs | 94 ++++++
 proving-system/stark/src/fri/fri_functions.rs | 164 +++++++++
 proving-system/stark/src/fri/mod.rs | 114 +++++++
 proving-system/stark/src/lib.rs | 318 ++++++++++++++++++
 20 files changed, 819 insertions(+), 8
deletions(-) create mode 100644 proving-system/stark/Cargo.toml create mode 100644 proving-system/stark/src/fri/fri_commitment.rs create mode 100644 proving-system/stark/src/fri/fri_decommit.rs create mode 100644 proving-system/stark/src/fri/fri_functions.rs create mode 100644 proving-system/stark/src/fri/mod.rs create mode 100644 proving-system/stark/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 7662aa11d..2946f434e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,4 +3,5 @@ members = [ "math", "crypto", + "proving-system/stark", ] diff --git a/crypto/src/fiat_shamir/mod.rs b/crypto/src/fiat_shamir/mod.rs index e24a46384..ece16b1ef 100644 --- a/crypto/src/fiat_shamir/mod.rs +++ b/crypto/src/fiat_shamir/mod.rs @@ -1 +1 @@ -mod transcript; +pub mod transcript; diff --git a/crypto/src/fiat_shamir/transcript.rs b/crypto/src/fiat_shamir/transcript.rs index 7d1d38d62..77cd620e0 100644 --- a/crypto/src/fiat_shamir/transcript.rs +++ b/crypto/src/fiat_shamir/transcript.rs @@ -1,24 +1,24 @@ use sha3::{Digest, Sha3_256}; -struct Transcript { +pub struct Transcript { hasher: Sha3_256, } impl Transcript { - #[allow(unused)] - fn new() -> Self { + #[allow(clippy::new_without_default)] + pub fn new() -> Self { Self { hasher: Sha3_256::new(), } } #[allow(unused)] - fn append(&mut self, new_data: &[u8]) { + pub fn append(&mut self, new_data: &[u8]) { self.hasher.update(&mut new_data.to_owned()); } #[allow(unused)] - fn challenge(&mut self) -> [u8; 32] { + pub fn challenge(&mut self) -> [u8; 32] { let mut result_hash = [0_u8; 32]; result_hash.copy_from_slice(&self.hasher.finalize_reset()); self.hasher.update(result_hash); diff --git a/crypto/src/merkle_tree/mod.rs b/crypto/src/merkle_tree/mod.rs index 1e52facc5..00e909e8e 100644 --- a/crypto/src/merkle_tree/mod.rs +++ b/crypto/src/merkle_tree/mod.rs @@ -8,7 +8,7 @@ use crate::hash::traits::IsCryptoHash; use self::{merkle::MerkleTree, proof::Proof}; pub mod merkle; -mod proof; +pub mod proof; mod utils; pub type U64F = U64PrimeField<0xFFFF_FFFF_0000_0001_u64>; diff --git a/crypto/src/merkle_tree/proof.rs b/crypto/src/merkle_tree/proof.rs index ff473db67..6c7d32b50 100644 --- a/crypto/src/merkle_tree/proof.rs +++ b/crypto/src/merkle_tree/proof.rs @@ -5,12 +5,29 @@ use lambdaworks_math::{ traits::ByteConversion, }; +#[derive(Debug, Clone)] pub struct Proof> { pub value: FieldElement, pub merkle_path: Vec<(FieldElement, bool)>, pub hasher: H, } +impl> Proof { + pub fn verify(&self, root_hash: FieldElement) -> bool { + let mut hashed_value = self.hasher.hash_one(self.value.clone()); + + for (sibling_node, is_left) in self.merkle_path.iter().rev() { + if *is_left { + hashed_value = self.hasher.hash_two(hashed_value, sibling_node.clone()); + } else { + hashed_value = self.hasher.hash_two(sibling_node.clone(), hashed_value); + } + } + + root_hash == hashed_value + } +} + impl ByteConversion for Proof where F: IsField, diff --git a/math/src/field/element.rs b/math/src/field/element.rs index 6f4cf8c76..e256f1662 100644 --- a/math/src/field/element.rs +++ b/math/src/field/element.rs @@ -300,6 +300,11 @@ where &self.value } + // Returns the representative of the value stored + pub fn representative(&self) -> F::BaseType { + F::representative(self.value.clone()) + } + /// Returns the multiplicative inverse of `self` pub fn inv(&self) -> Self { Self { diff --git a/math/src/field/extensions/cubic.rs b/math/src/field/extensions/cubic.rs index 808123e6c..deac3df85 100644 --- a/math/src/field/extensions/cubic.rs +++ b/math/src/field/extensions/cubic.rs @@ -127,6 
+127,10 @@ where fn from_base_type(x: [FieldElement; 3]) -> [FieldElement; 3] { x } + + fn representative(_x: Self::BaseType) -> Self::BaseType { + todo!() + } } #[cfg(test)] diff --git a/math/src/field/extensions/quadratic.rs b/math/src/field/extensions/quadratic.rs index ca9f4cbe9..571c71e3c 100644 --- a/math/src/field/extensions/quadratic.rs +++ b/math/src/field/extensions/quadratic.rs @@ -105,6 +105,10 @@ where fn from_base_type(x: [FieldElement; 2]) -> [FieldElement; 2] { x } + + fn representative(_x: Self::BaseType) -> Self::BaseType { + todo!() + } } #[cfg(test)] diff --git a/math/src/field/fields/u384_prime_field.rs b/math/src/field/fields/u384_prime_field.rs index 472b0d57b..a062b39c0 100644 --- a/math/src/field/fields/u384_prime_field.rs +++ b/math/src/field/fields/u384_prime_field.rs @@ -151,6 +151,11 @@ where fn from_base_type(x: Self::BaseType) -> Self::BaseType { MontgomeryAlgorithms::cios(&x, &C::R2, &C::MODULUS, &C::MU) } + + // TO DO: Add tests for representatives + fn representative(x: Self::BaseType) -> Self::BaseType { + MontgomeryAlgorithms::cios(&x, &U384::from_u64(1), &C::MODULUS, &C::MU) + } } impl ByteConversion for FieldElement> diff --git a/math/src/field/fields/u64_prime_field.rs b/math/src/field/fields/u64_prime_field.rs index fcbe116b3..4767c6e59 100644 --- a/math/src/field/fields/u64_prime_field.rs +++ b/math/src/field/fields/u64_prime_field.rs @@ -56,6 +56,10 @@ impl IsField for U64PrimeField { fn from_base_type(x: u64) -> u64 { Self::from_u64(x) } + + fn representative(x: u64) -> u64 { + x + } } impl Copy for U64FieldElement {} diff --git a/math/src/field/test_fields/u64_test_field.rs b/math/src/field/test_fields/u64_test_field.rs index f66e3c1c9..8245fc336 100644 --- a/math/src/field/test_fields/u64_test_field.rs +++ b/math/src/field/test_fields/u64_test_field.rs @@ -50,6 +50,10 @@ impl IsField for U64TestField { fn from_base_type(x: u64) -> u64 { Self::from_u64(x) } + + fn representative(x: u64) -> u64 { + x + } } impl IsTwoAdicField for U64TestField { diff --git a/math/src/field/traits.rs b/math/src/field/traits.rs index 9f084a912..884fb7ca1 100644 --- a/math/src/field/traits.rs +++ b/math/src/field/traits.rs @@ -93,4 +93,7 @@ pub trait IsField: Debug + Clone { /// Takes as input an element of BaseType and returns the internal representation /// of that element in the field. fn from_base_type(x: Self::BaseType) -> Self::BaseType; + + // Returns the representative of the value stored + fn representative(a: Self::BaseType) -> Self::BaseType; } diff --git a/math/src/polynomial.rs b/math/src/polynomial.rs index 6baf5dd45..6f0eacb0f 100644 --- a/math/src/polynomial.rs +++ b/math/src/polynomial.rs @@ -6,7 +6,7 @@ use std::ops; /// as a vector of coefficients `[c_0, c_1, ... , c_n]` #[derive(Debug, Clone, PartialEq, Eq)] pub struct Polynomial { - coefficients: Vec, + pub coefficients: Vec, } impl Polynomial> { @@ -160,6 +160,32 @@ impl Polynomial> { } } +// TODO: This is not an optimal implementation, it should use FFT to interpolate. 
+pub fn compose( + poly_1: &Polynomial>, + poly_2: &Polynomial>, +) -> Polynomial> +where + F: IsField, +{ + let max_degree: u64 = (poly_1.degree() * poly_2.degree()) as u64; + + let mut interpolation_points = vec![]; + for i in 0_u64..max_degree + 1 { + interpolation_points.push(FieldElement::::from(i)); + } + + let values: Vec<_> = interpolation_points + .iter() + .map(|value| { + let intermediate_value = poly_2.evaluate(value); + poly_1.evaluate(&intermediate_value) + }) + .collect(); + + Polynomial::interpolate(interpolation_points.as_slice(), values.as_slice()) +} + impl ops::Add<&Polynomial>> for &Polynomial> { type Output = Polynomial>; @@ -232,6 +258,21 @@ impl ops::Mul>> for Polynomial ops::Mul> for Polynomial> { + type Output = Polynomial>; + + fn mul(self, multiplicand: FieldElement) -> Polynomial> { + let new_coefficients = self + .coefficients + .iter() + .map(|value| value * &multiplicand) + .collect(); + Polynomial { + coefficients: new_coefficients, + } + } +} + #[cfg(test)] mod tests { use crate::field::fields::u64_prime_field::U64PrimeField; @@ -494,4 +535,14 @@ mod tests { let p = Polynomial::interpolate(&[FE::new(0)], &[FE::new(0)]); assert_eq!(FE::new(0), p.evaluate(&FE::new(0))); } + + #[test] + fn composition_works() { + let p = Polynomial::new(&[FE::new(0), FE::new(2)]); + let q = Polynomial::new(&[FE::new(0), FE::new(0), FE::new(1)]); + assert_eq!( + compose(&p, &q), + Polynomial::new(&[FE::new(0), FE::new(0), FE::new(2)]) + ); + } } diff --git a/math/src/unsigned_integer/element.rs b/math/src/unsigned_integer/element.rs index e40693ae6..8d048e42b 100644 --- a/math/src/unsigned_integer/element.rs +++ b/math/src/unsigned_integer/element.rs @@ -9,6 +9,7 @@ use std::fmt::Debug; pub type U384 = UnsignedInteger<6>; pub type U256 = UnsignedInteger<4>; +pub type U128 = UnsignedInteger<2>; /// A big unsigned integer in base 2^{64} represented /// as fixed-size array `limbs` of `u64` components. 
diff --git a/proving-system/stark/Cargo.toml b/proving-system/stark/Cargo.toml new file mode 100644 index 000000000..82eb31c61 --- /dev/null +++ b/proving-system/stark/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "lambdaworks-stark" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +rand = "0.8.5" +lambdaworks-math = { path = "../../math" } +lambdaworks-crypto = { path = "../../crypto"} +thiserror = "1.0.38" diff --git a/proving-system/stark/src/fri/fri_commitment.rs b/proving-system/stark/src/fri/fri_commitment.rs new file mode 100644 index 000000000..ceaf56b52 --- /dev/null +++ b/proving-system/stark/src/fri/fri_commitment.rs @@ -0,0 +1,10 @@ +pub use super::{FriMerkleTree, Polynomial, F, FE}; + +pub struct FriCommitment { + pub poly: Polynomial, + pub domain: Vec, + pub evaluation: Vec, + pub merkle_tree: FriMerkleTree, +} + +pub type FriCommitmentVec = Vec>; diff --git a/proving-system/stark/src/fri/fri_decommit.rs b/proving-system/stark/src/fri/fri_decommit.rs new file mode 100644 index 000000000..9837d0e06 --- /dev/null +++ b/proving-system/stark/src/fri/fri_decommit.rs @@ -0,0 +1,94 @@ +use super::FE; +use crate::{fri::fri_commitment::FriCommitmentVec, PrimeField}; +pub use lambdaworks_crypto::fiat_shamir::transcript::Transcript; +use lambdaworks_crypto::merkle_tree::DefaultHasher; + +use lambdaworks_crypto::merkle_tree::proof::Proof; + +#[derive(Debug, Clone)] +pub struct FriDecommitment { + pub layer_merkle_paths: Vec<( + Proof, + Proof, + )>, + pub last_layer_evaluation: FE, +} + +// verifier chooses a randomness and get the index where +// they want to evaluate the poly +// TODO: encapsulate the return type of this function in a struct. +// This returns a list of authentication paths for evaluations on points and their symmetric counterparts. 
+pub fn fri_decommit_layers( + commit: &FriCommitmentVec, + index_to_verify: usize, +) -> FriDecommitment { + let mut index = index_to_verify; + + let mut layer_merkle_paths = vec![]; + + // with every element of the commit, we look for that one in + // the merkle tree and get the corresponding element + for commit_i in commit { + let length_i = commit_i.domain.len(); + index %= length_i; + let evaluation_i = commit_i.evaluation[index].clone(); + let auth_path = commit_i.merkle_tree.get_proof(&evaluation_i).unwrap(); + + // symmetrical element + let index_sym = (index + length_i / 2) % length_i; + let evaluation_i_sym = commit_i.evaluation[index_sym].clone(); + let auth_path_sym = commit_i.merkle_tree.get_proof(&evaluation_i_sym).unwrap(); + + layer_merkle_paths.push((auth_path, auth_path_sym)); + } + + // send the last element of the polynomial + let last = commit.last().unwrap(); + let last_evaluation = last.poly.coefficients[0].clone(); + + FriDecommitment { + layer_merkle_paths, + last_layer_evaluation: last_evaluation, + } +} + +// Integration test: +// * get an arbitrary polynomial +// * have a domain containing roots of the unity (# is power of two) +// p = 65_537 +// * apply FRI commitment +// * apply FRI decommitment +// assert: +// * evaluations of the polynomials coincide with calculations from the decommitment +// * show a fail example: with a monomial + +#[cfg(test)] +mod tests { + use crate::fri::U64PrimeField; + use lambdaworks_math::field::element::FieldElement; + use std::collections::HashSet; + const PRIME_GENERATOR: (u64, u64) = (0xFFFF_FFFF_0000_0001_u64, 2717_u64); + pub type F = U64PrimeField<{ PRIME_GENERATOR.0 }>; + pub type FeGoldilocks = FieldElement; + + #[test] + fn test() { + let subgroup_size = 1024_u64; + let generator_field = FeGoldilocks::new(PRIME_GENERATOR.1); + let exp = (PRIME_GENERATOR.0 - 1) / subgroup_size; + let generator_of_subgroup = generator_field.pow(exp); + let mut numbers = HashSet::new(); + + let mut i = 0; + for exp in 0..1024_u64 { + i += 1; + let ret = generator_of_subgroup.pow(exp); + numbers.insert(*ret.value()); + println!("{ret:?}"); + } + + let count = numbers.len(); + println!("count: {count}"); + println!("iter: {i}"); + } +} diff --git a/proving-system/stark/src/fri/fri_functions.rs b/proving-system/stark/src/fri/fri_functions.rs new file mode 100644 index 000000000..195a38607 --- /dev/null +++ b/proving-system/stark/src/fri/fri_functions.rs @@ -0,0 +1,164 @@ +use lambdaworks_math::field::{element::FieldElement, traits::IsField}; + +use super::Polynomial; + +fn fold_polynomial( + poly: &Polynomial>, + beta: &FieldElement, +) -> Polynomial> +where + F: IsField, +{ + let coef = poly.coefficients(); + let even_coef: Vec> = coef.iter().step_by(2).cloned().collect(); + + // odd coeficients of poly are multiplied by beta + let odd_coef_mul_beta: Vec> = coef + .iter() + .skip(1) + .step_by(2) + .map(|v| (v.clone()) * beta) + .collect(); + + let (even_poly, odd_poly) = Polynomial::pad_with_zero_coefficients( + &Polynomial::new(&even_coef), + &Polynomial::new(&odd_coef_mul_beta), + ); + even_poly + odd_poly +} + +fn next_domain(input: &[FieldElement]) -> Vec> +where + F: IsField, +{ + let length = input.len() / 2; + let mut ret = Vec::with_capacity(length); + for v in input.iter().take(length) { + ret.push(v * v) + } + ret +} + +/// Returns: +/// * new polynomoial folded with FRI protocol +/// * new domain +/// * evaluations of the polynomial +// TODO: Remove this +#[allow(clippy::type_complexity)] +pub fn next_fri_layer( + poly: 
&Polynomial>, + domain: &[FieldElement], + beta: &FieldElement, +) -> ( + Polynomial>, + Vec>, + Vec>, +) +where + F: IsField, +{ + let ret_poly = fold_polynomial(poly, beta); + let ret_next_domain = next_domain(domain); + let ret_evaluation = ret_poly.evaluate_slice(&ret_next_domain); + (ret_poly, ret_next_domain, ret_evaluation) +} + +#[cfg(test)] +mod tests { + use super::{fold_polynomial, next_domain, next_fri_layer}; + use lambdaworks_math::field::element::FieldElement; + use lambdaworks_math::field::fields::u64_prime_field::U64PrimeField; + const MODULUS: u64 = 293; + type FE = FieldElement>; + use lambdaworks_math::polynomial::Polynomial; + + #[test] + fn test_fold() { + let p0 = Polynomial::new(&[ + FE::new(3), + FE::new(1), + FE::new(2), + FE::new(7), + FE::new(3), + FE::new(5), + ]); + let beta = FE::new(4); + let p1 = fold_polynomial(&p0, &beta); + assert_eq!( + p1, + Polynomial::new(&[FE::new(7), FE::new(30), FE::new(23),]) + ); + + let gamma = FE::new(3); + let p2 = fold_polynomial(&p1, &gamma); + assert_eq!(p2, Polynomial::new(&[FE::new(97), FE::new(23),])); + + let delta = FE::new(2); + let p3 = fold_polynomial(&p2, &delta); + assert_eq!(p3, Polynomial::new(&[FE::new(143)])); + assert_eq!(p3.degree(), 0); + } + + #[test] + fn test_next_domain() { + let input = [ + FE::new(5), + FE::new(7), + FE::new(13), + FE::new(20), + FE::new(1), + FE::new(1), + FE::new(1), + FE::new(1), + ]; + let ret_next_domain = next_domain(&input); + assert_eq!( + ret_next_domain, + &[FE::new(25), FE::new(49), FE::new(169), FE::new(107),] + ); + + let ret_next_domain_2 = next_domain(&ret_next_domain); + assert_eq!(ret_next_domain_2, &[FE::new(39), FE::new(57)]); + + let ret_next_domain_3 = next_domain(&ret_next_domain_2); + assert_eq!(ret_next_domain_3, &[FE::new(56)]); + } + + #[test] + fn text_next_fri_layer() { + let p0 = Polynomial::new(&[ + FE::new(3), + FE::new(1), + FE::new(2), + FE::new(7), + FE::new(3), + FE::new(5), + ]); + let beta = FE::new(4); + let input_domain = [ + FE::new(5), + FE::new(7), + FE::new(13), + FE::new(20), + FE::new(1), + FE::new(1), + FE::new(1), + FE::new(1), + ]; + + let (p1, ret_next_domain, ret_evaluation) = next_fri_layer(&p0, &input_domain, &beta); + + assert_eq!( + p1, + Polynomial::new(&[FE::new(7), FE::new(30), FE::new(23),]) + ); + assert_eq!( + ret_next_domain, + &[FE::new(25), FE::new(49), FE::new(169), FE::new(107),] + ); + assert_eq!( + ret_evaluation, + &[FE::new(189), FE::new(151), FE::new(93), FE::new(207),] + ); + } +} diff --git a/proving-system/stark/src/fri/mod.rs b/proving-system/stark/src/fri/mod.rs new file mode 100644 index 000000000..a7ac04e03 --- /dev/null +++ b/proving-system/stark/src/fri/mod.rs @@ -0,0 +1,114 @@ +mod fri_commitment; +pub mod fri_decommit; +mod fri_functions; + +use crate::fri::fri_commitment::{FriCommitment, FriCommitmentVec}; +use crate::fri::fri_functions::next_fri_layer; +pub use lambdaworks_crypto::merkle_tree::DefaultHasher; +pub type FriMerkleTree = MerkleTree; +pub use lambdaworks_crypto::fiat_shamir::transcript::Transcript; +pub use lambdaworks_crypto::merkle_tree::merkle::MerkleTree; +use lambdaworks_math::traits::ByteConversion; +use lambdaworks_math::unsigned_integer::element::U384; +pub use lambdaworks_math::{ + field::{element::FieldElement, fields::u64_prime_field::U64PrimeField}, + polynomial::Polynomial, +}; + +pub type F = crate::PrimeField; +pub type FE = crate::FE; + +/// # Params +/// +/// p_0, +/// original domain, +/// evaluation. 
+pub fn fri_commitment( + p_i: &Polynomial>, + domain_i: &[FE], + evaluation_i: &[FE], + transcript: &mut Transcript, +) -> FriCommitment { + // Merkle tree: + // - ret_evaluation + // - root + // - hasher + // Create a new merkle tree with evaluation_i + let merkle_tree = FriMerkleTree::build(evaluation_i); + + // append the root of the merkle tree to the transcript + let root = merkle_tree.root.clone(); + let root_bytes = (*root.value()).to_bytes_be(); + transcript.append(&root_bytes); + + FriCommitment { + poly: p_i.clone(), + domain: domain_i.to_vec(), + evaluation: evaluation_i.to_vec(), + merkle_tree, + } +} + +pub fn fri( + p_0: &mut Polynomial>, + domain_0: &[FE], + transcript: &mut Transcript, +) -> FriCommitmentVec { + let mut fri_commitment_list = FriCommitmentVec::new(); + let evaluation_0 = p_0.evaluate_slice(domain_0); + + let merkle_tree = FriMerkleTree::build(&evaluation_0); + + // append the root of the merkle tree to the transcript + let root = merkle_tree.root.clone(); + let root_bytes = (*root.value()).to_bytes_be(); + transcript.append(&root_bytes); + + let commitment_0 = FriCommitment { + poly: p_0.clone(), + domain: domain_0.to_vec(), + evaluation: evaluation_0.to_vec(), + merkle_tree, + }; + + // last poly of the list + let mut last_poly: Polynomial = p_0.clone(); + // last domain of the list + let mut last_domain: Vec = domain_0.to_vec(); + + // first evaluation in the list + fri_commitment_list.push(commitment_0); + let mut degree = p_0.degree(); + + let mut last_coef = last_poly.coefficients.get(0).unwrap(); + + while degree > 0 { + // sample beta: + // let beta_bytes = transcript.challenge(); + // let beta = FE::from_bytes_be(&beta_bytes).unwrap(); + let beta = FE::new(U384::from("4")); + + let (p_i, domain_i, evaluation_i) = next_fri_layer(&last_poly, &last_domain, &beta); + + let commitment_i = fri_commitment(&p_i, &domain_i, &evaluation_i, transcript); + + // append root of merkle tree to transcript + let tree = &commitment_i.merkle_tree; + let root = tree.root.clone(); + let root_bytes = (*root.value()).to_bytes_be(); + transcript.append(&root_bytes); + + fri_commitment_list.push(commitment_i); + degree = p_i.degree(); + + last_poly = p_i.clone(); + last_coef = last_poly.coefficients.get(0).unwrap(); + last_domain = domain_i.clone(); + } + + // append last value of the polynomial to the trasncript + let last_coef_bytes = (*last_coef.value()).to_bytes_be(); + transcript.append(&last_coef_bytes); + + fri_commitment_list +} diff --git a/proving-system/stark/src/lib.rs b/proving-system/stark/src/lib.rs new file mode 100644 index 000000000..dd0dea910 --- /dev/null +++ b/proving-system/stark/src/lib.rs @@ -0,0 +1,318 @@ +pub mod fri; + +use fri::fri_decommit::{fri_decommit_layers, FriDecommitment}; +use lambdaworks_crypto::fiat_shamir::transcript::Transcript; +use lambdaworks_crypto::merkle_tree::proof::Proof; +use lambdaworks_math::polynomial::{self, Polynomial}; + +use lambdaworks_math::field::element::FieldElement; +use lambdaworks_math::{ + field::fields::u384_prime_field::{IsMontgomeryConfiguration, MontgomeryBackendPrimeField}, + unsigned_integer::element::U384, +}; + +// DEFINITION OF THE USED FIELD +#[derive(Clone, Debug)] +pub struct MontgomeryConfig; +impl IsMontgomeryConfiguration for MontgomeryConfig { + const MODULUS: U384 = + // hex 17 + U384::from("800000000000011000000000000000000000000000000000000000000000001"); +} + +pub type PrimeField = MontgomeryBackendPrimeField; +pub type FE = FieldElement; + +const MODULUS_MINUS_1: U384 = 
U384::sub(&MontgomeryConfig::MODULUS, &U384::from("1")).0; + +/// Subgroup generator to generate the roots of unity +const FIELD_SUBGROUP_GENERATOR: u64 = 3; + +// DEFINITION OF CONSTANTS + +const ORDER_OF_ROOTS_OF_UNITY_TRACE: u64 = 32; +const ORDER_OF_ROOTS_OF_UNITY_FOR_LDE: u64 = 1024; + +// DEFINITION OF FUNCTIONS + +pub fn generate_primitive_root(subgroup_size: u64) -> FE { + let modulus_minus_1_field: FE = FE::new(MODULUS_MINUS_1); + let subgroup_size: FE = subgroup_size.into(); + let generator_field: FE = FIELD_SUBGROUP_GENERATOR.into(); + let exp = (&modulus_minus_1_field) / &subgroup_size; + generator_field.pow(exp.representative()) +} + +/// This functions takes a roots of unity and a coset factor +/// If coset_factor is 1, it's just expanding the roots of unity +/// w ^ 0, w ^ 1, w ^ 2 .... w ^ n-1 +/// If coset_factor is h +/// h * w ^ 0, h * w ^ 1 .... h * w ^ n-1 +pub fn generate_roots_of_unity_coset(coset_factor: u64, primitive_root: &FE) -> Vec { + let coset_factor: FE = coset_factor.into(); + + let mut numbers = vec![coset_factor.clone()]; + let mut exp: u64 = 1; + let mut next_root = primitive_root.pow(exp) * &coset_factor; + while next_root != coset_factor { + numbers.push(next_root); + exp += 1; + next_root = primitive_root.pow(exp) * &coset_factor; + } + numbers +} + +#[derive(Debug, Clone)] +pub struct StarkQueryProof { + pub trace_lde_poly_root: FE, + pub trace_lde_poly_evaluations: Vec, + /// Merkle paths for the trace polynomial evaluations + pub trace_lde_poly_inclusion_proofs: Vec>, + pub composition_poly_lde_evaluations: Vec, + pub fri_layers_merkle_roots: Vec, + pub fri_decommitment: FriDecommitment, +} + +pub type StarkProof = Vec; + +pub use lambdaworks_crypto::merkle_tree::merkle::MerkleTree; +pub use lambdaworks_crypto::merkle_tree::DefaultHasher; +pub type FriMerkleTree = MerkleTree; + +pub fn fibonacci_trace(initial_values: [FE; 2]) -> Vec { + let mut ret: Vec = vec![]; + + ret.push(initial_values[0].clone()); + ret.push(initial_values[1].clone()); + + for i in 2..(ORDER_OF_ROOTS_OF_UNITY_TRACE as usize) { + ret.push(ret[i - 1].clone() + ret[i - 2].clone()); + } + + ret +} + +pub fn prove(pub_inputs: [FE; 2]) -> StarkQueryProof { + let transcript = &mut Transcript::new(); + + // * Generate Coset + let trace_primitive_root = generate_primitive_root(ORDER_OF_ROOTS_OF_UNITY_TRACE); + let trace_roots_of_unity = generate_roots_of_unity_coset(1, &trace_primitive_root); + + let lde_primitive_root = generate_primitive_root(ORDER_OF_ROOTS_OF_UNITY_FOR_LDE); + let lde_roots_of_unity = generate_roots_of_unity_coset(1, &lde_primitive_root); + + let trace = fibonacci_trace(pub_inputs); + + let trace_poly = Polynomial::interpolate(&trace_roots_of_unity, &trace); + + // * Do Reed-Solomon on the trace and composition polynomials using some blowup factor + let trace_poly_lde = trace_poly.evaluate_slice(lde_roots_of_unity.as_slice()); + + // * Commit to both polynomials using a Merkle Tree + let trace_poly_lde_merkle_tree = FriMerkleTree::build(trace_poly_lde.as_slice()); + + // * Sample q_1, ..., q_m using Fiat-Shamir + // let q_1 = transcript.challenge(); + // @@@@@@@@@@@@@@@@@@@@@@ + let q_1: usize = 4; + + // START EVALUATION POINTS BLOCK + // This depends on the AIR + // It's related to the non FRI verification + + // These are evaluations over the trace polynomial + let evaluation_points = vec![ + lde_primitive_root.pow(q_1), + lde_primitive_root.pow(q_1) * &trace_primitive_root, + lde_primitive_root.pow(q_1) * (&trace_primitive_root * &trace_primitive_root), + ]; + 
let trace_lde_poly_evaluations = trace_poly.evaluate_slice(&evaluation_points); + let merkle_paths = vec![ + trace_poly_lde_merkle_tree + .get_proof_by_pos(q_1, trace_lde_poly_evaluations[0].clone()) + .unwrap(), + trace_poly_lde_merkle_tree + .get_proof_by_pos( + q_1 + (ORDER_OF_ROOTS_OF_UNITY_FOR_LDE / ORDER_OF_ROOTS_OF_UNITY_TRACE) as usize, + trace_lde_poly_evaluations[1].clone(), + ) + .unwrap(), + trace_poly_lde_merkle_tree + .get_proof_by_pos( + q_1 + (ORDER_OF_ROOTS_OF_UNITY_FOR_LDE / ORDER_OF_ROOTS_OF_UNITY_TRACE) as usize + * 2, + trace_lde_poly_evaluations[2].clone(), + ) + .unwrap(), + ]; + + // These are evaluations over the composition polynomial + let mut composition_poly = get_composition_poly(trace_poly, &trace_primitive_root); + let composition_poly_lde_evaluation = composition_poly.evaluate(&evaluation_points[0]); + + // This is needed to check the element is in the root + let trace_root = trace_poly_lde_merkle_tree.root; + + // END EVALUATION BLOCK + + // * Do FRI on the composition polynomials + let lde_fri_commitment = + crate::fri::fri(&mut composition_poly, &lde_roots_of_unity, transcript); + + // * For every q_i, do FRI decommitment + let fri_decommitment = fri_decommit_layers(&lde_fri_commitment, q_1); + + /* + IMPORTANT NOTE: + When we commit to the trace polynomial, let's call it f, we commit to an LDE of it. + On the other hand, the fibonacci constraint (and in general, any constraint) related to f applies + only using non-LDE roots of unity. + In this case, the constraint is f(w^2 x) - f(w x) - f(x), where w is a 2^n root of unity. + But for the commitment we use g, a 2^{nb} root of unity (b is the blowup factor). + When we sample a value x to evaluate the trace polynomial on, it has to be a 2^{nb} root of unity, + so with fiat-shamir we sample a random index in that range. + When we provide evaluations, we provide them for x*(w^2), x*w and x. 
+    */
+
+    let fri_layers_merkle_roots: Vec<FE> = lde_fri_commitment
+        .iter()
+        .map(|fri_commitment| fri_commitment.merkle_tree.root.clone())
+        .collect();
+
+    StarkQueryProof {
+        trace_lde_poly_root: trace_root,
+        trace_lde_poly_evaluations,
+        trace_lde_poly_inclusion_proofs: merkle_paths,
+        composition_poly_lde_evaluations: vec![composition_poly_lde_evaluation],
+        fri_layers_merkle_roots,
+        fri_decommitment,
+    }
+}
+
+fn get_composition_poly(trace_poly: Polynomial<FE>, root_of_unity: &FE) -> Polynomial<FE> {
+    let w_squared_x = Polynomial::new(&[FE::zero(), root_of_unity * root_of_unity]);
+    let w_x = Polynomial::new(&[FE::zero(), root_of_unity.clone()]);
+
+    polynomial::compose(&trace_poly, &w_squared_x)
+        - polynomial::compose(&trace_poly, &w_x)
+        - trace_poly
+}
+
+pub fn verify(proof: &StarkQueryProof) -> bool {
+    let transcript = &mut Transcript::new();
+
+    let trace_poly_root = &proof.trace_lde_poly_root;
+    let trace_evaluation = &proof.trace_lde_poly_evaluations;
+
+    // TODO: These could be multiple evaluations depending on how many q_i are sampled with Fiat-Shamir
+    let composition_polynomial_evaluation_from_prover = &proof.composition_poly_lde_evaluations[0];
+
+    let composition_polynomial_evaluation_from_trace =
+        &trace_evaluation[2] - &trace_evaluation[1] - &trace_evaluation[0];
+
+    if *composition_polynomial_evaluation_from_prover
+        != composition_polynomial_evaluation_from_trace
+    {
+        return false;
+    }
+
+    for merkle_proof in &proof.trace_lde_poly_inclusion_proofs {
+        if !merkle_proof.verify(trace_poly_root.clone()) {
+            return false;
+        }
+    }
+
+    fri_verify(
+        &proof.fri_layers_merkle_roots,
+        &proof.fri_decommitment,
+        transcript,
+    )
+}
+
+/// Performs FRI verification for some decommitment
+pub fn fri_verify(
+    fri_layers_merkle_roots: &[FE],
+    fri_decommitment: &FriDecommitment,
+    _transcript: &mut Transcript,
+) -> bool {
+    // For each FRI layer, check that every Merkle path verifies.
+
+    // Sample beta with Fiat-Shamir.
+    // Compute v = [P_i(z_i) + P_i(-z_i)] / 2 + beta * [P_i(z_i) - P_i(-z_i)] / (2 * z_i),
+    // where P_i is the folded polynomial of the i-th Fiat-Shamir round and z_i is obtained
+    // from the first z (derived through Fiat-Shamir) through a known calculation:
+    // given the index, z_i = index % length_of_evaluation_domain.
+
+    // Check that v = P_{i+1}(z_i).
+
+    let decommitment_index: u64 = 4;
+
+    let mut lde_primitive_root = generate_primitive_root(ORDER_OF_ROOTS_OF_UNITY_FOR_LDE);
+
+    // For each (merkle_root, merkle_auth_path) / fold,
+    // with the auth path containing the element whose
+    // existence the path proves.
+    for (
+        layer_number,
+        (fri_layer_merkle_root, (fri_layer_auth_path, fri_layer_auth_path_symmetric)),
+    ) in fri_layers_merkle_roots
+        .iter()
+        .zip(fri_decommitment.layer_merkle_paths.iter())
+        .enumerate()
+        // Since we always derive the current layer from the previous one,
+        // we start with the second layer, skipping the first, so the previous layer is the first one.
+        .skip(1)
+    {
+        if !fri_layer_auth_path.verify(fri_layer_merkle_root.clone()) {
+            return false;
+        }
+
+        if !fri_layer_auth_path_symmetric.verify(fri_layer_merkle_root.clone()) {
+            return false;
+        }
+
+        // TODO: use Fiat-Shamir
+        let beta: u64 = 4;
+
+        let (previous_auth_path, previous_auth_path_symmetric) = fri_decommitment
+            .layer_merkle_paths
+            .get(layer_number - 1)
+            // TODO: Check at the start of the FRI operation
+            // if layer_merkle_paths has the right amount of elements
+            .unwrap();
+
+        // evaluation point = w ^ i in the STARK literature
+        let evaluation_point = lde_primitive_root.pow(decommitment_index);
+
+        // v is the element computed for the collinearity check
+        let two = &FE::new(U384::from("2"));
+        let beta = FE::new(U384::from_u64(beta));
+        let v = (&previous_auth_path.value + &previous_auth_path_symmetric.value) / two
+            + beta * (&previous_auth_path.value - &previous_auth_path_symmetric.value)
+                / (two * evaluation_point);
+
+        lde_primitive_root = lde_primitive_root.pow(2_usize);
+
+        if v != fri_layer_auth_path.value {
+            return false;
+        }
+    }
+    true
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{verify, FE};
+
+    use super::prove;
+    use lambdaworks_math::unsigned_integer::element::U384;
+
+    #[test]
+    fn test_prove() {
+        let result = prove([FE::new(U384::from("1")), FE::new(U384::from("1"))]);
+        assert!(verify(&result));
+    }
+}
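
As a quick sanity check on the collinearity relation used in fri_verify above, v = [P_i(z) + P_i(-z)] / 2 + beta * [P_i(z) - P_i(-z)] / (2 * z), the sketch below folds a small polynomial by hand (even coefficients plus beta times the odd coefficients, mirroring fold_polynomial) and checks the identity folded(z^2) = v at one point. It is illustrative only and not part of the patch: it assumes the Polynomial and FieldElement APIs already exercised in this diff (Polynomial::new, evaluate, reference arithmetic) over the same small test field used in fri_functions.rs, and the choices x = 5 and beta = 4 are arbitrary.

// Standalone sketch (not part of the diff): check the FRI collinearity identity
//     folded(x^2) = (p(x) + p(-x)) / 2 + beta * (p(x) - p(-x)) / (2 * x)
// where folded = even coefficients of p + beta * odd coefficients of p,
// mirroring fold_polynomial over the test field U64PrimeField<293>.
use lambdaworks_math::field::element::FieldElement;
use lambdaworks_math::field::fields::u64_prime_field::U64PrimeField;
use lambdaworks_math::polynomial::Polynomial;

const MODULUS: u64 = 293;
type FE = FieldElement<U64PrimeField<MODULUS>>;

fn main() {
    // p(x) = 3 + x + 2x^2 + 7x^3 + 3x^4 + 5x^5, as in the fold tests of this patch.
    let p = Polynomial::new(&[
        FE::new(3),
        FE::new(1),
        FE::new(2),
        FE::new(7),
        FE::new(3),
        FE::new(5),
    ]);
    let beta = FE::new(4);

    // Fold by hand: even-indexed coefficients plus beta times the odd-indexed ones.
    // (Both halves have the same length here, so a plain zip suffices.)
    let even: Vec<FE> = p.coefficients.iter().step_by(2).cloned().collect();
    let odd_times_beta: Vec<FE> = p
        .coefficients
        .iter()
        .skip(1)
        .step_by(2)
        .map(|c| c * &beta)
        .collect();
    let folded_coefficients: Vec<FE> = even
        .iter()
        .zip(odd_times_beta.iter())
        .map(|(e, o)| e + o)
        .collect();
    let folded = Polynomial::new(&folded_coefficients);

    // Collinearity check at an arbitrary nonzero point x.
    let x = FE::new(5);
    let minus_x = &FE::new(0) - &x;
    let two = FE::new(2);

    let lhs = folded.evaluate(&(&x * &x));
    let rhs = (&p.evaluate(&x) + &p.evaluate(&minus_x)) / &two
        + &beta * (&p.evaluate(&x) - &p.evaluate(&minus_x)) / (&two * &x);

    assert_eq!(lhs, rhs);
    println!("collinearity identity holds: {lhs:?}");
}

In fri_verify the same relation is evaluated with the two values taken from the Merkle authentication paths of the previous layer, with beta and the query index still hardcoded pending the Fiat-Shamir sampling noted in the TODOs of the diff.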