From b40f6c72c3b35f3dad7626cde275c0edc46bf15f Mon Sep 17 00:00:00 2001 From: Nicolas Sarlin Date: Wed, 29 Oct 2025 16:47:08 +0100 Subject: [PATCH 1/4] fix(versionable): handle `#[default]` in Versionize types --- .../src/versionize_attribute.rs | 35 ++++++++++++++++--- utils/tfhe-versionable/tests/enum_default.rs | 31 ++++++++++++++++ 2 files changed, 61 insertions(+), 5 deletions(-) create mode 100644 utils/tfhe-versionable/tests/enum_default.rs diff --git a/utils/tfhe-versionable-derive/src/versionize_attribute.rs b/utils/tfhe-versionable-derive/src/versionize_attribute.rs index e2e0d5e038..3e323caa15 100644 --- a/utils/tfhe-versionable-derive/src/versionize_attribute.rs +++ b/utils/tfhe-versionable-derive/src/versionize_attribute.rs @@ -16,6 +16,20 @@ pub(crate) const SERDE_ATTR_NAME: &str = "serde"; /// Transparent mode can also be activated using `#[repr(transparent)]` pub(crate) const REPR_ATTR_NAME: &str = "repr"; +/// The generated associated types will only derive Serialize/Deserialize. We should not propagate +/// any attribute from other derive macro (eg: `#[default]`). This is a list of attributes that +/// should be propagated to the newly created type. +pub(crate) const PRESERVED_FIELD_ATTRIBUTE_NAMES: [&str; 4] = [ + // Not all serde attribute might be good to propagate. However, as a first approach we allow + // all of them. This might need some refining later. + "serde", + // cfg and cfg_attr should be propagated because it might not be possible to define the + // associated fields if the feature are not enabled + "cfg", "cfg_attr", + // allow is propagated to avoid adding some warnings that the user wanted to disable + "allow", +]; + /// Represent the parsed `#[versionize(...)]` attribute pub(crate) enum VersionizeAttribute { Classic(ClassicVersionizeAttribute), @@ -341,27 +355,38 @@ pub(crate) fn is_skipped(attributes: &[Attribute]) -> syn::Result { Ok(false) } -/// Replace `#[versionize(skip)]` with `#[serde(skip)]` in an attributes list +/// Replace `#[versionize(skip)]` with `#[serde(skip)]` in an attributes list, and remove attributes +/// from other derived macro pub(crate) fn replace_versionize_skip_with_serde( attributes: &[Attribute], ) -> syn::Result> { attributes .iter() .cloned() - .map(|attr| { + .filter_map(|attr| { if attr.path().is_ident(VERSIONIZE_ATTR_NAME) { let nested = - attr.parse_args_with(Punctuated::::parse_terminated)?; + match attr.parse_args_with(Punctuated::::parse_terminated) { + Ok(nested) => nested, + Err(e) => return Some(Err(e)), + }; for meta in nested.iter() { if let Meta::Path(path) = meta { if path.is_ident("skip") { - return Ok(parse_quote! { #[serde(skip)] }); + return Some(Ok(parse_quote! { #[serde(skip)] })); } } } } - Ok(attr) + + for preserved_attr in PRESERVED_FIELD_ATTRIBUTE_NAMES { + if attr.path().is_ident(preserved_attr) { + return Some(Ok(attr)); + } + } + + None }) .collect() } diff --git a/utils/tfhe-versionable/tests/enum_default.rs b/utils/tfhe-versionable/tests/enum_default.rs new file mode 100644 index 0000000000..a61d805eab --- /dev/null +++ b/utils/tfhe-versionable/tests/enum_default.rs @@ -0,0 +1,31 @@ +//! 
Test an enum that derives Default using the `#[default]` attribute + +use std::io::Cursor; + +use tfhe_versionable::{Unversionize, Versionize, VersionsDispatch}; +#[derive(Default, Debug, PartialEq, Eq, Versionize)] +#[versionize(MyEnumVersions)] +pub enum MyEnum { + Var0, + #[default] + Var1, +} + +#[derive(VersionsDispatch)] +#[allow(unused)] +pub enum MyEnumVersions { + V0(MyEnum), +} + +#[test] +fn test() { + let enu = MyEnum::default(); + + let mut ser = Vec::new(); + ciborium::ser::into_writer(&enu.versionize(), &mut ser).unwrap(); + + let unvers = + MyEnum::unversionize(ciborium::de::from_reader(&mut Cursor::new(&ser)).unwrap()).unwrap(); + + assert_eq!(unvers, enu); +} From cb4b6f644a918a9c486bd3ee72889fd4ec0a025f Mon Sep 17 00:00:00 2001 From: Nicolas Sarlin Date: Mon, 27 Oct 2025 14:39:43 +0100 Subject: [PATCH 2/4] chore(zk): match zkv2 hash impl with the description - encode the position of bits proven to be 0 in the hashes - hash the infinite norm instead of the euclidean one - hash the value of k with the statement --- .../src/backward_compatibility/pke_v2.rs | 172 ++++++++++++- tfhe-zk-pok/src/proofs/pke_v2/hashes.rs | 243 +++++++++++++++++- tfhe-zk-pok/src/proofs/pke_v2/mod.rs | 71 +++-- tfhe/src/integer/ciphertext/compact_list.rs | 10 +- tfhe/src/shortint/ciphertext/zk.rs | 19 +- tfhe/src/zk/mod.rs | 44 ++-- 6 files changed, 480 insertions(+), 79 deletions(-) diff --git a/tfhe-zk-pok/src/backward_compatibility/pke_v2.rs b/tfhe-zk-pok/src/backward_compatibility/pke_v2.rs index 65ab02294a..2d114d83f8 100644 --- a/tfhe-zk-pok/src/backward_compatibility/pke_v2.rs +++ b/tfhe-zk-pok/src/backward_compatibility/pke_v2.rs @@ -2,12 +2,15 @@ #![allow(non_snake_case)] use std::convert::Infallible; +use std::error::Error; +use std::fmt::Display; use tfhe_versionable::{Upgrade, Version, VersionsDispatch}; use crate::curve_api::{CompressedG1, CompressedG2, Compressible, Curve}; use crate::proofs::pke_v2::{ - CompressedComputeLoadProofFields, CompressedProof, ComputeLoadProofFields, PkeV2HashMode, Proof, + CompressedComputeLoadProofFields, CompressedProof, ComputeLoadProofFields, PkeV2HashMode, + PkeV2SupportedHashConfig, Proof, }; use super::IncompleteProof; @@ -89,10 +92,10 @@ pub struct ProofV1 { compute_load_proof_fields: Option>, } -impl Upgrade> for ProofV1 { +impl Upgrade> for ProofV1 { type Error = Infallible; - fn upgrade(self) -> Result, Self::Error> { + fn upgrade(self) -> Result, Self::Error> { let ProofV1 { C_hat_e, C_e, @@ -108,7 +111,7 @@ impl Upgrade> for ProofV1 { compute_load_proof_fields, } = self; - Ok(Proof { + Ok(ProofV2 { C_hat_e, C_e, C_r_tilde, @@ -126,11 +129,92 @@ impl Upgrade> for ProofV1 { } } +#[derive(Version)] +pub struct ProofV2 { + C_hat_e: G::G2, + C_e: G::G1, + C_r_tilde: G::G1, + C_R: G::G1, + C_hat_bin: G::G2, + C_y: G::G1, + C_h1: G::G1, + C_h2: G::G1, + C_hat_t: G::G2, + pi: G::G1, + pi_kzg: G::G1, + compute_load_proof_fields: Option>, + hash_mode: PkeV2HashMode, +} + +#[derive(Debug)] +pub struct UnsupportedHashConfig(String); + +impl Display for UnsupportedHashConfig { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Unsupported Hash config in pke V2 Proof: {}", self.0) + } +} + +impl Error for UnsupportedHashConfig {} + +impl TryFrom for PkeV2SupportedHashConfig { + type Error = UnsupportedHashConfig; + + fn try_from(value: PkeV2HashMode) -> Result { + match value { + PkeV2HashMode::BackwardCompat => Ok(PkeV2SupportedHashConfig::V0_4_0), + PkeV2HashMode::Classical => Err(UnsupportedHashConfig(String::from( + 
"Proof use hash mode \"Classical\" which has never been part of a default configuration", + ))), + PkeV2HashMode::Compact => Ok(PkeV2SupportedHashConfig::V0_7_0), + } + } +} + +impl Upgrade> for ProofV2 { + type Error = UnsupportedHashConfig; + + fn upgrade(self) -> Result, Self::Error> { + let ProofV2 { + C_hat_e, + C_e, + C_r_tilde, + C_R, + C_hat_bin, + C_y, + C_h1, + C_h2, + C_hat_t, + pi, + pi_kzg, + compute_load_proof_fields, + hash_mode, + } = self; + + Ok(Proof { + C_hat_e, + C_e, + C_r_tilde, + C_R, + C_hat_bin, + C_y, + C_h1, + C_h2, + C_hat_t, + pi, + pi_kzg, + compute_load_proof_fields, + hash_config: hash_mode.try_into()?, + }) + } +} + #[derive(VersionsDispatch)] pub enum ProofVersions { V0(ProofV0), V1(ProofV1), - V2(Proof), + V2(ProofV2), + V3(Proof), } #[derive(VersionsDispatch)] @@ -230,14 +314,14 @@ where compute_load_proof_fields: Option>, } -impl Upgrade> for CompressedProofV1 +impl Upgrade> for CompressedProofV1 where G::G1: Compressible, G::G2: Compressible, { type Error = Infallible; - fn upgrade(self) -> Result, Self::Error> { + fn upgrade(self) -> Result, Self::Error> { let CompressedProofV1 { C_hat_e, C_e, @@ -253,7 +337,7 @@ where compute_load_proof_fields, } = self; - Ok(CompressedProof { + Ok(CompressedProofV2 { C_hat_e, C_e, C_r_tilde, @@ -271,6 +355,69 @@ where } } +#[derive(Version)] +pub struct CompressedProofV2 +where + G::G1: Compressible, + G::G2: Compressible, +{ + C_hat_e: CompressedG2, + C_e: CompressedG1, + C_r_tilde: CompressedG1, + C_R: CompressedG1, + C_hat_bin: CompressedG2, + C_y: CompressedG1, + C_h1: CompressedG1, + C_h2: CompressedG1, + C_hat_t: CompressedG2, + pi: CompressedG1, + pi_kzg: CompressedG1, + compute_load_proof_fields: Option>, + hash_mode: PkeV2HashMode, +} + +impl Upgrade> for CompressedProofV2 +where + G::G1: Compressible, + G::G2: Compressible, +{ + type Error = UnsupportedHashConfig; + + fn upgrade(self) -> Result, Self::Error> { + let CompressedProofV2 { + C_hat_e, + C_e, + C_r_tilde, + C_R, + C_hat_bin, + C_y, + C_h1, + C_h2, + C_hat_t, + pi, + pi_kzg, + compute_load_proof_fields, + hash_mode, + } = self; + + Ok(CompressedProof { + C_hat_e, + C_e, + C_r_tilde, + C_R, + C_hat_bin, + C_y, + C_h1, + C_h2, + C_hat_t, + pi, + pi_kzg, + compute_load_proof_fields, + hash_config: hash_mode.try_into()?, + }) + } +} + #[derive(VersionsDispatch)] pub enum CompressedProofVersions where @@ -279,7 +426,8 @@ where { V0(CompressedProofV0), V1(CompressedProofV1), - V2(CompressedProof), + V2(CompressedProofV2), + V3(CompressedProof), } #[derive(VersionsDispatch)] @@ -297,3 +445,9 @@ pub enum PkeV2HashModeVersions { #[allow(dead_code)] V0(PkeV2HashMode), } + +#[derive(VersionsDispatch)] +pub enum PkeV2SupportedHashConfigVersions { + #[allow(dead_code)] + V0(PkeV2SupportedHashConfig), +} diff --git a/tfhe-zk-pok/src/proofs/pke_v2/hashes.rs b/tfhe-zk-pok/src/proofs/pke_v2/hashes.rs index fe305fe15b..7e293833b6 100644 --- a/tfhe-zk-pok/src/proofs/pke_v2/hashes.rs +++ b/tfhe-zk-pok/src/proofs/pke_v2/hashes.rs @@ -5,7 +5,7 @@ use tfhe_versionable::Versionize; /// Scalar generation using the hash random oracle use crate::{ - backward_compatibility::pke_v2::PkeV2HashModeVersions, + backward_compatibility::pke_v2::{PkeV2HashModeVersions, PkeV2SupportedHashConfigVersions}, curve_api::{Curve, FieldOps}, proofs::pke_v2::{compute_crs_params, inf_norm_bound_to_euclidean_squared}, }; @@ -39,6 +39,191 @@ pub enum PkeV2HashMode { Compact = 2, } +#[derive(Debug, Clone, Copy)] +/// How the position of bits proven to be 0 is encoded +pub enum 
PkeV2ProvenZeroBitsEncoding { + /// Light encoding where we only store the number of msb bits, that is the same for all slots + MsbZeroBitsCountOnly = 0, + /// Flexible encoding that allows to define any bit in any slot as being proven to be 0 + AnyBitAnySlot = 1, +} + +impl PkeV2ProvenZeroBitsEncoding { + pub fn encode_proven_zero_bits( + &self, + msb_zero_padding_bit_count: u64, + t: u64, + k: usize, + ) -> Vec { + match self { + PkeV2ProvenZeroBitsEncoding::MsbZeroBitsCountOnly => { + msb_zero_padding_bit_count.to_le_bytes().to_vec() + } + PkeV2ProvenZeroBitsEncoding::AnyBitAnySlot => { + encode_proven_zero_bits_anybit_anyslot(msb_zero_padding_bit_count, t, k) + } + } + } +} + +#[derive(Debug, Clone, Copy)] +/// The kind of norm bound that is hashed in the statement. +pub enum PkeV2HashedBoundType { + /// Hash the square of the derived L2/Euclidean norm that is used for the proof + SquaredEuclideanNorm = 0, + /// Hash the infinite norm given as input by the prover + InfinityNorm = 1, +} + +#[derive(Debug, Clone, Copy)] +pub struct PkeV2HashConfig { + pub(crate) mode: PkeV2HashMode, + pub(crate) proven_zero_bits_encoding: PkeV2ProvenZeroBitsEncoding, + pub(crate) hashed_bound_type: PkeV2HashedBoundType, + /// Should we also hash the value of k with the statement + pub(crate) hash_k: bool, +} + +impl PkeV2HashConfig { + pub fn mode(&self) -> PkeV2HashMode { + self.mode + } + + pub fn proven_zero_bits_encoding(&self) -> PkeV2ProvenZeroBitsEncoding { + self.proven_zero_bits_encoding + } + + pub fn hashed_bound(&self) -> PkeV2HashedBoundType { + self.hashed_bound_type + } + + pub fn hash_k(&self) -> bool { + self.hash_k + } +} + +/// List of hash config that were used for a given version of this crate +/// +/// This is stored in the proof so that we only support a specific subset of all possible config. +#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, Versionize)] +#[versionize(PkeV2SupportedHashConfigVersions)] +pub enum PkeV2SupportedHashConfig { + V0_4_0 = 0, + V0_7_0 = 1, + // Default hashing configuration used for proofs. This can be updated for performance or + // compliance reasons as long as we still handle the previous version for backward + // compatibility. + #[default] + V0_8_0 = 2, +} + +const PKEV2_HASH_CONFIG_V0_4_0: PkeV2HashConfig = PkeV2HashConfig { + mode: PkeV2HashMode::BackwardCompat, + proven_zero_bits_encoding: PkeV2ProvenZeroBitsEncoding::MsbZeroBitsCountOnly, + hashed_bound_type: PkeV2HashedBoundType::SquaredEuclideanNorm, + hash_k: false, +}; + +const PKEV2_HASH_CONFIG_V0_7_0: PkeV2HashConfig = PkeV2HashConfig { + mode: PkeV2HashMode::Compact, + proven_zero_bits_encoding: PkeV2ProvenZeroBitsEncoding::MsbZeroBitsCountOnly, + hashed_bound_type: PkeV2HashedBoundType::SquaredEuclideanNorm, + hash_k: false, +}; + +const PKEV2_HASH_CONFIG_V0_8_0: PkeV2HashConfig = PkeV2HashConfig { + mode: PkeV2HashMode::Compact, + proven_zero_bits_encoding: PkeV2ProvenZeroBitsEncoding::AnyBitAnySlot, + hashed_bound_type: PkeV2HashedBoundType::InfinityNorm, + hash_k: true, +}; + +impl From for PkeV2HashConfig { + fn from(value: PkeV2SupportedHashConfig) -> Self { + match value { + PkeV2SupportedHashConfig::V0_4_0 => PKEV2_HASH_CONFIG_V0_4_0, + PkeV2SupportedHashConfig::V0_7_0 => PKEV2_HASH_CONFIG_V0_7_0, + PkeV2SupportedHashConfig::V0_8_0 => PKEV2_HASH_CONFIG_V0_8_0, + } + } +} + +/// Encode the bits proven to be 0 in a plaintext list. +/// +/// Today, the proof only allows to prove msb to be 0, and the same number of msb is used for every +/// slots. 
This function encodes the number of 0 bits in a more future proof way. This allows in the +/// future to prove any bit in any slot to be 0 without having to change the encoding. +/// +/// For example, for a list of 6 elements, composed of 4 bits of plaintext that can take any value +/// and 1 bit of padding that is proven to be 0, we have: +/// -> k = 6, t = 2**5, msb_zero_padding_bit_count = 1 +/// -> the base value to be encoded is 0b01111 (1 zero bit + 4 free bits). In lsb to msb this is +/// 11110. +/// -> By copying the base value in lsb to msb 6 times, we get the following bit string: +/// bit: 11110|11110|11110|11110|11110|11110 +/// pos: 01234 56789 abcde f ... +/// -> that is decomposed in bytes: +/// bit: 11110111 10111101 11101111 01111000 +/// pos: 01234567 89abcdef ... +/// -> in the usual msb to lsb notation, the resulting bytes are: +/// bit: 0b11101111 0b10111101 0b11110111 0b11110 +/// pos: 76543210 fedcba98 ... +fn encode_proven_zero_bits_anybit_anyslot( + msb_zero_padding_bit_count: u64, + t: u64, + k: usize, +) -> Vec { + let t_log2 = t.ilog2(); + + assert!(msb_zero_padding_bit_count <= t_log2 as u64); + assert!(k < u32::MAX as usize); + + let msb_zero_padding_bit_count = msb_zero_padding_bit_count as u32; + let k = k as u32; + + let effective_t_log2 = t_log2 - msb_zero_padding_bit_count; + + // true since t is a u64 + assert!(effective_t_log2 <= 64); + + // This is the base value that will be encoded for all slots. For example, for 4 bits of + // plaintext and one bit of padding proven to be 0, this will be 0b01111. + // This value is stored in a u64 to support plaintext + padding size > 8. + let encoded_base = if effective_t_log2 == 64 { + u64::MAX + } else { + !(u64::MAX << effective_t_log2) + }; + + let number_bits_to_pack = k * t_log2; + let packed_byte_len = number_bits_to_pack.div_ceil(u8::BITS); + let mut packed = Vec::with_capacity(packed_byte_len as usize); + + // A temporary buffer of 128 bits that is used to store `encoded_base` + a remainder of at + // most 7 bits. + let mut bit_buffer: u128 = 0; + let mut bits_in_buffer = 0; + + for _ in 0..k { + // Add new bits to the temporary buffer + bit_buffer |= (encoded_base as u128) << bits_in_buffer; + bits_in_buffer += t_log2; + + // Dump the temporary buffer into the byte vec until there is less that a full byte left + while bits_in_buffer >= u8::BITS { + packed.push(bit_buffer as u8); + bit_buffer >>= u8::BITS; + bits_in_buffer -= u8::BITS; + } + } + + if bits_in_buffer > 0 { + packed.push(bit_buffer as u8); + } + + packed +} + impl PkeV2HashMode { /// Generate a list of scalars using the hash random oracle. 
The generated hashes are written to /// the `output` slice and a byte representation is returned @@ -164,7 +349,7 @@ impl<'a> RHash<'a> { C_hat_e_bytes: &'a [u8], C_e_bytes: &'a [u8], C_r_tilde_bytes: &'a [u8], - mode: PkeV2HashMode, + config: PkeV2HashConfig, ) -> (Box<[i8]>, Self) { let ( &PublicParams { @@ -196,12 +381,30 @@ impl<'a> RHash<'a> { bound_type, ); + let encoded_zero_bits = config.proven_zero_bits_encoding.encode_proven_zero_bits( + msbs_zero_padding_bit_count, + t_input, + k, + ); + + let hashed_bound = match config.hashed_bound_type { + PkeV2HashedBoundType::SquaredEuclideanNorm => B_squared.to_le_bytes().to_vec(), + PkeV2HashedBoundType::InfinityNorm => B_inf.to_le_bytes().to_vec(), + }; + + let hashed_k = if config.hash_k { + (k as u64).to_le_bytes().to_vec() + } else { + Vec::new() + }; + let x_bytes = [ q.to_le_bytes().as_slice(), (d as u64).to_le_bytes().as_slice(), - B_squared.to_le_bytes().as_slice(), + hashed_k.as_slice(), + &hashed_bound, t_input.to_le_bytes().as_slice(), - msbs_zero_padding_bit_count.to_le_bytes().as_slice(), + encoded_zero_bits.as_slice(), &*a.iter() .flat_map(|&x| x.to_le_bytes()) .collect::>(), @@ -257,7 +460,7 @@ impl<'a> RHash<'a> { }) .collect::>(); - let R_bytes = mode.encode_R(&R); + let R_bytes = config.mode.encode_R(&R); ( R, @@ -275,7 +478,7 @@ impl<'a> RHash<'a> { n, k, d, - mode, + mode: config.mode, }, R_bytes, @@ -1177,3 +1380,31 @@ impl<'a> ZHash<'a> { chi } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_proven_zero_bits_encoding() { + // Test the most common case + let res = encode_proven_zero_bits_anybit_anyslot(1, 1 << 5, 6); + // base value is 0b01111 (msb to lsb) + // -> 11110 * 6 (lsb to msb) + // -> 11110|11110|11110|11110|11110|11110 (lsb to msb) + // -> 11110111 10111101 11101111 01111000 (lsb to msb) + // -> 0b11101111 0b10111101 0b11110111 0b11110 (msb to lsb) + let expected = vec![0b11101111, 0b10111101, 0b11110111, 0b11110]; + assert_eq!(expected, res); + + // Test a case where plaintext modulus log is > 8 + let res = encode_proven_zero_bits_anybit_anyslot(2, 1 << 9, 3); + // base value is 0b001111111 (msb to lsb) + // 111111100 * 3 (lsb to msb) + // 111111100|111111100|111111100 (lsb to msb) + // 11111110 01111111 00111111 10000000 (lsb to msb) + // 0b1111111, 0b11111110, 0b11111100, 0b1 (msb to lsb) + let expected = vec![0b1111111, 0b11111110, 0b11111100, 0b1]; + assert_eq!(expected, res); + } +} diff --git a/tfhe-zk-pok/src/proofs/pke_v2/mod.rs b/tfhe-zk-pok/src/proofs/pke_v2/mod.rs index 47f8b9a373..62f0d0230d 100644 --- a/tfhe-zk-pok/src/proofs/pke_v2/mod.rs +++ b/tfhe-zk-pok/src/proofs/pke_v2/mod.rs @@ -20,7 +20,7 @@ mod hashes; use hashes::RHash; -pub use hashes::PkeV2HashMode; +pub use hashes::*; fn bit_iter(x: u64, nbits: u32) -> impl Iterator { (0..nbits).map(move |idx| ((x >> idx) & 1) != 0) @@ -370,7 +370,7 @@ pub struct Proof { pub(crate) pi: G::G1, pub(crate) pi_kzg: G::G1, pub(crate) compute_load_proof_fields: Option>, - pub(crate) hash_mode: PkeV2HashMode, + pub(crate) hash_config: PkeV2SupportedHashConfig, } impl Proof { @@ -393,7 +393,7 @@ impl Proof { pi, pi_kzg, ref compute_load_proof_fields, - hash_mode: _, + hash_config: _, } = self; C_hat_e.validate_projective() @@ -421,8 +421,8 @@ impl Proof { } } - pub fn hash_mode(&self) -> PkeV2HashMode { - self.hash_mode + pub fn hash_config(&self) -> PkeV2SupportedHashConfig { + self.hash_config } } @@ -471,7 +471,7 @@ where pub(crate) pi: CompressedG1, pub(crate) pi_kzg: CompressedG1, pub(crate) compute_load_proof_fields: Option>, - 
pub(crate) hash_mode: PkeV2HashMode, + pub(crate) hash_config: PkeV2SupportedHashConfig, } #[derive(Serialize, Deserialize, Versionize)] @@ -512,7 +512,7 @@ where pi, pi_kzg, compute_load_proof_fields, - hash_mode, + hash_config, } = self; CompressedProof { @@ -534,7 +534,7 @@ where C_hat_w: C_hat_w.compress(), }, ), - hash_mode: *hash_mode, + hash_config: *hash_config, } } @@ -552,7 +552,7 @@ where pi, pi_kzg, compute_load_proof_fields, - hash_mode, + hash_config, } = compressed; Ok(Proof { @@ -580,7 +580,7 @@ where } else { None }, - hash_mode, + hash_config, }) } } @@ -831,7 +831,7 @@ pub fn prove( metadata, load, seed, - PkeV2HashMode::Compact, + PkeV2SupportedHashConfig::default(), ProofSanityCheckMode::Panic, ) } @@ -842,7 +842,7 @@ fn prove_impl( metadata: &[u8], load: ComputeLoad, seed: &[u8], - hash_mode: PkeV2HashMode, + hash_config: PkeV2SupportedHashConfig, sanity_check_mode: ProofSanityCheckMode, ) -> Proof { _ = load; @@ -864,6 +864,9 @@ fn prove_impl( }, PublicCommit { a, b, c1, c2, .. }, ) = public; + let stored_hash_config = hash_config; + let hash_config = hash_config.into(); + let g_list = &*g_lists.g_list.0; let g_hat_list = &*g_lists.g_hat_list.0; @@ -987,7 +990,7 @@ fn prove_impl( C_hat_e_bytes.as_ref(), C_e_bytes.as_ref(), C_r_tilde_bytes.as_ref(), - hash_mode, + hash_config, ); let R = |i: usize, j: usize| R[i + j * 128]; @@ -1606,7 +1609,7 @@ fn prove_impl( pi, pi_kzg, compute_load_proof_fields, - hash_mode, + hash_config: stored_hash_config, } } @@ -1743,8 +1746,9 @@ pub fn verify_impl( pi, pi_kzg, ref compute_load_proof_fields, - hash_mode, + hash_config, } = proof; + let hash_config = hash_config.into(); let pairing = G::Gt::pairing; @@ -1811,7 +1815,7 @@ pub fn verify_impl( C_hat_e_bytes.as_ref(), C_e_bytes.as_ref(), C_r_tilde_bytes.as_ref(), - hash_mode, + hash_config, ); let R = |i: usize, j: usize| R[i + j * 128]; @@ -2342,14 +2346,17 @@ mod tests { ); for load in [ComputeLoad::Proof, ComputeLoad::Verify] { - for hash_mode in [PkeV2HashMode::BackwardCompat, PkeV2HashMode::Classical] { + for hash_config in [ + PkeV2SupportedHashConfig::V0_4_0, + PkeV2SupportedHashConfig::V0_7_0, + ] { let proof = prove_impl( (&public_param, &public_commit), &private_commit, &testcase.metadata, load, &seed.to_le_bytes(), - hash_mode, + hash_config, ProofSanityCheckMode::Panic, ); @@ -2378,7 +2385,7 @@ mod tests { crs: &PublicParams, load: ComputeLoad, seed: &[u8], - hash_mode: PkeV2HashMode, + hash_config: PkeV2SupportedHashConfig, sanity_check_mode: ProofSanityCheckMode, ) -> VerificationResult { let (public_commit, private_commit) = commit( @@ -2399,7 +2406,7 @@ mod tests { &testcase.metadata, load, seed, - hash_mode, + hash_config, sanity_check_mode, ); @@ -2417,15 +2424,23 @@ mod tests { testcase_name: &str, crs: &PublicParams, seed: &[u8], - hash_mode: PkeV2HashMode, + hash_config: PkeV2SupportedHashConfig, sanity_check_mode: ProofSanityCheckMode, expected_result: VerificationResult, ) { for load in [ComputeLoad::Proof, ComputeLoad::Verify] { assert_eq!( - prove_and_verify(testcase, ct, crs, load, seed, hash_mode, sanity_check_mode), + prove_and_verify( + testcase, + ct, + crs, + load, + seed, + hash_config, + sanity_check_mode + ), expected_result, - "Testcase {testcase_name} {hash_mode:?} hash with load {load} failed" + "Testcase {testcase_name} {hash_config:?} hash with load {load} failed" ) } } @@ -2691,7 +2706,7 @@ mod tests { &format!("{name}_crs"), &crs, &seed.to_le_bytes(), - PkeV2HashMode::Compact, + PkeV2SupportedHashConfig::default(), 
ProofSanityCheckMode::Ignore, expected_result, ); @@ -2701,7 +2716,7 @@ mod tests { &format!("{name}_crs_max_k"), &crs_max_k, &seed.to_le_bytes(), - PkeV2HashMode::Compact, + PkeV2SupportedHashConfig::default(), ProofSanityCheckMode::Ignore, expected_result, ); @@ -2795,7 +2810,7 @@ mod tests { test_name, &public_param, &seed.to_le_bytes(), - PkeV2HashMode::Compact, + PkeV2SupportedHashConfig::default(), ProofSanityCheckMode::Panic, VerificationResult::Reject, ); @@ -3056,7 +3071,7 @@ mod tests { "testcase_bad_delta", &crs, &seed.to_le_bytes(), - PkeV2HashMode::Compact, + PkeV2SupportedHashConfig::default(), ProofSanityCheckMode::Panic, VerificationResult::Reject, ); @@ -3098,7 +3113,7 @@ mod tests { &format!("testcase_big_params_{bound:?}"), &crs, &seed.to_le_bytes(), - PkeV2HashMode::Compact, + PkeV2SupportedHashConfig::default(), ProofSanityCheckMode::Panic, VerificationResult::Accept, ); diff --git a/tfhe/src/integer/ciphertext/compact_list.rs b/tfhe/src/integer/ciphertext/compact_list.rs index 26a8650bbb..232b685c1a 100644 --- a/tfhe/src/integer/ciphertext/compact_list.rs +++ b/tfhe/src/integer/ciphertext/compact_list.rs @@ -24,7 +24,7 @@ use crate::shortint::server_key::LookupTableOwned; use crate::shortint::{CarryModulus, Ciphertext, MessageModulus}; #[cfg(feature = "zk-pok")] use crate::zk::{ - CompactPkeCrs, CompactPkeProofConformanceParams, ZkComputeLoad, ZkPkeV2HashMode, + CompactPkeCrs, CompactPkeProofConformanceParams, ZkComputeLoad, ZkPkeV2SupportedHashConfig, ZkVerificationOutcome, }; use std::num::NonZero; @@ -1147,13 +1147,13 @@ impl IntegerProvenCompactCiphertextListConformanceParams { } } - /// Forbid proofs coming with the provided [`ZkPkeV2HashMode`]. This has no effect on PkeV1 - /// proofs - pub fn forbid_hash_mode(self, forbidden_hash_mode: ZkPkeV2HashMode) -> Self { + /// Forbid proofs coming with the provided [`ZkPkeV2SupportedHashConfig`]. This has no effect on + /// PkeV1 proofs + pub fn forbid_hash_config(self, forbidden_hash_config: ZkPkeV2SupportedHashConfig) -> Self { Self { zk_conformance_params: self .zk_conformance_params - .forbid_hash_mode(forbidden_hash_mode), + .forbid_hash_config(forbidden_hash_config), ..self } } diff --git a/tfhe/src/shortint/ciphertext/zk.rs b/tfhe/src/shortint/ciphertext/zk.rs index e503d145f1..6d64c166e1 100644 --- a/tfhe/src/shortint/ciphertext/zk.rs +++ b/tfhe/src/shortint/ciphertext/zk.rs @@ -12,7 +12,7 @@ use crate::shortint::parameters::{ use crate::shortint::{Ciphertext, CompactPublicKey}; use crate::zk::{ CompactPkeCrs, CompactPkeProof, CompactPkeProofConformanceParams, ZkComputeLoad, - ZkMSBZeroPaddingBitCount, ZkPkeV2HashMode, ZkVerificationOutcome, + ZkMSBZeroPaddingBitCount, ZkPkeV2SupportedHashConfig, ZkVerificationOutcome, }; use rayon::prelude::*; @@ -240,13 +240,13 @@ impl ProvenCompactCiphertextListConformanceParams { } } - /// Forbid proofs coming with the provided [`ZkPkeV2HashMode`]. This has no effect on PkeV1 - /// proofs - pub fn forbid_hash_mode(self, forbidden_hash_mode: ZkPkeV2HashMode) -> Self { + /// Forbid proofs coming with the provided [`ZkPkeV2SupportedHashConfig`]. 
This has no effect on + /// PkeV1 proofs + pub fn forbid_hash_config(self, forbidden_hash_config: ZkPkeV2SupportedHashConfig) -> Self { Self { zk_conformance_params: self .zk_conformance_params - .forbid_hash_mode(forbidden_hash_mode), + .forbid_hash_config(forbidden_hash_config), ..self } } @@ -328,7 +328,7 @@ mod tests { ClientKey, CompactPrivateKey, CompactPublicKey, KeySwitchingKey, ServerKey, }; use crate::zk::{ - CompactPkeCrs, CompactPkeProofConformanceParams, ZkComputeLoad, ZkPkeV2HashMode, + CompactPkeCrs, CompactPkeProofConformanceParams, ZkComputeLoad, ZkPkeV2SupportedHashConfig, }; use rand::random; @@ -498,10 +498,9 @@ mod tests { assert!(!proven_ct.is_conformant(&no_cl_verif_conformance_params)); - // By default, zk proofs use compact hash mode. - let no_compact_hash_conformance_params = - conformance_params.forbid_hash_mode(ZkPkeV2HashMode::Compact); + let no_default_hash_config_conformance_params = + conformance_params.forbid_hash_config(ZkPkeV2SupportedHashConfig::default()); - assert!(!proven_ct.is_conformant(&no_compact_hash_conformance_params)); + assert!(!proven_ct.is_conformant(&no_default_hash_config_conformance_params)); } } diff --git a/tfhe/src/zk/mod.rs b/tfhe/src/zk/mod.rs index 0e50e8eb74..9815a676f9 100644 --- a/tfhe/src/zk/mod.rs +++ b/tfhe/src/zk/mod.rs @@ -22,11 +22,11 @@ use tfhe_zk_pok::proofs::pke::{ }; use tfhe_zk_pok::proofs::pke_v2::{ commit as commit_v2, crs_gen as crs_gen_v2, prove as prove_v2, verify as verify_v2, - PkeV2HashMode, Proof as ProofV2, PublicCommit as PublicCommitV2, + PkeV2SupportedHashConfig, Proof as ProofV2, PublicCommit as PublicCommitV2, }; pub use tfhe_zk_pok::curve_api::Compressible; -pub use tfhe_zk_pok::proofs::pke_v2::PkeV2HashMode as ZkPkeV2HashMode; +pub use tfhe_zk_pok::proofs::pke_v2::PkeV2SupportedHashConfig as ZkPkeV2SupportedHashConfig; pub use tfhe_zk_pok::proofs::ComputeLoad as ZkComputeLoad; type Curve = tfhe_zk_pok::curve_api::Bls12_446; @@ -48,7 +48,7 @@ impl CastInto for ZkComputeLoad { } } -impl CastInto for PkeV2HashMode { +impl CastInto for ZkPkeV2SupportedHashConfig { fn cast_into(self) -> usize { self as usize } @@ -104,7 +104,7 @@ impl ParameterSetConformant for ProofV1 { /// Used to explicitly reject [`ProofV2`] proofs that come with specific config pub struct CompactPkeV2ProofConformanceParams { accepted_compute_load: EnumSet, - accepted_hash_mode: EnumSet, + accepted_hash_config: EnumSet, } impl Default for CompactPkeV2ProofConformanceParams { @@ -120,14 +120,14 @@ impl CompactPkeV2ProofConformanceParams { accepted_compute_load.insert(ZkComputeLoad::Proof); accepted_compute_load.insert(ZkComputeLoad::Verify); - let mut accepted_hash_mode = EnumSet::new(); - accepted_hash_mode.insert(PkeV2HashMode::BackwardCompat); - accepted_hash_mode.insert(PkeV2HashMode::Classical); - accepted_hash_mode.insert(PkeV2HashMode::Compact); + let mut accepted_hash_config = EnumSet::new(); + accepted_hash_config.insert(PkeV2SupportedHashConfig::V0_4_0); + accepted_hash_config.insert(PkeV2SupportedHashConfig::V0_7_0); + accepted_hash_config.insert(PkeV2SupportedHashConfig::V0_8_0); Self { accepted_compute_load, - accepted_hash_mode, + accepted_hash_config, } } @@ -138,18 +138,18 @@ impl CompactPkeV2ProofConformanceParams { Self { accepted_compute_load, - accepted_hash_mode: self.accepted_hash_mode, + ..self } } - /// Forbid proofs coming with the provided [`ZkPkeV2HashMode`] - pub fn forbid_hash_mode(self, forbidden_hash_mode: ZkPkeV2HashMode) -> Self { - let mut accepted_hash_mode = self.accepted_hash_mode; - 
accepted_hash_mode.remove(forbidden_hash_mode); + /// Forbid proofs coming with the provided [`ZkPkeV2SupportedHashConfig`] + pub fn forbid_hash_config(self, forbidden_hash_config: ZkPkeV2SupportedHashConfig) -> Self { + let mut accepted_hash_config = self.accepted_hash_config; + accepted_hash_config.remove(forbidden_hash_config); Self { - accepted_compute_load: self.accepted_compute_load, - accepted_hash_mode, + accepted_hash_config, + ..self } } } @@ -161,7 +161,9 @@ impl ParameterSetConformant for ProofV2 { parameter_set .accepted_compute_load .contains(self.compute_load()) - && parameter_set.accepted_hash_mode.contains(self.hash_mode()) + && parameter_set + .accepted_hash_config + .contains(self.hash_config()) && self.is_usable() } } @@ -192,13 +194,13 @@ impl CompactPkeProofConformanceParams { } } - /// Forbid proofs coming with the provided [`ZkPkeV2HashMode`]. This has no effect on PkeV1 - /// proofs - pub fn forbid_hash_mode(self, forbidden_hash_mode: ZkPkeV2HashMode) -> Self { + /// Forbid proofs coming with the provided [`ZkPkeV2SupportedHashConfig`]. This has no effect on + /// PkeV1 proofs + pub fn forbid_hash_config(self, forbidden_hash_config: ZkPkeV2SupportedHashConfig) -> Self { match self { // There is no hash mode to configure in PkeV1 Self::PkeV1(params) => Self::PkeV1(params), - Self::PkeV2(params) => Self::PkeV2(params.forbid_hash_mode(forbidden_hash_mode)), + Self::PkeV2(params) => Self::PkeV2(params.forbid_hash_config(forbidden_hash_config)), } } } From 249b2075f5afa2ed9069b28372943aaa39cd6d0f Mon Sep 17 00:00:00 2001 From: Nicolas Sarlin Date: Mon, 27 Oct 2025 17:50:54 +0100 Subject: [PATCH 3/4] chore(backward): add data for the new zk proof --- .../crates/generate_1_5/src/lib.rs | 127 +++++++-- .../crates/generate_1_5/src/utils.rs | 252 +++++++++++++++--- .../1_5/high_level_api/client_key_for_zk.cbor | 3 + .../hl_proven_list_zkv2_1_5.bcode | 3 + .../hl_proven_list_zkv2_1_5.cbor | 3 + .../data/1_5/high_level_api/public_key.cbor | 3 + .../data/1_5/high_level_api/zk_pke_crs.cbor | 3 + .../data/high_level_api.ron | 20 ++ 8 files changed, 354 insertions(+), 60 deletions(-) create mode 100644 utils/tfhe-backward-compat-data/data/1_5/high_level_api/client_key_for_zk.cbor create mode 100644 utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.bcode create mode 100644 utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.cbor create mode 100644 utils/tfhe-backward-compat-data/data/1_5/high_level_api/public_key.cbor create mode 100644 utils/tfhe-backward-compat-data/data/1_5/high_level_api/zk_pke_crs.cbor diff --git a/utils/tfhe-backward-compat-data/crates/generate_1_5/src/lib.rs b/utils/tfhe-backward-compat-data/crates/generate_1_5/src/lib.rs index 1156200cb0..d535f06375 100644 --- a/utils/tfhe-backward-compat-data/crates/generate_1_5/src/lib.rs +++ b/utils/tfhe-backward-compat-data/crates/generate_1_5/src/lib.rs @@ -7,7 +7,11 @@ use tfhe::boolean::engine::BooleanEngine; use tfhe::core_crypto::commons::generators::DeterministicSeeder; use tfhe::core_crypto::prelude::DefaultRandomGenerator; use tfhe::shortint::engine::ShortintEngine; -use tfhe::{CompressedServerKey, Seed}; +use tfhe::zk::{CompactPkeCrs, ZkComputeLoad}; +use tfhe::{ + ClientKey, CompactPublicKey, CompressedServerKey, ProvenCompactCiphertextList, Seed, ServerKey, + set_server_key, +}; use tfhe_backward_compat_data::generate::*; use tfhe_backward_compat_data::*; use utils::*; @@ -30,6 +34,23 @@ const HL_SERVERKEY_WITH_COMPRESSION_TEST: HlServerKeyTest = 
HlServerKeyTest { rerand_cpk_filename: None, compressed: false, }; + +// We have a proven list generated for 0.11, but since this version the hash modes have evolved so +// we re generate one +const HL_PROVEN_COMPACTLIST_TEST_ZKV2: HlHeterogeneousCiphertextListTest = + HlHeterogeneousCiphertextListTest { + test_filename: Cow::Borrowed("hl_proven_list_zkv2_1_5"), + key_filename: Cow::Borrowed("client_key_for_zk"), + clear_values: Cow::Borrowed(&[17u8 as u64]), + data_kinds: Cow::Borrowed(&[DataKind::Unsigned]), + compressed: false, + proof_info: Some(PkeZkProofAuxiliaryInfo { + public_key_filename: Cow::Borrowed("public_key"), + params_filename: Cow::Borrowed("zk_pke_crs"), + metadata: Cow::Borrowed("2vdrawkcab"), + }), + }; + pub struct V1_5; impl TfhersVersion for V1_5 { @@ -54,29 +75,97 @@ impl TfhersVersion for V1_5 { let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME); create_dir_all(&dir).unwrap(); - let config = - tfhe::ConfigBuilder::with_custom_parameters(HL_CLIENTKEY_TEST.parameters.convert()) - .enable_compression(INSECURE_TEST_PARAMS_TUNIFORM_COMPRESSION_MULTIBIT.convert()) - .build(); - let (hl_client_key, hl_server_key) = tfhe::generate_keys(config); - let compressed_server_key = CompressedServerKey::new(&hl_client_key); - - store_versioned_test(&hl_client_key, &dir, &HL_CLIENTKEY_TEST.test_filename); - store_versioned_test( - &compressed_server_key, - &dir, - &HL_COMPRESSED_SERVERKEY_TEST.test_filename, - ); - store_versioned_test( - &hl_server_key, - &dir, - &HL_SERVERKEY_WITH_COMPRESSION_TEST.test_filename, - ); + { + let config = + tfhe::ConfigBuilder::with_custom_parameters(HL_CLIENTKEY_TEST.parameters.convert()) + .enable_compression( + INSECURE_TEST_PARAMS_TUNIFORM_COMPRESSION_MULTIBIT.convert(), + ) + .build(); + let (hl_client_key, hl_server_key) = tfhe::generate_keys(config); + let compressed_server_key = CompressedServerKey::new(&hl_client_key); + + store_versioned_test(&hl_client_key, &dir, &HL_CLIENTKEY_TEST.test_filename); + store_versioned_test( + &compressed_server_key, + &dir, + &HL_COMPRESSED_SERVERKEY_TEST.test_filename, + ); + store_versioned_test( + &hl_server_key, + &dir, + &HL_SERVERKEY_WITH_COMPRESSION_TEST.test_filename, + ); + } + + // Generate a zk proof with the new hash modes + { + let config = tfhe::ConfigBuilder::with_custom_parameters( + INSECURE_SMALL_TEST_PARAMS_KS32.convert(), + ) + .use_dedicated_compact_public_key_parameters(( + INSECURE_DEDICATED_CPK_TEST_PARAMS.convert(), + KS_TO_SMALL_TEST_PARAMS.convert(), + )) + .build(); + let hl_client_key = ClientKey::generate(config); + let hl_server_key = ServerKey::new(&hl_client_key); + set_server_key(hl_server_key.clone()); + let compact_pub_key = CompactPublicKey::new(&hl_client_key); + let crs = CompactPkeCrs::from_config(config, 64).unwrap(); + + store_versioned_auxiliary( + &crs, + &dir, + &HL_PROVEN_COMPACTLIST_TEST_ZKV2 + .proof_info + .unwrap() + .params_filename, + ); + + // Store the associated client key to be able to decrypt the ciphertexts in the list + store_versioned_auxiliary( + &hl_client_key, + &dir, + &HL_PROVEN_COMPACTLIST_TEST_ZKV2.key_filename, + ); + + store_versioned_auxiliary( + &compact_pub_key, + &dir, + &HL_PROVEN_COMPACTLIST_TEST_ZKV2 + .proof_info + .unwrap() + .public_key_filename, + ); + + let mut proven_builder = ProvenCompactCiphertextList::builder(&compact_pub_key); + proven_builder.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[0] as u8); + + let proven_list_packed = proven_builder + .build_with_proof_packed( + &crs, + 
HL_PROVEN_COMPACTLIST_TEST_ZKV2 + .proof_info + .unwrap() + .metadata + .as_bytes(), + ZkComputeLoad::Proof, + ) + .unwrap(); + + store_versioned_test( + &proven_list_packed, + &dir, + &HL_PROVEN_COMPACTLIST_TEST_ZKV2.test_filename, + ); + } vec![ TestMetadata::HlClientKey(HL_CLIENTKEY_TEST), TestMetadata::HlServerKey(HL_COMPRESSED_SERVERKEY_TEST), TestMetadata::HlServerKey(HL_SERVERKEY_WITH_COMPRESSION_TEST), + TestMetadata::HlHeterogeneousCiphertextList(HL_PROVEN_COMPACTLIST_TEST_ZKV2), ] } } diff --git a/utils/tfhe-backward-compat-data/crates/generate_1_5/src/utils.rs b/utils/tfhe-backward-compat-data/crates/generate_1_5/src/utils.rs index 41c9334124..265d2cfc05 100644 --- a/utils/tfhe-backward-compat-data/crates/generate_1_5/src/utils.rs +++ b/utils/tfhe-backward-compat-data/crates/generate_1_5/src/utils.rs @@ -2,12 +2,13 @@ use std::path::Path; use tfhe::core_crypto::prelude::{ CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger, }; +use tfhe::shortint::MultiBitPBSParameters; use tfhe::shortint::parameters::list_compression::{ ClassicCompressionParameters, MultiBitCompressionParameters, }; +use tfhe::shortint::parameters::noise_squashing::NoiseSquashingMultiBitParameters; use tfhe::shortint::parameters::*; use tfhe::shortint::prelude::ModulusSwitchType; -use tfhe::shortint::{MultiBitPBSParameters, PBSParameters}; use tfhe_backward_compat_data::generate::*; use tfhe_backward_compat_data::*; use tfhe_versionable::Versionize; @@ -53,6 +54,40 @@ where } } +impl ConvertParams for TestModulusSwitchNoiseReductionParams { + fn convert(self) -> ModulusSwitchNoiseReductionParams { + let TestModulusSwitchNoiseReductionParams { + modulus_switch_zeros_count, + ms_bound, + ms_r_sigma_factor, + ms_input_variance, + } = self; + + ModulusSwitchNoiseReductionParams { + modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count), + ms_bound: NoiseEstimationMeasureBound(ms_bound), + ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor), + ms_input_variance: Variance(ms_input_variance), + } + } +} + +impl ConvertParams for TestModulusSwitchType { + fn convert(self) -> ModulusSwitchType { + match self { + TestModulusSwitchType::Standard => ModulusSwitchType::Standard, + TestModulusSwitchType::DriftTechniqueNoiseReduction( + test_modulus_switch_noise_reduction_params, + ) => ModulusSwitchType::DriftTechniqueNoiseReduction( + test_modulus_switch_noise_reduction_params.convert(), + ), + TestModulusSwitchType::CenteredMeanNoiseReduction => { + ModulusSwitchType::CenteredMeanNoiseReduction + } + } + } +} + impl ConvertParams for TestClassicParameterSet { fn convert(self) -> ClassicPBSParameters { let TestClassicParameterSet { @@ -101,40 +136,6 @@ impl ConvertParams for TestClassicParameterSet { } } -impl ConvertParams for TestModulusSwitchNoiseReductionParams { - fn convert(self) -> ModulusSwitchNoiseReductionParams { - let TestModulusSwitchNoiseReductionParams { - modulus_switch_zeros_count, - ms_bound, - ms_r_sigma_factor, - ms_input_variance, - } = self; - - ModulusSwitchNoiseReductionParams { - modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count), - ms_bound: NoiseEstimationMeasureBound(ms_bound), - ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor), - ms_input_variance: Variance(ms_input_variance), - } - } -} - -impl ConvertParams for TestModulusSwitchType { - fn convert(self) -> ModulusSwitchType { - match self { - TestModulusSwitchType::Standard => ModulusSwitchType::Standard, - TestModulusSwitchType::DriftTechniqueNoiseReduction( - 
test_modulus_switch_noise_reduction_params, - ) => ModulusSwitchType::DriftTechniqueNoiseReduction( - test_modulus_switch_noise_reduction_params.convert(), - ), - TestModulusSwitchType::CenteredMeanNoiseReduction => { - ModulusSwitchType::CenteredMeanNoiseReduction - } - } - } -} - impl ConvertParams for TestMultiBitParameterSet { fn convert(self) -> MultiBitPBSParameters { let TestMultiBitParameterSet { @@ -184,17 +185,62 @@ impl ConvertParams for TestMultiBitParameterSet { } } -impl ConvertParams for TestParameterSet { - fn convert(self) -> PBSParameters { +impl ConvertParams for TestKS32ParameterSet { + fn convert(self) -> KeySwitch32PBSParameters { + let TestKS32ParameterSet { + lwe_dimension, + glwe_dimension, + polynomial_size, + lwe_noise_distribution, + glwe_noise_distribution, + pbs_base_log, + pbs_level, + ks_base_log, + ks_level, + message_modulus, + ciphertext_modulus, + carry_modulus, + max_noise_level, + log2_p_fail, + modulus_switch_noise_reduction_params, + post_keyswitch_ciphertext_modulus, + } = self; + + KeySwitch32PBSParameters { + lwe_dimension: LweDimension(lwe_dimension), + glwe_dimension: GlweDimension(glwe_dimension), + polynomial_size: PolynomialSize(polynomial_size), + lwe_noise_distribution: lwe_noise_distribution.convert(), + glwe_noise_distribution: glwe_noise_distribution.convert(), + pbs_base_log: DecompositionBaseLog(pbs_base_log), + pbs_level: DecompositionLevelCount(pbs_level), + ks_base_log: DecompositionBaseLog(ks_base_log), + ks_level: DecompositionLevelCount(ks_level), + message_modulus: MessageModulus(message_modulus as u64), + carry_modulus: CarryModulus(carry_modulus as u64), + max_noise_level: MaxNoiseLevel::new(max_noise_level as u64), + log2_p_fail, + post_keyswitch_ciphertext_modulus: CiphertextModulus32::try_new( + post_keyswitch_ciphertext_modulus, + ) + .unwrap(), + ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(), + modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(), + } + } +} + +impl ConvertParams for TestParameterSet { + fn convert(self) -> AtomicPatternParameters { match self { TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => { - PBSParameters::PBS(test_classic_parameter_set.convert()) + AtomicPatternParameters::Standard(test_classic_parameter_set.convert().into()) } TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => { - PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.convert()) + AtomicPatternParameters::Standard(test_parameter_set_multi_bit.convert().into()) } - TestParameterSet::TestKS32ParameterSet(_) => { - panic!("unsupported ks32 parameters for version") + TestParameterSet::TestKS32ParameterSet(test_parameter_set_ks32) => { + AtomicPatternParameters::KeySwitch32(test_parameter_set_ks32.convert()) } } } @@ -246,3 +292,127 @@ impl ConvertParams for TestCompressionParameterSet { } } } + +impl ConvertParams for TestNoiseSquashingParams { + fn convert(self) -> NoiseSquashingParameters { + let TestNoiseSquashingParams { + glwe_dimension, + polynomial_size, + glwe_noise_distribution, + decomp_base_log, + decomp_level_count, + modulus_switch_noise_reduction_params, + message_modulus, + carry_modulus, + ciphertext_modulus, + } = self; + + NoiseSquashingParameters::Classic(NoiseSquashingClassicParameters { + glwe_dimension: GlweDimension(glwe_dimension), + polynomial_size: PolynomialSize(polynomial_size), + glwe_noise_distribution: glwe_noise_distribution.convert(), + decomp_base_log: 
DecompositionBaseLog(decomp_base_log), + decomp_level_count: DecompositionLevelCount(decomp_level_count), + modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(), + message_modulus: MessageModulus(message_modulus as u64), + carry_modulus: CarryModulus(carry_modulus as u64), + ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(), + }) + } +} + +impl ConvertParams for TestNoiseSquashingParamsMultiBit { + fn convert(self) -> NoiseSquashingParameters { + let TestNoiseSquashingParamsMultiBit { + glwe_dimension, + polynomial_size, + glwe_noise_distribution, + decomp_base_log, + decomp_level_count, + grouping_factor, + message_modulus, + carry_modulus, + ciphertext_modulus, + } = self; + + NoiseSquashingParameters::MultiBit(NoiseSquashingMultiBitParameters { + glwe_dimension: GlweDimension(glwe_dimension), + polynomial_size: PolynomialSize(polynomial_size), + glwe_noise_distribution: glwe_noise_distribution.convert(), + decomp_base_log: DecompositionBaseLog(decomp_base_log), + decomp_level_count: DecompositionLevelCount(decomp_level_count), + grouping_factor: LweBskGroupingFactor(grouping_factor), + message_modulus: MessageModulus(message_modulus as u64), + carry_modulus: CarryModulus(carry_modulus as u64), + ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(), + deterministic_execution: false, + }) + } +} + +impl ConvertParams for TestKeySwitchingParams { + fn convert(self) -> ShortintKeySwitchingParameters { + ShortintKeySwitchingParameters { + ks_level: DecompositionLevelCount(self.ks_level), + ks_base_log: DecompositionBaseLog(self.ks_base_log), + destination_key: match &*self.destination_key { + "big" => EncryptionKeyChoice::Big, + "small" => EncryptionKeyChoice::Small, + _ => panic!("Invalid encryption key choice"), + }, + } + } +} + +impl ConvertParams + for TestCompactPublicKeyEncryptionParameters +{ + fn convert(self) -> CompactPublicKeyEncryptionParameters { + CompactPublicKeyEncryptionParameters { + encryption_lwe_dimension: LweDimension(self.encryption_lwe_dimension), + encryption_noise_distribution: self.encryption_noise_distribution.convert(), + message_modulus: MessageModulus(self.message_modulus as u64), + carry_modulus: CarryModulus(self.carry_modulus as u64), + ciphertext_modulus: CoreCiphertextModulus::try_new(self.ciphertext_modulus).unwrap(), + expansion_kind: match &*self.expansion_kind { + "requires_casting" => CompactCiphertextListExpansionKind::RequiresCasting, + _ => panic!("Invalid expansion kind"), + }, + zk_scheme: match &*self.zk_scheme { + "zkv1" => SupportedCompactPkeZkScheme::V1, + "zkv2" => SupportedCompactPkeZkScheme::V2, + _ => panic!("Invalid zk scheme"), + }, + } + } +} + +impl ConvertParams + for TestNoiseSquashingCompressionParameters +{ + fn convert(self) -> NoiseSquashingCompressionParameters { + let TestNoiseSquashingCompressionParameters { + packing_ks_level, + packing_ks_base_log, + packing_ks_polynomial_size, + packing_ks_glwe_dimension, + lwe_per_glwe, + packing_ks_key_noise_distribution, + message_modulus, + carry_modulus, + ciphertext_modulus, + } = self; + + NoiseSquashingCompressionParameters { + packing_ks_level: DecompositionLevelCount(packing_ks_level), + packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log), + packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size), + packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension), + lwe_per_glwe: LweCiphertextCount(lwe_per_glwe), + packing_ks_key_noise_distribution: 
packing_ks_key_noise_distribution.convert(), + message_modulus: MessageModulus(message_modulus as u64), + carry_modulus: CarryModulus(carry_modulus as u64), + ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(), + } + } +} diff --git a/utils/tfhe-backward-compat-data/data/1_5/high_level_api/client_key_for_zk.cbor b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/client_key_for_zk.cbor new file mode 100644 index 0000000000..3fd0d3fa91 --- /dev/null +++ b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/client_key_for_zk.cbor @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87dbbf9c40468c4a86083be5e0c8775bb3d3493d43b0a60f91510e57b20739e7 +size 3286 diff --git a/utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.bcode b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.bcode new file mode 100644 index 0000000000..81b5817906 --- /dev/null +++ b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.bcode @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b55dacf133a9872eda68be150cd314476e6e92177701a934139d17009983f66 +size 3057 diff --git a/utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.cbor b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.cbor new file mode 100644 index 0000000000..c663f9ebf7 --- /dev/null +++ b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/hl_proven_list_zkv2_1_5.cbor @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b60e83254a0f7449647741711e019a73743fd94b550b2188bedaeb3121e773a7 +size 3780 diff --git a/utils/tfhe-backward-compat-data/data/1_5/high_level_api/public_key.cbor b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/public_key.cbor new file mode 100644 index 0000000000..25aeb3b5f9 --- /dev/null +++ b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/public_key.cbor @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:419d5ad2af241abde2fdd31d81f505cb6c39fc3ac7c0de22cfdbac9618c81d73 +size 990 diff --git a/utils/tfhe-backward-compat-data/data/1_5/high_level_api/zk_pke_crs.cbor b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/zk_pke_crs.cbor new file mode 100644 index 0000000000..768230449d --- /dev/null +++ b/utils/tfhe-backward-compat-data/data/1_5/high_level_api/zk_pke_crs.cbor @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a1e2db119c439f3c7093e25925aa9796e5f30edb2a934ef7d2af79731f3e9271 +size 4385130 diff --git a/utils/tfhe-backward-compat-data/data/high_level_api.ron b/utils/tfhe-backward-compat-data/data/high_level_api.ron index 754245f132..fd8187c5d2 100644 --- a/utils/tfhe-backward-compat-data/data/high_level_api.ron +++ b/utils/tfhe-backward-compat-data/data/high_level_api.ron @@ -845,4 +845,24 @@ compressed: false, )), ), + ( + tfhe_version_min: "1.5", + tfhe_module: "high_level_api", + metadata: HlHeterogeneousCiphertextList(( + test_filename: "hl_proven_list_zkv2_1_5", + key_filename: "client_key_for_zk", + compressed: false, + proof_info: Some(( + public_key_filename: "public_key", + params_filename: "zk_pke_crs", + metadata: "2vdrawkcab", + )), + clear_values: [ + 17, + ], + data_kinds: [ + Unsigned, + ], + )), + ), ] \ No newline at end of file From d037581dc4c17a38056356406a5ce30c21e626e5 Mon Sep 17 00:00:00 2001 From: Nicolas Sarlin Date: Wed, 29 Oct 2025 17:01:10 +0100 Subject: [PATCH 4/4] chore: bump tfhe-versionable to 
0.6.3 and tfhe-zk-pok to 0.8.0 --- tfhe-zk-pok/Cargo.toml | 4 ++-- tfhe/Cargo.toml | 4 ++-- utils/tfhe-versionable-derive/Cargo.toml | 2 +- utils/tfhe-versionable/Cargo.toml | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tfhe-zk-pok/Cargo.toml b/tfhe-zk-pok/Cargo.toml index 18b3f7b21d..b746523701 100644 --- a/tfhe-zk-pok/Cargo.toml +++ b/tfhe-zk-pok/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tfhe-zk-pok" -version = "0.7.3" +version = "0.8.0" edition = "2021" keywords = ["zero", "knowledge", "proof", "vector-commitments"] homepage = "https://zama.ai/" @@ -23,7 +23,7 @@ sha3 = "0.10.8" serde = { workspace = true, features = ["default", "derive"] } zeroize = "1.7.0" num-bigint = "0.4.5" -tfhe-versionable = { version = "0.6.2", path = "../utils/tfhe-versionable" } +tfhe-versionable = { version = "0.6.3", path = "../utils/tfhe-versionable" } [features] experimental = [] diff --git a/tfhe/Cargo.toml b/tfhe/Cargo.toml index dbc4c4a48d..857a84e354 100644 --- a/tfhe/Cargo.toml +++ b/tfhe/Cargo.toml @@ -77,8 +77,8 @@ blake3 = { version = "1.8", optional = true } itertools = { workspace = true } rand_core = { version = "0.6.4", features = ["std"] } strum = { version = "0.27", features = ["derive"], optional = true } -tfhe-zk-pok = { version = "0.7.3", path = "../tfhe-zk-pok", optional = true } -tfhe-versionable = { version = "0.6.2", path = "../utils/tfhe-versionable" } +tfhe-zk-pok = { version = "0.8.0", path = "../tfhe-zk-pok", optional = true } +tfhe-versionable = { version = "0.6.3", path = "../utils/tfhe-versionable" } # wasm deps wasm-bindgen = { workspace = true, features = [ diff --git a/utils/tfhe-versionable-derive/Cargo.toml b/utils/tfhe-versionable-derive/Cargo.toml index 4af6755498..e7eb3bea5c 100644 --- a/utils/tfhe-versionable-derive/Cargo.toml +++ b/utils/tfhe-versionable-derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tfhe-versionable-derive" -version = "0.6.2" +version = "0.6.3" edition = "2021" keywords = ["versioning", "serialization", "encoding", "proc-macro", "derive"] homepage = "https://zama.ai/" diff --git a/utils/tfhe-versionable/Cargo.toml b/utils/tfhe-versionable/Cargo.toml index 30bdcaf0e4..b7839676a4 100644 --- a/utils/tfhe-versionable/Cargo.toml +++ b/utils/tfhe-versionable/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tfhe-versionable" -version = "0.6.2" +version = "0.6.3" edition = "2021" keywords = ["versioning", "serialization", "encoding"] homepage = "https://zama.ai/" @@ -26,7 +26,7 @@ toml = "0.8" [dependencies] serde = { workspace = true, features = ["default", "derive"] } -tfhe-versionable-derive = { version = "0.6.2", path = "../tfhe-versionable-derive" } +tfhe-versionable-derive = { version = "0.6.3", path = "../tfhe-versionable-derive" } num-complex = { workspace = true, features = ["serde"] } aligned-vec = { workspace = true, features = ["default", "serde"] }
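
As a rough usage sketch for the API introduced in patch 2 (not taken from the patches themselves, and assuming `CompactPkeProofConformanceParams` and `ZkPkeV2SupportedHashConfig` remain reachable through `tfhe::zk` as the diffs suggest), a verifier that only wants to accept PkeV2 proofs hashed with the new default configuration could forbid the two legacy configurations explicitly:

    use tfhe::zk::{CompactPkeProofConformanceParams, ZkPkeV2SupportedHashConfig};

    // Start from an already-built conformance parameter set and strip the two
    // hash configurations that predate this change. `V0_8_0`, the current
    // default used by `prove`, remains accepted, so freshly generated proofs
    // still pass the conformance check while proofs carrying `V0_4_0` or
    // `V0_7_0` are rejected.
    fn strict_pke_v2_conformance(
        base: CompactPkeProofConformanceParams,
    ) -> CompactPkeProofConformanceParams {
        base.forbid_hash_config(ZkPkeV2SupportedHashConfig::V0_4_0)
            .forbid_hash_config(ZkPkeV2SupportedHashConfig::V0_7_0)
    }

Since `forbid_hash_config` consumes `self` and returns `Self`, restrictions compose by chaining, and per the `CompactPkeProofConformanceParams::forbid_hash_config` match in tfhe/src/zk/mod.rs the PkeV1 variant is passed through unchanged.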