diff --git a/crypto/dleq/src/cross_group/mod.rs b/crypto/dleq/src/cross_group/mod.rs
index 4a0ce530..ff28bca7 100644
--- a/crypto/dleq/src/cross_group/mod.rs
+++ b/crypto/dleq/src/cross_group/mod.rs
@@ -3,7 +3,7 @@ use rand_core::{RngCore, CryptoRng};

 use digest::Digest;

-use subtle::{Choice, ConditionallySelectable};
+use subtle::{ConstantTimeEq, ConditionallySelectable};

 use transcript::Transcript;

@@ -34,24 +34,168 @@ pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G> {
+pub struct Bits<G0: PrimeGroup, G1: PrimeGroup, const POSSIBLE_VALUES: usize> {
   commitments: (G0, G1),
   // Merged challenges have a slight security reduction, yet one already applied to the scalar
   // being proven for, and this saves ~8kb. Alternatively, challenges could be redefined as a seed,
   // present here, which is then hashed for each of the two challenges, remaining unbiased/unique
   // while maintaining the bandwidth savings, yet also while adding 252 hashes for
   // Secp256k1/Ed25519
-  e: G0::Scalar,
-  s: [(G0::Scalar, G1::Scalar); 2]
+  e_0: G0::Scalar,
+  s: [(G0::Scalar, G1::Scalar); POSSIBLE_VALUES]
 }

-impl<G0: PrimeGroup, G1: PrimeGroup> Bit<G0, G1> {
+impl<G0: PrimeGroup, G1: PrimeGroup, const POSSIBLE_VALUES: usize> Bits<G0, G1, POSSIBLE_VALUES>
+  where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
+  pub fn transcript<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
+    if i == 0 {
+      transcript.domain_separate(b"cross_group_dleq");
+    }
+    transcript.append_message(b"bit_group", &u16::try_from(i).unwrap().to_le_bytes());
+    transcript.append_message(b"commitment_0", commitments.0.to_bytes().as_ref());
+    transcript.append_message(b"commitment_1", commitments.1.to_bytes().as_ref());
+  }
+
+  #[allow(non_snake_case)]
+  fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
+    transcript.append_message(b"nonce_0", nonces.0.to_bytes().as_ref());
+    transcript.append_message(b"nonce_1", nonces.1.to_bytes().as_ref());
+    mutual_scalar_from_bytes(transcript.challenge(b"challenge").as_ref())
+  }
+
+  #[allow(non_snake_case)]
+  fn R(
+    generators: (Generators<G0>, Generators<G1>),
+    s: (G0::Scalar, G1::Scalar),
+    A: (G0, G1),
+    e: (G0::Scalar, G1::Scalar)
+  ) -> (G0, G1) {
+    (((generators.0.alt * s.0) - (A.0 * e.0)), ((generators.1.alt * s.1) - (A.1 * e.1)))
+  }
+
+  #[allow(non_snake_case)]
+  fn R_nonces<T: Transcript>(
+    transcript: T,
+    generators: (Generators<G0>, Generators<G1>),
+    s: (G0::Scalar, G1::Scalar),
+    A: (G0, G1),
+    e: (G0::Scalar, G1::Scalar)
+  ) -> (G0::Scalar, G1::Scalar) {
+    Self::nonces(transcript, Self::R(generators, s, A, e))
+  }
+
+  fn ring(pow_2: (G0, G1), commitments: (G0, G1)) -> [(G0, G1); POSSIBLE_VALUES] {
+    let mut res = [(G0::identity(), G1::identity()); POSSIBLE_VALUES];
+    res[POSSIBLE_VALUES - 1] = commitments;
+    for i in (0 .. (POSSIBLE_VALUES - 1)).rev() {
+      res[i] = (res[i + 1].0 - pow_2.0, res[i + 1].1 - pow_2.1);
+    }
+    res
+  }
+
+  pub fn prove<R: RngCore + CryptoRng, T: Clone + Transcript>(
+    rng: &mut R,
+    transcript: &mut T,
+    generators: (Generators<G0>, Generators<G1>),
+    i: usize,
+    pow_2: &mut (G0, G1),
+    bits: u8,
+    blinding_key: (G0::Scalar, G1::Scalar)
+  ) -> Bits<G0, G1, POSSIBLE_VALUES> {
+    // While it is possible to use larger values, it's not efficient to do so
+    // 2 + 2 == 2^2, yet 2 + 2 + 2 < 2^3
+    debug_assert!((POSSIBLE_VALUES == 2) || (POSSIBLE_VALUES == 4));
+
+    let mut commitments = (
+      (generators.0.alt * blinding_key.0),
+      (generators.1.alt * blinding_key.1)
+    );
+    commitments.0 += pow_2.0 * G0::Scalar::from(bits.into());
+    commitments.1 += pow_2.1 * G1::Scalar::from(bits.into());
+    Self::transcript(transcript, i, commitments);
+
+    let ring = Self::ring(*pow_2, commitments);
+    // Invert the index to get the raw blinding key's position in the ring
+    let actual = POSSIBLE_VALUES - 1 - usize::from(bits);
+
+    let mut e_0 = G0::Scalar::zero();
+    let mut s = [(G0::Scalar::zero(), G1::Scalar::zero()); POSSIBLE_VALUES];
+
+    let r = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
+    #[allow(non_snake_case)]
+    let original_R = (generators.0.alt * r.0, generators.1.alt * r.1);
+    #[allow(non_snake_case)]
+    let mut R = original_R;
+
+    for i in ((actual + 1) .. (actual + POSSIBLE_VALUES + 1)).map(|i| i % POSSIBLE_VALUES) {
+      let e = Self::nonces(transcript.clone(), R);
+      e_0 = G0::Scalar::conditional_select(&e_0, &e.0, usize::ct_eq(&i, &1));
+
+      // Solve for the real index
+      if i == actual {
+        s[i] = (r.0 + (e.0 * blinding_key.0), r.1 + (e.1 * blinding_key.1));
+        debug_assert_eq!(Self::R(generators, s[i], ring[actual], e), original_R);
+        break;
+      // Generate a decoy response
+      } else {
+        s[i] = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
+      }
+
+      R = Self::R(generators, s[i], ring[i], e);
+    }
+
+    pow_2.0 = pow_2.0.double();
+    pow_2.1 = pow_2.1.double();
+    if POSSIBLE_VALUES == 4 {
+      pow_2.0 = pow_2.0.double();
+      pow_2.1 = pow_2.1.double();
+    }
+
+    Bits { commitments, e_0, s }
+  }
+
+  pub fn verify<T: Clone + Transcript>(
+    &self,
+    transcript: &mut T,
+    generators: (Generators<G0>, Generators<G1>),
+    i: usize,
+    pow_2: &mut (G0, G1)
+  ) -> Result<(), DLEqError> {
+    debug_assert!((POSSIBLE_VALUES == 2) || (POSSIBLE_VALUES == 4));
+
+    Self::transcript(transcript, i, self.commitments);
+
+    let ring = Self::ring(*pow_2, self.commitments);
+    let e_0 = (self.e_0, scalar_convert(self.e_0).ok_or(DLEqError::InvalidChallenge)?);
+    let mut e = None;
+    for i in (1 .. (POSSIBLE_VALUES + 1)).map(|i| i % POSSIBLE_VALUES) {
+      e = Some(
+        Self::R_nonces(transcript.clone(), generators, self.s[i], ring[i], e.unwrap_or(e_0))
+      );
+    }
+
+    // Will panic if the above loop is never run somehow
+    // If e wasn't an Option, and instead initially set to e_0, it'd always pass
+    if e_0 != e.unwrap() {
+      return Err(DLEqError::InvalidProof);
+    }
+
+    pow_2.0 = pow_2.0.double();
+    pow_2.1 = pow_2.1.double();
+    if POSSIBLE_VALUES == 4 {
+      pow_2.0 = pow_2.0.double();
+      pow_2.1 = pow_2.1.double();
+    }
+
+    Ok(())
+  }
+
   #[cfg(feature = "serialize")]
   pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
     w.write_all(self.commitments.0.to_bytes().as_ref())?;
     w.write_all(self.commitments.1.to_bytes().as_ref())?;
-    w.write_all(self.e.to_repr().as_ref())?;
-    for i in 0 .. 2 {
+    w.write_all(self.e_0.to_repr().as_ref())?;
+    for i in 0 .. POSSIBLE_VALUES {
       w.write_all(self.s[i].0.to_repr().as_ref())?;
       w.write_all(self.s[i].1.to_repr().as_ref())?;
     }
@@ -59,17 +203,14 @@ impl<G0: PrimeGroup, G1: PrimeGroup> Bit<G0, G1> {
   }

   #[cfg(feature = "serialize")]
-  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Bit<G0, G1>> {
-    Ok(
-      Bit {
-        commitments: (read_point(r)?, read_point(r)?),
-        e: read_scalar(r)?,
-        s: [
-          (read_scalar(r)?, read_scalar(r)?),
-          (read_scalar(r)?, read_scalar(r)?)
-        ]
-      }
-    )
+  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Bits<G0, G1, POSSIBLE_VALUES>> {
+    let commitments = (read_point(r)?, read_point(r)?);
+    let e_0 = read_scalar(r)?;
+    let mut s = [(G0::Scalar::zero(), G1::Scalar::zero()); POSSIBLE_VALUES];
+    for i in 0 .. POSSIBLE_VALUES {
+      s[i] = (read_scalar(r)?, read_scalar(r)?);
+    }
+    Ok(Bits { commitments, e_0, s })
   }
 }

@@ -89,7 +230,8 @@ pub enum DLEqError {
 // anyone who wants it
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct DLEqProof<G0: PrimeGroup, G1: PrimeGroup> {
-  bits: Vec<Bit<G0, G1>>,
+  bits: Vec<Bits<G0, G1, 4>>,
+  remainder: Option<Bits<G0, G1, 2>>,
   poks: (SchnorrPoK<G0>, SchnorrPoK<G1>)
 }

@@ -121,43 +263,17 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     blinding_key
   }

-  #[allow(non_snake_case)]
-  fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
-    transcript.append_message(b"nonce_0", nonces.0.to_bytes().as_ref());
-    transcript.append_message(b"nonce_1", nonces.1.to_bytes().as_ref());
-    mutual_scalar_from_bytes(transcript.challenge(b"challenge").as_ref())
-  }
-
-  #[allow(non_snake_case)]
-  fn R_nonces<T: Transcript>(
-    transcript: T,
-    generators: (Generators<G0>, Generators<G1>),
-    s: (G0::Scalar, G1::Scalar),
-    A: (G0, G1),
-    e: (G0::Scalar, G1::Scalar)
-  ) -> (G0::Scalar, G1::Scalar) {
-    Self::nonces(
-      transcript,
-      (((generators.0.alt * s.0) - (A.0 * e.0)), ((generators.1.alt * s.1) - (A.1 * e.1)))
-    )
-  }
-
   fn reconstruct_keys(&self) -> (G0, G1) {
+    let remainder = self.remainder
+      .as_ref()
+      .map(|bit| bit.commitments)
+      .unwrap_or((G0::identity(), G1::identity()));
     (
-      self.bits.iter().map(|bit| bit.commitments.0).sum(),
-      self.bits.iter().map(|bit| bit.commitments.1).sum()
+      self.bits.iter().map(|bit| bit.commitments.0).sum::<G0>() + remainder.0,
+      self.bits.iter().map(|bit| bit.commitments.1).sum::<G1>() + remainder.1
     )
   }

-  fn transcript_bit<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
-    if i == 0 {
-      transcript.domain_separate(b"cross_group_dleq");
-    }
-    transcript.append_message(b"bit", &u16::try_from(i).unwrap().to_le_bytes());
-    transcript.append_message(b"commitment_0", commitments.0.to_bytes().as_ref());
-    transcript.append_message(b"commitment_1", commitments.1.to_bytes().as_ref());
-  }
-
   fn prove_internal<R: RngCore + CryptoRng, T: Clone + Transcript>(
     rng: &mut R,
     transcript: &mut T,
@@ -176,16 +292,7 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     );

     let mut blinding_key_total = (G0::Scalar::zero(), G1::Scalar::zero());
-    let mut pow_2 = (generators.0.primary, generators.1.primary);
-
-    let raw_bits = f.0.to_le_bits();
-    let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
-    let mut bits = Vec::with_capacity(capacity);
-    for (i, bit) in raw_bits.iter().enumerate() {
-      let bit = *bit as u8;
-      debug_assert_eq!(bit | 1, 1);
-
-      let last = i == (capacity - 1);
+    let mut blinding_key = |rng: &mut R, last| {
       let blinding_key = (
         Self::blinding_key(&mut *rng, &mut blinding_key_total.0, last),
         Self::blinding_key(&mut *rng, &mut blinding_key_total.1, last)
@@ -194,45 +301,54 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
       );
       if last {
         debug_assert_eq!(blinding_key_total.0, G0::Scalar::zero());
        debug_assert_eq!(blinding_key_total.1, G1::Scalar::zero());
       }
+      blinding_key
+    };

-      let mut commitments = (
-        (generators.0.alt * blinding_key.0),
-        (generators.1.alt * blinding_key.1)
-      );
-      commitments.0 += pow_2.0 * G0::Scalar::from(bit.into());
-      commitments.1 += pow_2.1 * G1::Scalar::from(bit.into());
-      Self::transcript_bit(transcript, i, commitments);
+    let mut pow_2 = (generators.0.primary, generators.1.primary);

-      let nonces = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
-      let e_0 = Self::nonces(
-        transcript.clone(),
-        ((generators.0.alt * nonces.0), (generators.1.alt * nonces.1))
-      );
-      let mut s_0 = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
-
-      let mut to_sign = commitments;
-      let bit = Choice::from(bit);
-      let inv_bit = (!bit).unwrap_u8();
-      to_sign.0 -= pow_2.0 * G0::Scalar::from(inv_bit.into());
-      to_sign.1 -= pow_2.1 * G1::Scalar::from(inv_bit.into());
-      let e_1 = Self::R_nonces(transcript.clone(), generators, (s_0.0, s_0.1), to_sign, e_0);
-      let mut s_1 = (nonces.0 + (e_1.0 * blinding_key.0), nonces.1 + (e_1.1 * blinding_key.1));
-
-      let e = G0::Scalar::conditional_select(&e_1.0, &e_0.0, bit);
-      G0::Scalar::conditional_swap(&mut s_1.0, &mut s_0.0, bit);
-      G1::Scalar::conditional_swap(&mut s_1.1, &mut s_0.1, bit);
-      bits.push(Bit { commitments, e, s: [s_0, s_1] });
-
-      // Break in order to not generate commitments for unused bits
-      if last {
+    let raw_bits = f.0.to_le_bits();
+    let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
+    let mut bits = Vec::with_capacity(capacity);
+    let mut these_bits: u8 = 0;
+    for (i, bit) in raw_bits.iter().enumerate() {
+      if i > ((capacity / 2) * 2) {
         break;
       }
-      pow_2.0 = pow_2.0.double();
-      pow_2.1 = pow_2.1.double();
+      let bit = *bit as u8;
+      debug_assert_eq!(bit | 1, 1);
+
+      if (i % 2) == 0 {
+        these_bits = bit;
+        continue;
+      } else {
+        these_bits += bit << 1;
+      }
+
+      let last = i == (capacity - 1);
+      let blinding_key = blinding_key(&mut *rng, last);
+      bits.push(
+        Bits::prove(&mut *rng, transcript, generators, i / 2, &mut pow_2, these_bits, blinding_key)
+      );
     }

-    let proof = DLEqProof { bits, poks };
+    let mut remainder = None;
+    if (capacity % 2) == 1 {
+      let blinding_key = blinding_key(&mut *rng, true);
+      remainder = Some(
+        Bits::prove(
+          &mut *rng,
+          transcript,
+          generators,
+          capacity / 2,
+          &mut pow_2,
+          these_bits,
+          blinding_key
+        )
+      );
+    }
+
+    let proof = DLEqProof { bits, remainder, poks };
     debug_assert_eq!(
       proof.reconstruct_keys(),
       (generators.0.primary * f.0, generators.1.primary * f.1)
     );
@@ -280,7 +396,11 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     generators: (Generators<G0>, Generators<G1>)
   ) -> Result<(G0, G1), DLEqError> {
     let capacity = G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY);
-    if self.bits.len() != capacity.try_into().unwrap() {
+    if (self.bits.len() != (capacity / 2).try_into().unwrap()) || (
+      // This shouldn't be possible, as deserialize ensures this is present for fields with this
+      // characteristic, and proofs locally generated will have it. Regardless, best to ensure
+      self.remainder.is_none() && ((capacity % 2) == 1)
+    ) {
       return Err(DLEqError::InvalidProofLength);
     }

@@ -294,31 +414,11 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     }

     let mut pow_2 = (generators.0.primary, generators.1.primary);
-    for (i, bit) in self.bits.iter().enumerate() {
-      Self::transcript_bit(transcript, i, bit.commitments);
-
-      let bit_e = (bit.e, scalar_convert(bit.e).ok_or(DLEqError::InvalidChallenge)?);
-      if bit_e != Self::R_nonces(
-        transcript.clone(),
-        generators,
-        bit.s[0],
-        (
-          bit.commitments.0 - pow_2.0,
-          bit.commitments.1 - pow_2.1
-        ),
-        Self::R_nonces(
-          transcript.clone(),
-          generators,
-          bit.s[1],
-          bit.commitments,
-          bit_e
-        )
-      ) {
-        return Err(DLEqError::InvalidProof);
-      }
-
-      pow_2.0 = pow_2.0.double();
-      pow_2.1 = pow_2.1.double();
+    for (i, bits) in self.bits.iter().enumerate() {
+      bits.verify(transcript, generators, i, &mut pow_2)?;
+    }
+    if let Some(bit) = &self.remainder {
+      bit.verify(transcript, generators, self.bits.len(), &mut pow_2)?;
     }

     Ok(keys)
@@ -329,6 +429,9 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     for bit in &self.bits {
       bit.serialize(w)?;
     }
+    if let Some(bit) = &self.remainder {
+      bit.serialize(w)?;
+    }
     self.poks.0.serialize(w)?;
     self.poks.1.serialize(w)
   }
@@ -337,9 +440,19 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
   pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<DLEqProof<G0, G1>> {
     let capacity = G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY);
     let mut bits = Vec::with_capacity(capacity.try_into().unwrap());
-    for _ in 0 .. capacity {
-      bits.push(Bit::deserialize(r)?);
+    for _ in 0 .. (capacity / 2) {
+      bits.push(Bits::deserialize(r)?);
     }
-    Ok(DLEqProof { bits, poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?) })
+    let mut remainder = None;
+    if (capacity % 2) == 1 {
+      remainder = Some(Bits::deserialize(r)?);
+    }
+    Ok(
+      DLEqProof {
+        bits,
+        remainder,
+        poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?)
+      }
+    )
   }
 }
diff --git a/crypto/dleq/src/tests/cross_group/mod.rs b/crypto/dleq/src/tests/cross_group/mod.rs
index 93ffeb0f..9f3a1916 100644
--- a/crypto/dleq/src/tests/cross_group/mod.rs
+++ b/crypto/dleq/src/tests/cross_group/mod.rs
@@ -14,7 +14,7 @@ use blake2::{Digest, Blake2b512};
 use transcript::RecommendedTranscript;

-use crate::{Generators, cross_group::DLEqProof};
+use crate::{Generators, cross_group::{DLEqProof, scalar::mutual_scalar_from_bytes}};

 fn transcript() -> RecommendedTranscript {
   RecommendedTranscript::new(b"Cross-Group DLEq Proof Test")
 }
@@ -104,3 +104,35 @@ fn test_cross_group_dleq() {
     }
   }
 }
+
+#[test]
+fn test_remainder() {
+  // Uses Secp256k1 for both to achieve an odd capacity of 255
+  assert_eq!(Scalar::CAPACITY, 255);
+  let generators = (generators().0, generators().0);
+  let keys = mutual_scalar_from_bytes(&[0xFF; 32]);
+  assert_eq!(keys.0, keys.1);
+
+  let (proof, res) = DLEqProof::prove_without_bias(
+    &mut OsRng,
+    &mut transcript(),
+    generators,
+    keys.0
+  ).unwrap();
+  assert_eq!(keys, res);
+
+  let public_keys = proof.verify(&mut transcript(), generators).unwrap();
+  assert_eq!(generators.0.primary * keys.0, public_keys.0);
+  assert_eq!(generators.1.primary * keys.1, public_keys.1);
+
+  #[cfg(feature = "serialize")]
+  {
+    let mut buf = vec![];
+    proof.serialize(&mut buf).unwrap();
+    let deserialized = DLEqProof::<ProjectivePoint, ProjectivePoint>::deserialize(
+      &mut std::io::Cursor::new(&buf)
+    ).unwrap();
+    assert_eq!(proof, deserialized);
+    deserialized.verify(&mut transcript(), generators).unwrap();
+  }
+}