Unify the cross-group DLEq challenges

This does reduce the strength of the challenges to that of the weaker
field, yet that has no impact on whether or not this is ZK, since the
key is shared across fields.

Saves ~8kb.
Luke Parker
2022-06-30 11:23:13 -04:00
parent 7890827a48
commit 4eafbe2a09
3 changed files with 16 additions and 18 deletions
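For context, a minimal sketch of the unified challenge, assuming semantics the diff only implies (the actual scalar_normalize/scalar_convert live in scalar.rs, which this commit only re-imports): one wide transcript challenge is truncated to the bits both scalar fields can hold and then rebuilt as the same integer in each field, so both groups share one challenge whose space is bounded by the smaller field. All names below are illustrative, not the crate's.

use group::ff::{Field, PrimeField, PrimeFieldBits};

// Illustrative sketch, not the crate's scalar.rs: keep only the bits that fit
// in both fields, then rebuild that integer in F0 and F1 so both groups see
// the same challenge. Its width, and thus the challenge strength, is capped
// by the smaller field's capacity.
fn normalize_sketch<F0: PrimeFieldBits, F1: PrimeFieldBits>(challenge: F0) -> (F0, F1) {
  let mutual_capacity = F0::CAPACITY.min(F1::CAPACITY) as usize;
  let mut c0 = F0::zero();
  let mut c1 = F1::zero();
  // Walk the retained bits most-significant first, double-and-adding in both
  // fields in lockstep.
  for bit in challenge.to_le_bits().iter().take(mutual_capacity).rev() {
    c0 = c0.double();
    c1 = c1.double();
    if *bit {
      c0 += F0::one();
      c1 += F1::one();
    }
  }
  (c0, c1)
}

The nonces function in the diff below now returns such a pair from a single challenge(&mut transcript) call, where it previously drew two independent challenges.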


@@ -8,7 +8,7 @@ use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup};
 use crate::{Generators, challenge};
 
 pub mod scalar;
-use scalar::scalar_normalize;
+use scalar::{scalar_normalize, scalar_convert};
 
 pub(crate) mod schnorr;
 use schnorr::SchnorrPoK;
@@ -32,7 +32,7 @@ pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct Bit<G0: PrimeGroup, G1: PrimeGroup> {
   commitments: (G0, G1),
-  e: (G0::Scalar, G1::Scalar),
+  e: G0::Scalar,
   s: [(G0::Scalar, G1::Scalar); 2]
 }
@@ -41,8 +41,7 @@ impl<G0: PrimeGroup, G1: PrimeGroup> Bit<G0, G1> {
   pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
     w.write_all(self.commitments.0.to_bytes().as_ref())?;
     w.write_all(self.commitments.1.to_bytes().as_ref())?;
-    w.write_all(self.e.0.to_repr().as_ref())?;
-    w.write_all(self.e.1.to_repr().as_ref())?;
+    w.write_all(self.e.to_repr().as_ref())?;
     for i in 0 .. 2 {
       w.write_all(self.s[i].0.to_repr().as_ref())?;
       w.write_all(self.s[i].1.to_repr().as_ref())?;
@@ -55,7 +54,7 @@ impl<G0: PrimeGroup, G1: PrimeGroup> Bit<G0, G1> {
     Ok(
       Bit {
         commitments: (read_point(r)?, read_point(r)?),
-        e: (read_scalar(r)?, read_scalar(r)?),
+        e: read_scalar(r)?,
         s: [
           (read_scalar(r)?, read_scalar(r)?),
           (read_scalar(r)?, read_scalar(r)?)
@@ -71,6 +70,8 @@ pub enum DLEqError {
   InvalidProofOfKnowledge,
   #[error("invalid proof length")]
   InvalidProofLength,
+  #[error("invalid challenge")]
+  InvalidChallenge,
   #[error("invalid proof")]
   InvalidProof
 }
@@ -117,7 +118,7 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
   fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
     transcript.append_message(b"nonce_0", nonces.0.to_bytes().as_ref());
     transcript.append_message(b"nonce_1", nonces.1.to_bytes().as_ref());
-    (challenge(&mut transcript, b"challenge_G"), challenge(&mut transcript, b"challenge_H"))
+    scalar_normalize(challenge(&mut transcript))
   }
 
   #[allow(non_snake_case)]
@@ -134,7 +135,6 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     )
   }
 
-  // TODO: Use multiexp here after https://github.com/serai-dex/serai/issues/17
   fn reconstruct_key<G: PrimeGroup>(
     commitments: impl Iterator<Item = G>
   ) -> G where G::Scalar: PrimeFieldBits {
@@ -240,9 +240,9 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
       bits.push(
         if *bit {
-          Bit { commitments, e: e_0, s: [s_1, s_0] }
+          Bit { commitments, e: e_0.0, s: [s_1, s_0] }
         } else {
-          Bit { commitments, e: e_1, s: [s_0, s_1] }
+          Bit { commitments, e: e_1.0, s: [s_0, s_1] }
         }
       );
@@ -282,7 +282,8 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
     for (i, bit) in self.bits.iter().enumerate() {
       Self::transcript_bit(transcript, i, bit.commitments);
-      if bit.e != Self::R_nonces(
+      let bit_e = (bit.e, scalar_convert(bit.e).ok_or(DLEqError::InvalidChallenge)?);
+      if bit_e != Self::R_nonces(
         transcript.clone(),
         generators,
         bit.s[0],
@@ -295,7 +296,7 @@ impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1>
           generators,
           bit.s[1],
           bit.commitments,
-          bit.e
+          bit_e
         )
       ) {
        return Err(DLEqError::InvalidProof);
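On the verification side (the hunks just above), only the G0 challenge is kept per bit; its G1 counterpart is recomputed with scalar_convert, and a challenge that could not have come out of normalization is rejected as InvalidChallenge. A hypothetical counterpart to the sketch above, again with illustrative names:

use group::ff::{Field, PrimeField, PrimeFieldBits};

// Illustrative sketch of the conversion the verifier needs: map a G0 challenge
// into G1's field, failing if it sets bits beyond what both fields can hold
// (an honest prover only produces normalized, truncated challenges).
fn convert_sketch<F0: PrimeFieldBits, F1: PrimeFieldBits>(e: F0) -> Option<F1> {
  let mutual_capacity = F0::CAPACITY.min(F1::CAPACITY) as usize;
  let bits = e.to_le_bits();
  if bits.iter().skip(mutual_capacity).any(|bit| *bit) {
    return None;
  }
  // Rebuild the same integer in F1, most-significant bit first.
  let mut res = F1::zero();
  for bit in bits.iter().take(mutual_capacity).rev() {
    res = res.double();
    if *bit {
      res += F1::one();
    }
  }
  Some(res)
}

Since Bit now stores and serializes a single scalar for e (the serialize/deserialize hunks above), one scalar per proven bit is dropped from the proof, which is presumably where the ~8kb saving comes from.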


@@ -28,7 +28,7 @@ impl<G: PrimeGroup> SchnorrPoK<G> {
     transcript.append_message(b"generator", generator.to_bytes().as_ref());
     transcript.append_message(b"nonce", R.to_bytes().as_ref());
     transcript.append_message(b"public_key", A.to_bytes().as_ref());
-    challenge(transcript, b"challenge")
+    challenge(transcript)
   }
 
   pub(crate) fn prove<R: RngCore + CryptoRng, T: Transcript>(


@@ -33,10 +33,7 @@ impl<G: PrimeGroup> Generators<G> {
   }
 }
 
-pub(crate) fn challenge<T: Transcript, F: PrimeField>(
-  transcript: &mut T,
-  label: &'static [u8]
-) -> F {
+pub(crate) fn challenge<T: Transcript, F: PrimeField>(transcript: &mut T) -> F {
   assert!(F::NUM_BITS <= 384);
 
   // From here, there are three ways to get a scalar under the ff/group API
@@ -44,7 +41,7 @@ pub(crate) fn challenge<T: Transcript, F: PrimeField>(
   // 2: Grabbing a UInt library to perform reduction by the modulus, then determining endianess
   //    and loading it in
   // 3: Iterating over each byte and manually doubling/adding. This is simplest
-  let challenge_bytes = transcript.challenge(label);
+  let challenge_bytes = transcript.challenge(b"challenge");
   assert!(challenge_bytes.as_ref().len() == 64);
   let mut challenge = F::zero();
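The comments preserved in this hunk list three ways to load the transcript challenge into a field element and call option 3 (iterating over the bytes, doubling and adding) the simplest; the loop itself sits below the hunk. A sketch of that technique under the assumptions visible here (64 challenge bytes, F::NUM_BITS <= 384), not the crate's exact body:

use group::ff::{Field, PrimeField};

// Sketch of option 3: fold 64 uniform challenge bytes into a field element by
// repeated doubling and adding. Doubling in the field reduces modulo the field
// order as it goes, and 512 input bits against NUM_BITS <= 384 leaves ample
// slack for the result to stay close to uniform.
fn load_challenge_sketch<F: PrimeField>(challenge_bytes: &[u8]) -> F {
  assert_eq!(challenge_bytes.len(), 64);
  let mut challenge = F::zero();
  for byte in challenge_bytes {
    // Shift the accumulator left by one byte...
    for _ in 0 .. 8 {
      challenge = challenge.double();
    }
    // ... then add in this byte's value.
    challenge += F::from(u64::from(*byte));
  }
  challenge
}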
@@ -94,7 +91,7 @@ impl<G: PrimeGroup> DLEqProof<G> {
     transcript.append_message(b"nonce_alternate", nonces.1.to_bytes().as_ref());
     transcript.append_message(b"point_primary", points.0.to_bytes().as_ref());
     transcript.append_message(b"point_alternate", points.1.to_bytes().as_ref());
-    challenge(transcript, b"challenge")
+    challenge(transcript)
   }
 
   pub fn prove<R: RngCore + CryptoRng, T: Transcript>(