mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-09 04:39:24 +00:00
Implement a DLEq library
While Serai only needs the simple DLEq which was already present under monero, this migrates the implementation of the cross-group DLEq I maintain into Serai. This was done to have full access to the ecosystem of libraries built under Serai while also ensuring support for it. The cross_group curve, which is extremely experimental, is feature-flagged off. So is the built-in serialization functionality, as this should be possible to make no-std once const generics are fully featured, yet the implemented serialization adds the additional barrier of std::io.
This commit is contained in:
324
crypto/dleq/src/cross_group/mod.rs
Normal file
324
crypto/dleq/src/cross_group/mod.rs
Normal file
@@ -0,0 +1,324 @@
|
||||
use thiserror::Error;
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup};
|
||||
|
||||
use crate::{Generators, challenge};
|
||||
|
||||
pub mod scalar;
|
||||
use scalar::scalar_normalize;
|
||||
|
||||
pub(crate) mod schnorr;
|
||||
use schnorr::SchnorrPoK;
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
use std::io::{Read, Write};
|
||||
#[cfg(feature = "serialize")]
|
||||
use crate::read_scalar;
|
||||
|
||||
#[cfg(feature = "serialize")]
/// Read a group element from `r` in its canonical encoding.
///
/// Reads exactly `G::Repr`'s length in bytes and errors (ErrorKind::Other) if the bytes are not
/// a valid point encoding.
pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G> {
  let mut repr = G::Repr::default();
  r.read_exact(repr.as_mut())?;
  // from_bytes returns a CtOption; convert to Option and map the invalid case to an IO error
  // instead of branching on is_none() and unwrapping
  Option::<G>::from(G::from_bytes(&repr))
    .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "invalid point"))
}
|
||||
|
||||
/// A single bit's worth of the cross-group DLEq proof: commitments to the bit over both groups,
/// plus the challenge/response values proving the bit is a 0 or a 1 (effectively a two-member
/// OR proof — one member per possible bit value).
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Bit<G0: PrimeGroup, G1: PrimeGroup> {
  // Commitments to this bit, one per group
  commitments: (G0, G1),
  // Initial challenge for the two-option ring
  e: (G0::Scalar, G1::Scalar),
  // Responses for each ring member, over both groups
  s: [(G0::Scalar, G1::Scalar); 2]
}
|
||||
|
||||
impl<G0: PrimeGroup, G1: PrimeGroup> Bit<G0, G1> {
  /// Write this bit as commitments || e || s, with each tuple written G0-element first.
  /// The field order here is the wire format and must stay in sync with `deserialize`.
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    w.write_all(self.commitments.0.to_bytes().as_ref())?;
    w.write_all(self.commitments.1.to_bytes().as_ref())?;
    w.write_all(self.e.0.to_repr().as_ref())?;
    w.write_all(self.e.1.to_repr().as_ref())?;
    for i in 0 .. 2 {
      w.write_all(self.s[i].0.to_repr().as_ref())?;
      w.write_all(self.s[i].1.to_repr().as_ref())?;
    }
    Ok(())
  }

  /// Read a Bit in the exact order written by `serialize`, erroring on any invalid point or
  /// scalar encoding.
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Bit<G0, G1>> {
    Ok(
      Bit {
        commitments: (read_point(r)?, read_point(r)?),
        e: (read_scalar(r)?, read_scalar(r)?),
        s: [
          (read_scalar(r)?, read_scalar(r)?),
          (read_scalar(r)?, read_scalar(r)?)
        ]
      }
    )
  }
}
|
||||
|
||||
/// Errors returned when verifying a cross-group DLEq proof.
#[derive(Error, PartialEq, Eq, Debug)]
pub enum DLEqError {
  /// One of the two Schnorr proofs of knowledge for the reconstructed keys failed.
  #[error("invalid proof of knowledge")]
  InvalidProofOfKnowledge,
  /// The proof does not contain exactly the mutual-capacity number of bits.
  #[error("invalid proof length")]
  InvalidProofLength,
  /// A per-bit challenge/response check failed.
  #[error("invalid proof")]
  InvalidProof
}
|
||||
|
||||
// Debug would be such a dump of data this likely isn't helpful, but at least it's available to
// anyone who wants it
/// Proof that two public keys, one per group, share a discrete logarithm, built bit-by-bit from
/// commitments plus Schnorr proofs of knowledge for the aggregated keys.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DLEqProof<G0: PrimeGroup, G1: PrimeGroup> {
  // One entry per bit of the mutual capacity of the two scalar fields
  bits: Vec<Bit<G0, G1>>,
  // Proofs of knowledge of the discrete log of each reconstructed key
  poks: (SchnorrPoK<G0>, SchnorrPoK<G1>)
}
|
||||
|
||||
impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1> {
  // Commit the generator sets and both claimed public keys to the transcript, so the proof is
  // bound to this exact statement
  fn initialize_transcript<T: Transcript>(
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    keys: (G0, G1)
  ) {
    generators.0.transcript(transcript);
    generators.1.transcript(transcript);
    transcript.domain_separate(b"points");
    transcript.append_message(b"point_0", keys.0.to_bytes().as_ref());
    transcript.append_message(b"point_1", keys.1.to_bytes().as_ref());
  }

  // Sample a per-bit blinding key while tracking the running pow_2-weighted sum. On the last
  // bit, the key is solved for (-total / pow_2) so the weighted sum of all blinding keys is
  // exactly zero — making the sum of the bit commitments reconstruct the key with no residue.
  // `total` and `pow_2` are updated in place for the next call.
  fn blinding_key<R: RngCore + CryptoRng, F: PrimeField>(
    rng: &mut R,
    total: &mut F,
    pow_2: &mut F,
    last: bool
  ) -> F {
    let blinding_key = if last {
      -*total * pow_2.invert().unwrap()
    } else {
      F::random(&mut *rng)
    };
    *total += blinding_key * *pow_2;
    *pow_2 = pow_2.double();
    blinding_key
  }

  // Append the nonce commitments for both groups to a transcript fork (taken by value since each
  // bit forks the transcript) and squeeze out one challenge per group
  #[allow(non_snake_case)]
  fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
    transcript.append_message(b"nonce_0", nonces.0.to_bytes().as_ref());
    transcript.append_message(b"nonce_1", nonces.1.to_bytes().as_ref());
    (challenge(&mut transcript, b"challenge_G"), challenge(&mut transcript, b"challenge_H"))
  }

  // Derive the next challenges from a (s, A, e) triple by recomputing the implied nonce
  // commitments as (s * alt) - (e * A), then hashing them via `nonces`
  #[allow(non_snake_case)]
  fn R_nonces<T: Transcript>(
    transcript: T,
    generators: (Generators<G0>, Generators<G1>),
    s: (G0::Scalar, G1::Scalar),
    A: (G0, G1),
    e: (G0::Scalar, G1::Scalar)
  ) -> (G0::Scalar, G1::Scalar) {
    Self::nonces(
      transcript,
      (((generators.0.alt * s.0) - (A.0 * e.0)), ((generators.1.alt * s.1) - (A.1 * e.1)))
    )
  }

  // Sum the per-bit commitments weighted by successive powers of two, recovering the committed
  // key (the blinding keys cancel by construction — see `blinding_key`)
  // TODO: Use multiexp here after https://github.com/serai-dex/serai/issues/17
  fn reconstruct_key<G: PrimeGroup>(commitments: impl Iterator<Item = G>) -> G {
    let mut pow_2 = G::Scalar::one();
    commitments.fold(G::identity(), |key, commitment| {
      let res = key + (commitment * pow_2);
      pow_2 = pow_2.double();
      res
    })
  }

  // Reconstruct both public keys from this proof's bit commitments
  fn reconstruct_keys(&self) -> (G0, G1) {
    (
      Self::reconstruct_key(self.bits.iter().map(|bit| bit.commitments.0)),
      Self::reconstruct_key(self.bits.iter().map(|bit| bit.commitments.1))
    )
  }

  // Commit a bit's index and commitments to the transcript; the domain separator is only added
  // once, before the first bit
  fn transcript_bit<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
    if i == 0 {
      transcript.domain_separate(b"cross_group_dleq");
    }
    transcript.append_message(b"bit", &u16::try_from(i).unwrap().to_le_bytes());
    transcript.append_message(b"commitment_0", commitments.0.to_bytes().as_ref());
    transcript.append_message(b"commitment_1", commitments.1.to_bytes().as_ref());
  }

  /// Prove the cross-Group Discrete Log Equality for the points derived from the provided Scalar.
  /// Since DLEq is proven for the same Scalar in both fields, and the provided Scalar may not be
  /// valid in the other Scalar field, the Scalar is normalized as needed and the normalized forms
  /// are returned. These are the actually equal discrete logarithms. The passed in Scalar is
  /// solely to enable various forms of Scalar generation, such as deterministic schemes
  pub fn prove<R: RngCore + CryptoRng, T: Clone + Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    f: G0::Scalar
  ) -> (
    Self,
    (G0::Scalar, G1::Scalar)
  ) where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
    // At least one bit will be dropped from either field element, making it irrelevant which one
    // we get a random element in
    let f = scalar_normalize::<_, G1::Scalar>(f);

    Self::initialize_transcript(
      transcript,
      generators,
      ((generators.0.primary * f.0), (generators.1.primary * f.1))
    );

    // Prove knowledge of the discrete log of each public key before the per-bit proofs
    let poks = (
      SchnorrPoK::<G0>::prove(rng, transcript, generators.0.primary, f.0),
      SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, f.1)
    );

    // Running weighted sums / powers of two for blinding-key cancellation (see `blinding_key`)
    let mut blinding_key_total = (G0::Scalar::zero(), G1::Scalar::zero());
    let mut pow_2 = (G0::Scalar::one(), G1::Scalar::one());

    let raw_bits = f.0.to_le_bits();
    // Only the bits representable in both fields are proven; the loop breaks once they're done
    let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
    let mut bits = Vec::with_capacity(capacity);
    for (i, bit) in raw_bits.iter().enumerate() {
      let last = i == (capacity - 1);
      let blinding_key = (
        Self::blinding_key(&mut *rng, &mut blinding_key_total.0, &mut pow_2.0, last),
        Self::blinding_key(&mut *rng, &mut blinding_key_total.1, &mut pow_2.1, last)
      );
      if last {
        // The final blinding keys were solved for so the totals cancel to zero
        debug_assert_eq!(blinding_key_total.0, G0::Scalar::zero());
        debug_assert_eq!(blinding_key_total.1, G1::Scalar::zero());
      }

      // Commit to the bit: blinding_key * alt, plus primary if the bit is set
      let mut commitments = (
        (generators.0.alt * blinding_key.0),
        (generators.1.alt * blinding_key.1)
      );
      // TODO: Not constant time
      if *bit {
        commitments.0 += generators.0.primary;
        commitments.1 += generators.1.primary;
      }
      Self::transcript_bit(transcript, i, commitments);

      // Two-option OR proof over the bit value: one leg is proven with a real nonce, the other
      // is simulated with a random response (s_0) and a derived challenge (e_1)
      let nonces = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
      let e_0 = Self::nonces(
        transcript.clone(),
        ((generators.0.alt * nonces.0), (generators.1.alt * nonces.1))
      );
      let s_0 = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));

      // Chain the simulated leg's challenge off (s_0, e_0) against the statement for the
      // opposite bit value
      let e_1 = Self::R_nonces(
        transcript.clone(),
        generators,
        (s_0.0, s_0.1),
        if *bit {
          commitments
        } else {
          ((commitments.0 - generators.0.primary), (commitments.1 - generators.1.primary))
        },
        e_0
      );
      // Real response for the proven leg
      let s_1 = (nonces.0 + (e_1.0 * blinding_key.0), nonces.1 + (e_1.1 * blinding_key.1));

      // Order (e, s) so verification, which always walks the ring the same way, succeeds for
      // either bit value without revealing which leg was real
      bits.push(
        if *bit {
          Bit { commitments, e: e_0, s: [s_1, s_0] }
        } else {
          Bit { commitments, e: e_1, s: [s_0, s_1] }
        }
      );

      if last {
        break;
      }
    }

    let proof = DLEqProof { bits, poks };
    // Sanity check the commitments actually reconstruct to the claimed public keys
    debug_assert_eq!(
      proof.reconstruct_keys(),
      (generators.0.primary * f.0, generators.1.primary * f.1)
    );
    (proof, f)
  }

  /// Verify a cross-Group Discrete Log Equality statement, returning the points proven for
  pub fn verify<T: Clone + Transcript>(
    &self,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>)
  ) -> Result<(G0, G1), DLEqError> where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
    // The proof must cover exactly the mutual capacity's worth of bits
    let capacity = G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY);
    if self.bits.len() != capacity.try_into().unwrap() {
      return Err(DLEqError::InvalidProofLength);
    }

    // Rebuild the keys from the commitments and replay the prover's transcript
    let keys = self.reconstruct_keys();
    Self::initialize_transcript(transcript, generators, keys);
    if !(
      self.poks.0.verify(transcript, generators.0.primary, keys.0) &&
      self.poks.1.verify(transcript, generators.1.primary, keys.1)
    ) {
      Err(DLEqError::InvalidProofOfKnowledge)?;
    }

    for (i, bit) in self.bits.iter().enumerate() {
      Self::transcript_bit(transcript, i, bit.commitments);

      // Walk the two-option ring: derive the second challenge from (s[1], commitment, e), then
      // close the loop against the commitment minus the primary generator. The result must
      // equal the stored initial challenge for the bit to be a valid 0 or 1.
      if bit.e != Self::R_nonces(
        transcript.clone(),
        generators,
        bit.s[0],
        (
          bit.commitments.0 - generators.0.primary,
          bit.commitments.1 - generators.1.primary
        ),
        Self::R_nonces(
          transcript.clone(),
          generators,
          bit.s[1],
          bit.commitments,
          bit.e
        )
      ) {
        return Err(DLEqError::InvalidProof);
      }
    }

    Ok(keys)
  }

  /// Serialize every bit in order, followed by both proofs of knowledge.
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    for bit in &self.bits {
      bit.serialize(w)?;
    }
    self.poks.0.serialize(w)?;
    self.poks.1.serialize(w)
  }

  /// Deserialize a proof written by `serialize`. The bit count is fixed by the two fields'
  /// mutual capacity, so no length prefix is needed on the wire.
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<DLEqProof<G0, G1>> {
    let capacity = G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY);
    let mut bits = Vec::with_capacity(capacity.try_into().unwrap());
    for _ in 0 .. capacity {
      bits.push(Bit::deserialize(r)?);
    }
    Ok(DLEqProof { bits, poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?) })
  }
}
|
||||
34
crypto/dleq/src/cross_group/scalar.rs
Normal file
34
crypto/dleq/src/cross_group/scalar.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use ff::PrimeFieldBits;
|
||||
|
||||
/// Convert a uniform scalar into one usable on both fields, clearing the top bits as needed
pub fn scalar_normalize<F0: PrimeFieldBits, F1: PrimeFieldBits>(scalar: F0) -> (F0, F1) {
  // Only bits representable in both fields without overflow are kept
  let mutual_capacity = F0::CAPACITY.min(F1::CAPACITY);

  // The security of a mutual key is the security of the lower field. Accordingly, this bans a
  // difference of more than 4 bits
  #[cfg(feature = "secure_capacity_difference")]
  assert!((F0::CAPACITY.max(F1::CAPACITY) - mutual_capacity) < 4);

  let mut res1 = F0::zero();
  let mut res2 = F1::zero();
  // Uses the bit view API to ensure a consistent endianness
  let mut bits = scalar.to_le_bits();
  // Convert it to big endian
  bits.reverse();
  // Rebuild the scalar in both fields simultaneously via double-and-add, skipping the top bits
  // beyond the mutual capacity (this is the "clearing" referenced in the doc comment)
  for bit in bits.iter().skip(bits.len() - usize::try_from(mutual_capacity).unwrap()) {
    res1 = res1.double();
    res2 = res2.double();
    if *bit {
      res1 += F0::one();
      res2 += F1::one();
    }
  }

  (res1, res2)
}
|
||||
|
||||
/// Helper to convert a scalar between fields. Returns None if the scalar isn't mutually valid
|
||||
pub fn scalar_convert<F0: PrimeFieldBits, F1: PrimeFieldBits>(scalar: F0) -> Option<F1> {
|
||||
let (valid, converted) = scalar_normalize(scalar);
|
||||
Some(converted).filter(|_| scalar == valid)
|
||||
}
|
||||
71
crypto/dleq/src/cross_group/schnorr.rs
Normal file
71
crypto/dleq/src/cross_group/schnorr.rs
Normal file
@@ -0,0 +1,71 @@
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use group::{ff::Field, prime::PrimeGroup};
|
||||
|
||||
use crate::challenge;
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
use std::io::{Read, Write};
|
||||
#[cfg(feature = "serialize")]
|
||||
use ff::PrimeField;
|
||||
#[cfg(feature = "serialize")]
|
||||
use crate::{read_scalar, cross_group::read_point};
|
||||
|
||||
/// A Schnorr proof of knowledge of the discrete logarithm of a public key, bound to a
/// transcript rather than a message.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) struct SchnorrPoK<G: PrimeGroup> {
  // Nonce commitment
  R: G,
  // Response: nonce + (private key * challenge)
  s: G::Scalar
}
|
||||
|
||||
impl<G: PrimeGroup> SchnorrPoK<G> {
  // Derive the challenge from the generator, nonce commitment, and public key.
  // Not hram due to the lack of m
  #[allow(non_snake_case)]
  fn hra<T: Transcript>(transcript: &mut T, generator: G, R: G, A: G) -> G::Scalar {
    transcript.domain_separate(b"schnorr_proof_of_knowledge");
    transcript.append_message(b"generator", generator.to_bytes().as_ref());
    transcript.append_message(b"nonce", R.to_bytes().as_ref());
    transcript.append_message(b"public_key", A.to_bytes().as_ref());
    challenge(transcript, b"challenge")
  }

  /// Prove knowledge of `private_key` with respect to `generator`, binding the proof to the
  /// provided transcript's state.
  pub(crate) fn prove<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generator: G,
    private_key: G::Scalar
  ) -> SchnorrPoK<G> {
    let nonce = G::Scalar::random(rng);
    #[allow(non_snake_case)]
    let R = generator * nonce;
    SchnorrPoK {
      R,
      // Standard Schnorr response: s = r + (x * c)
      s: nonce + (private_key * SchnorrPoK::hra(transcript, generator, R, generator * private_key))
    }
  }

  /// Verify this proof for `public_key` against the same transcript state the prover used.
  /// Returns true on success.
  #[must_use]
  pub(crate) fn verify<T: Transcript>(
    &self,
    transcript: &mut T,
    generator: G,
    public_key: G
  ) -> bool {
    // Checks s * G == R + (c * A)
    (generator * self.s) == (
      self.R + (public_key * Self::hra(transcript, generator, self.R, public_key))
    )
  }

  /// Serialize as R || s; must stay in sync with `deserialize`.
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    w.write_all(self.R.to_bytes().as_ref())?;
    w.write_all(self.s.to_repr().as_ref())
  }

  /// Deserialize a proof written by `serialize`, erroring on invalid encodings.
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<SchnorrPoK<G>> {
    Ok(SchnorrPoK { R: read_point(r)?, s: read_scalar(r)? })
  }
}
|
||||
149
crypto/dleq/src/lib.rs
Normal file
149
crypto/dleq/src/lib.rs
Normal file
@@ -0,0 +1,149 @@
|
||||
use thiserror::Error;
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use ff::{Field, PrimeField};
|
||||
use group::prime::PrimeGroup;
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
use std::io::{self, ErrorKind, Error, Read, Write};
|
||||
|
||||
#[cfg(feature = "cross_group")]
|
||||
pub mod cross_group;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// A pair of generators over a single group, as used by the DLEq proofs.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Generators<G: PrimeGroup> {
  // Primary generator, used for public keys
  primary: G,
  // Alternate generator, used for nonces/blinding — presumably with an unknown discrete log
  // relative to `primary`; this type performs no such check (TODO confirm caller responsibility)
  alt: G
}
|
||||
|
||||
impl<G: PrimeGroup> Generators<G> {
  /// Create a new pair of generators. No relationship between `primary` and `alt` is validated.
  pub fn new(primary: G, alt: G) -> Generators<G> {
    Generators { primary, alt }
  }

  // Commit both generators to the transcript so proofs are bound to the generator set in use
  fn transcript<T: Transcript>(&self, transcript: &mut T) {
    transcript.domain_separate(b"generators");
    transcript.append_message(b"primary", self.primary.to_bytes().as_ref());
    transcript.append_message(b"alternate", self.alt.to_bytes().as_ref());
  }
}
|
||||
|
||||
// Squeeze a 64-byte challenge out of the transcript and reduce it into the field F by treating
// the bytes as a big-endian integer, folded in one byte at a time via double-and-add.
// The 384-bit cap keeps 64 bytes of input comfortably larger than the field — presumably to
// bound reduction bias (TODO confirm the intended margin).
pub(crate) fn challenge<T: Transcript, F: PrimeField>(
  transcript: &mut T,
  label: &'static [u8]
) -> F {
  assert!(F::NUM_BITS <= 384);

  // From here, there are three ways to get a scalar under the ff/group API
  // 1: Scalar::random(ChaCha12Rng::from_seed(self.transcript.rng_seed(b"challenge")))
  // 2: Grabbing a UInt library to perform reduction by the modulus, then determining endianness
  // and loading it in
  // 3: Iterating over each byte and manually doubling/adding. This is simplest
  let challenge_bytes = transcript.challenge(label);
  assert!(challenge_bytes.as_ref().len() == 64);

  let mut challenge = F::zero();
  for b in challenge_bytes.as_ref() {
    // Shift the accumulator left by 8 bits (mod the field order)
    for _ in 0 .. 8 {
      challenge = challenge.double();
    }
    challenge += F::from(u64::from(*b));
  }
  challenge
}
|
||||
|
||||
#[cfg(feature = "serialize")]
/// Read a field element from `r` in its canonical representation.
///
/// Reads exactly `F::Repr`'s length in bytes and errors (ErrorKind::Other) if the bytes are not
/// a canonical scalar encoding.
fn read_scalar<R: Read, F: PrimeField>(r: &mut R) -> io::Result<F> {
  let mut repr = F::Repr::default();
  r.read_exact(repr.as_mut())?;
  // from_repr returns a CtOption; convert to Option and map the invalid case to an IO error
  // instead of branching on is_none() and unwrapping
  Option::<F>::from(F::from_repr(repr))
    .ok_or_else(|| Error::new(ErrorKind::Other, "invalid scalar"))
}
|
||||
|
||||
/// Error returned when verifying a (single-group) DLEq proof.
#[derive(Error, Debug)]
pub enum DLEqError {
  /// The recomputed challenge did not match the proof's challenge.
  #[error("invalid proof")]
  InvalidProof
}
|
||||
|
||||
/// A proof that two points share a discrete logarithm across a pair of generators within one
/// group, in compact challenge/response form.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DLEqProof<G: PrimeGroup> {
  // Fiat-Shamir challenge
  c: G::Scalar,
  // Schnorr-style response
  s: G::Scalar
}
|
||||
|
||||
#[allow(non_snake_case)]
impl<G: PrimeGroup> DLEqProof<G> {
  // Derive the Fiat-Shamir challenge, binding the generators, both nonce commitments, and both
  // points the DLEq is claimed for
  fn challenge<T: Transcript>(
    transcript: &mut T,
    generators: Generators<G>,
    nonces: (G, G),
    points: (G, G)
  ) -> G::Scalar {
    generators.transcript(transcript);
    transcript.domain_separate(b"dleq");
    transcript.append_message(b"nonce_primary", nonces.0.to_bytes().as_ref());
    transcript.append_message(b"nonce_alternate", nonces.1.to_bytes().as_ref());
    transcript.append_message(b"point_primary", points.0.to_bytes().as_ref());
    transcript.append_message(b"point_alternate", points.1.to_bytes().as_ref());
    challenge(transcript, b"challenge")
  }

  /// Prove the discrete logarithm of `scalar` is equal across both of `generators`' points,
  /// binding the proof to the provided transcript.
  pub fn prove<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generators: Generators<G>,
    scalar: G::Scalar
  ) -> DLEqProof<G> {
    // A single nonce is reused across both generators, as the statement is about one scalar
    let r = G::Scalar::random(rng);
    let c = Self::challenge(
      transcript,
      generators,
      (generators.primary * r, generators.alt * r),
      (generators.primary * scalar, generators.alt * scalar)
    );
    // Standard Schnorr response: s = r + (c * x)
    let s = r + (c * scalar);

    DLEqProof { c, s }
  }

  /// Verify this proof for `points`, which must be the scalar multiplied by the primary and
  /// alternate generators respectively, against the transcript state used to prove.
  pub fn verify<T: Transcript>(
    &self,
    transcript: &mut T,
    generators: Generators<G>,
    points: (G, G)
  ) -> Result<(), DLEqError> {
    // Recompute the nonce commitments as (s * G) - (c * P); a matching challenge implies the
    // discrete logs are equal
    if self.c != Self::challenge(
      transcript,
      generators,
      (
        (generators.primary * self.s) - (points.0 * self.c),
        (generators.alt * self.s) - (points.1 * self.c)
      ),
      points
    ) {
      Err(DLEqError::InvalidProof)?;
    }

    Ok(())
  }

  /// Serialize as c || s; must stay in sync with `deserialize`.
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(self.c.to_repr().as_ref())?;
    w.write_all(self.s.to_repr().as_ref())
  }

  /// Deserialize a proof written by `serialize`, erroring on invalid scalar encodings.
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> io::Result<DLEqProof<G>> {
    Ok(DLEqProof { c: read_scalar(r)?, s: read_scalar(r)? })
  }
}
|
||||
54
crypto/dleq/src/tests/cross_group/mod.rs
Normal file
54
crypto/dleq/src/tests/cross_group/mod.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
mod scalar;
|
||||
mod schnorr;
|
||||
|
||||
use hex_literal::hex;
|
||||
use rand_core::OsRng;
|
||||
|
||||
use ff::Field;
|
||||
use group::{Group, GroupEncoding};
|
||||
|
||||
use k256::{Scalar, ProjectivePoint};
|
||||
use dalek_ff_group::{EdwardsPoint, CompressedEdwardsY};
|
||||
|
||||
use transcript::RecommendedTranscript;
|
||||
|
||||
use crate::{Generators, cross_group::DLEqProof};
|
||||
|
||||
#[test]
fn test_dleq() {
  // Fresh, identically-seeded transcript per use, so prover and verifier stay in sync
  let transcript = || RecommendedTranscript::new(b"Cross-Group DLEq Proof Test");

  let generators = (
    Generators::new(
      ProjectivePoint::GENERATOR,
      // Alternate secp256k1 generator with (presumably) unknown discrete log — TODO confirm
      // provenance of this constant
      ProjectivePoint::from_bytes(
        &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
      ).unwrap()
    ),

    Generators::new(
      EdwardsPoint::generator(),
      // Alternate Ed25519 generator, same caveat as above
      CompressedEdwardsY::new(
        hex!("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94")
      ).decompress().unwrap()
    )
  );

  let key = Scalar::random(&mut OsRng);
  // `keys` holds the normalized scalars actually proven for (the input may have top bits cleared)
  let (proof, keys) = DLEqProof::prove(&mut OsRng, &mut transcript(), generators, key);

  // Verification must succeed and return the same public keys the normalized scalars produce
  let public_keys = proof.verify(&mut transcript(), generators).unwrap();
  assert_eq!(generators.0.primary * keys.0, public_keys.0);
  assert_eq!(generators.1.primary * keys.1, public_keys.1);

  // Round-trip through the wire format and re-verify the deserialized proof
  #[cfg(feature = "serialize")]
  {
    let mut buf = vec![];
    proof.serialize(&mut buf).unwrap();
    let deserialized = DLEqProof::<ProjectivePoint, EdwardsPoint>::deserialize(
      &mut std::io::Cursor::new(&buf)
    ).unwrap();
    assert_eq!(proof, deserialized);
    deserialized.verify(&mut transcript(), generators).unwrap();
  }
}
|
||||
47
crypto/dleq/src/tests/cross_group/scalar.rs
Normal file
47
crypto/dleq/src/tests/cross_group/scalar.rs
Normal file
@@ -0,0 +1,47 @@
|
||||
use rand_core::OsRng;
|
||||
|
||||
use ff::{Field, PrimeField};
|
||||
|
||||
use k256::Scalar as K256Scalar;
|
||||
use dalek_ff_group::Scalar as DalekScalar;
|
||||
|
||||
use crate::cross_group::scalar::{scalar_normalize, scalar_convert};
|
||||
|
||||
#[test]
fn test_scalar() {
  // Fixed points: zero and one normalize to themselves in both fields
  assert_eq!(
    scalar_normalize::<_, DalekScalar>(K256Scalar::zero()),
    (K256Scalar::zero(), DalekScalar::zero())
  );

  assert_eq!(
    scalar_normalize::<_, DalekScalar>(K256Scalar::one()),
    (K256Scalar::one(), DalekScalar::one())
  );

  let mut initial;
  // Loop until a secp256k1 scalar which actually loses bits under normalization is found, so
  // the final assertion below is meaningful
  while {
    initial = K256Scalar::random(&mut OsRng);
    let (k, ed) = scalar_normalize::<_, DalekScalar>(initial);

    // The initial scalar should equal the new scalar with Ed25519's capacity
    let mut initial_bytes = (&initial.to_repr()).to_vec();
    // Drop the first 4 bits to hit 252
    initial_bytes[0] = initial_bytes[0] & 0b00001111;
    let k_bytes = (&k.to_repr()).to_vec();
    assert_eq!(initial_bytes, k_bytes);

    // Both normalized forms must encode the same integer (k256 is big endian, dalek is little)
    let mut ed_bytes = ed.to_repr().as_ref().to_vec();
    // Reverse to big endian
    ed_bytes.reverse();
    assert_eq!(k_bytes, ed_bytes);

    // Verify conversion works as expected
    assert_eq!(scalar_convert::<_, DalekScalar>(k), Some(ed));

    // Run this test again if this secp256k1 scalar didn't have any bits cleared
    initial == k
  } {}
  // Verify conversion returns None when the scalar isn't mutually valid
  assert!(scalar_convert::<_, DalekScalar>(initial).is_none());
}
|
||||
31
crypto/dleq/src/tests/cross_group/schnorr.rs
Normal file
31
crypto/dleq/src/tests/cross_group/schnorr.rs
Normal file
@@ -0,0 +1,31 @@
|
||||
use rand_core::OsRng;
|
||||
|
||||
use group::{ff::Field, prime::PrimeGroup};
|
||||
|
||||
use transcript::RecommendedTranscript;
|
||||
|
||||
use crate::cross_group::schnorr::SchnorrPoK;
|
||||
|
||||
// Generic prove/verify round trip for SchnorrPoK over any prime group
fn test_schnorr<G: PrimeGroup>() {
  let private = G::Scalar::random(&mut OsRng);

  // Clone the transcript so prove and verify see identical starting states
  let transcript = RecommendedTranscript::new(b"Schnorr Test");
  assert!(
    SchnorrPoK::prove(
      &mut OsRng,
      &mut transcript.clone(),
      G::generator(),
      private
    ).verify(&mut transcript.clone(), G::generator(), G::generator() * private)
  );
}
|
||||
|
||||
// Exercise the Schnorr PoK over secp256k1
#[test]
fn test_secp256k1() {
  test_schnorr::<k256::ProjectivePoint>();
}
|
||||
|
||||
// Exercise the Schnorr PoK over Ed25519
#[test]
fn test_ed25519() {
  test_schnorr::<dalek_ff_group::EdwardsPoint>();
}
|
||||
43
crypto/dleq/src/tests/mod.rs
Normal file
43
crypto/dleq/src/tests/mod.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
#[cfg(feature = "cross_group")]
|
||||
mod cross_group;
|
||||
|
||||
use hex_literal::hex;
|
||||
use rand_core::OsRng;
|
||||
|
||||
use ff::Field;
|
||||
use group::GroupEncoding;
|
||||
|
||||
use k256::{Scalar, ProjectivePoint};
|
||||
|
||||
use transcript::RecommendedTranscript;
|
||||
|
||||
use crate::{Generators, DLEqProof};
|
||||
|
||||
#[test]
fn test_dleq() {
  // Fresh, identically-seeded transcript per use, so prover and verifier stay in sync
  let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");

  let generators = Generators::new(
    ProjectivePoint::GENERATOR,
    // Alternate generator with (presumably) unknown discrete log — TODO confirm provenance of
    // this constant
    ProjectivePoint::from_bytes(
      &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
    ).unwrap()
  );

  let key = Scalar::random(&mut OsRng);
  let proof = DLEqProof::prove(&mut OsRng, &mut transcript(), generators, key);

  // Verification takes the claimed points explicitly
  let keys = (generators.primary * key, generators.alt * key);
  proof.verify(&mut transcript(), generators, keys).unwrap();

  // Round-trip through the wire format and re-verify the deserialized proof
  #[cfg(feature = "serialize")]
  {
    let mut buf = vec![];
    proof.serialize(&mut buf).unwrap();
    let deserialized = DLEqProof::<ProjectivePoint>::deserialize(
      &mut std::io::Cursor::new(&buf)
    ).unwrap();
    assert_eq!(proof, deserialized);
    deserialized.verify(&mut transcript(), generators, keys).unwrap();
  }
}
|
||||
Reference in New Issue
Block a user