Offer a multi-DLEq proof which simply merges challenges for n underlying proofs

This converts proofs from 2n elements to 1+n.

Moves FROST over to it. Additionally, for FROST's binomial nonces, provides
a single DLEq proof (2 elements, not 1+2) by proving the discrete log equality
of their aggregate (with an appropriate binding factor). This may be split back
up depending on later commentary...
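
In sketch form, the relation this implements: for discrete logs x_1 .. x_n (each with
its own generator set) and per-log nonces r_1 .. r_n, every (generator G, nonce
commitment r_i * G, point x_i * G) tuple is fed into one transcript, a single challenge
c is derived, and each response is s_i = (c * x_i) + r_i. The proof is (c, s_1, .., s_n),
1+n elements, instead of n independent (R_i, s_i) pairs. The verifier reconstructs each
nonce commitment as (G * s_i) - ((x_i * G) * c) and checks the transcript reproduces c.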
Luke Parker
2023-01-01 09:16:09 -05:00
parent 49c4acffbb
commit eeca440fa7
6 changed files with 291 additions and 86 deletions

View File

@@ -36,7 +36,7 @@ type FrostError<C> = DkgError<EncryptionKeyProof<C>>;
#[allow(non_snake_case)]
fn challenge<C: Ciphersuite>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2");
transcript.domain_separate(b"Schnorr Proof of Knowledge");
transcript.domain_separate(b"schnorr_proof_of_knowledge");
transcript.append_message(b"context", context.as_bytes());
transcript.append_message(b"participant", l.to_le_bytes());
transcript.append_message(b"nonce", R);

View File

@@ -177,3 +177,115 @@ impl<G: PrimeGroup> DLEqProof<G> {
res
}
}
#[cfg(feature = "std")]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct MultiDLEqProof<G: PrimeGroup> {
c: G::Scalar,
s: Vec<G::Scalar>,
}
#[cfg(feature = "std")]
#[allow(non_snake_case)]
impl<G: PrimeGroup> MultiDLEqProof<G> {
pub fn prove<R: RngCore + CryptoRng, T: Transcript>(
rng: &mut R,
transcript: &mut T,
generators: &[Vec<G>],
scalars: &[Zeroizing<G::Scalar>],
) -> MultiDLEqProof<G>
where
G::Scalar: Zeroize,
{
transcript.domain_separate(b"multi-dleq");
let mut nonces = vec![];
for (i, (scalar, generators)) in scalars.iter().zip(generators).enumerate() {
// Delineate between discrete logarithms
transcript.append_message(b"discrete_logarithm", i.to_le_bytes());
let nonce = Zeroizing::new(G::Scalar::random(&mut *rng));
for generator in generators {
DLEqProof::transcript(
transcript,
*generator,
*generator * nonce.deref(),
*generator * scalar.deref(),
);
}
nonces.push(nonce);
}
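// Derive a single challenge over every (generator, nonce commitment, point) tuple transcripted
// above; sharing this one challenge across all responses is what merges the n underlying proofs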
let c = challenge(transcript);
let mut s = vec![];
for (scalar, nonce) in scalars.iter().zip(nonces) {
s.push((c * scalar.deref()) + nonce.deref());
}
MultiDLEqProof { c, s }
}
pub fn verify<T: Transcript>(
&self,
transcript: &mut T,
generators: &[Vec<G>],
points: &[Vec<G>],
) -> Result<(), DLEqError> {
if points.len() != generators.len() {
Err(DLEqError::InvalidProof)?;
}
if self.s.len() != generators.len() {
Err(DLEqError::InvalidProof)?;
}
transcript.domain_separate(b"multi-dleq");
for (i, (generators, points)) in generators.iter().zip(points).enumerate() {
if points.len() != generators.len() {
Err(DLEqError::InvalidProof)?;
}
transcript.append_message(b"discrete_logarithm", i.to_le_bytes());
for (generator, point) in generators.iter().zip(points) {
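// Reconstruct the prover's nonce commitment: with s = (c * dlog) + nonce,
// (generator * s) - (point * c) equals generator * nonce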
DLEqProof::transcript(
transcript,
*generator,
(*generator * self.s[i]) - (*point * self.c),
*point,
);
}
}
if self.c != challenge(transcript) {
Err(DLEqError::InvalidProof)?;
}
Ok(())
}
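// The serialized proof is the single challenge followed by one response per discrete log,
// i.e. 1 + n field elements for n discrete logs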
#[cfg(feature = "serialize")]
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(self.c.to_repr().as_ref())?;
for s in &self.s {
w.write_all(s.to_repr().as_ref())?;
}
Ok(())
}
#[cfg(feature = "serialize")]
pub fn read<R: Read>(r: &mut R, discrete_logs: usize) -> io::Result<MultiDLEqProof<G>> {
let c = read_scalar(r)?;
let mut s = vec![];
for _ in 0 .. discrete_logs {
s.push(read_scalar(r)?);
}
Ok(MultiDLEqProof { c, s })
}
#[cfg(feature = "serialize")]
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
res
}
}

View File

@@ -13,16 +13,13 @@ use k256::{Scalar, ProjectivePoint};
use transcript::{Transcript, RecommendedTranscript};
use crate::DLEqProof;
use crate::{DLEqProof, MultiDLEqProof};
#[cfg(feature = "experimental")]
mod cross_group;
#[test]
fn test_dleq() {
let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");
let generators = [
fn generators() -> [k256::ProjectivePoint; 5] {
[
ProjectivePoint::GENERATOR,
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into()),
@@ -41,7 +38,13 @@ fn test_dleq() {
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803acb").into()),
)
.unwrap(),
];
]
}
#[test]
fn test_dleq() {
let generators = generators();
let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");
for i in 0 .. 5 {
let key = Zeroizing::new(Scalar::random(&mut OsRng));
@@ -61,6 +64,9 @@ fn test_dleq() {
)
.is_err());
// All of the following tests should effectively be a different challenge and accordingly
// pointless. They're still nice to have though
// We could edit these tests to always test with at least two generators
// Then we don't test proofs with zero/one generator(s)
// While those are stupid, and pointless, and potentially point to a failure in the caller,
@@ -94,3 +100,53 @@ fn test_dleq() {
}
}
}
#[test]
fn test_multi_dleq() {
let generators = generators();
let transcript = || RecommendedTranscript::new(b"MultiDLEq Proof Test");
// Test up to 3 keys
for k in 0 ..= 3 {
let mut keys = vec![];
let mut these_generators = vec![];
let mut pub_keys = vec![];
for i in 0 .. k {
let key = Zeroizing::new(Scalar::random(&mut OsRng));
// For each key, test a variable set of generators
// 0: 0
// 1: 1, 2
// 2: 2, 3, 4
let key_generators = generators[i .. (i + i + 1)].to_vec();
let mut these_pub_keys = vec![];
for generator in &key_generators {
these_pub_keys.push(generator * key.deref());
}
keys.push(key);
these_generators.push(key_generators);
pub_keys.push(these_pub_keys);
}
let proof = MultiDLEqProof::prove(&mut OsRng, &mut transcript(), &these_generators, &keys);
proof.verify(&mut transcript(), &these_generators, &pub_keys).unwrap();
// Different challenge
assert!(proof
.verify(&mut RecommendedTranscript::new(b"different challenge"), &these_generators, &pub_keys)
.is_err());
// Test that verifying with a different amount of keys fails
if k > 0 {
assert!(proof.verify(&mut transcript(), &these_generators, &pub_keys[.. k - 1]).is_err());
}
#[cfg(feature = "serialize")]
{
let mut buf = vec![];
proof.write(&mut buf).unwrap();
let deserialized =
MultiDLEqProof::<ProjectivePoint>::read::<&[u8]>(&mut buf.as_ref(), k).unwrap();
assert_eq!(proof, deserialized);
}
}
}

View File

@@ -23,10 +23,17 @@ use transcript::Transcript;
use group::{ff::PrimeField, Group, GroupEncoding};
use multiexp::multiexp_vartime;
use dleq::DLEqProof;
use dleq::MultiDLEqProof;
use crate::curve::Curve;
// Transcript used to aggregate binomial nonces for usage within a single DLEq proof.
fn aggregation_transcript<T: Transcript>(context: &[u8]) -> T {
let mut transcript = T::new(b"FROST DLEq Aggregation v0.5");
transcript.append_message(b"context", context);
transcript
}
// Every participant proves for their commitments at the start of the protocol
// These proofs are verified sequentially, requiring independent transcripts
// In order to make these transcripts more robust, the FROST transcript (at time of preprocess) is
@@ -37,7 +44,7 @@ use crate::curve::Curve;
// constructed). For higher level protocols, the transcript may have contextual info these proofs
// will then be bound to
fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
let mut transcript = T::new(b"FROST_commitments");
let mut transcript = T::new(b"FROST Commitments DLEq v0.5");
transcript.append_message(b"context", context);
transcript
}
@@ -47,7 +54,7 @@ fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
#[derive(Clone, Zeroize)]
pub(crate) struct Nonce<C: Curve>(pub(crate) [Zeroizing<C::F>; 2]);
// Commitments to a specific generator for this nonce
// Commitments to a specific generator for this binomial nonce
#[derive(Copy, Clone, PartialEq, Eq)]
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
impl<C: Curve> GeneratorCommitments<C> {
@@ -64,13 +71,8 @@ impl<C: Curve> GeneratorCommitments<C> {
// A single nonce's commitments and relevant proofs
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct NonceCommitments<C: Curve> {
// Called generators as these commitments are indexed by generator
// Called generators as these commitments are indexed by generator later on
pub(crate) generators: Vec<GeneratorCommitments<C>>,
// DLEq Proofs proving that these commitments are generated using the same scalar pair
// This could be further optimized with a multi-nonce proof, offering just one proof for all
// nonces. See https://github.com/serai-dex/serai/issues/38
// TODO
pub(crate) dleqs: Option<[DLEqProof<C::G>; 2]>,
}
impl<C: Curve> NonceCommitments<C> {
@@ -78,7 +80,6 @@ impl<C: Curve> NonceCommitments<C> {
rng: &mut R,
secret_share: &Zeroizing<C::F>,
generators: &[C::G],
context: &[u8],
) -> (Nonce<C>, NonceCommitments<C>) {
let nonce = Nonce::<C>([
C::random_nonce(secret_share, &mut *rng),
@@ -93,64 +94,49 @@ impl<C: Curve> NonceCommitments<C> {
]));
}
let mut dleqs = None;
if generators.len() >= 2 {
let mut dleq = |nonce| {
// Uses an independent transcript as each signer must prove this with their commitments,
// yet they're validated while processing everyone's data sequentially, by the global order
// This avoids needing to clone and fork the transcript around
DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(context), generators, nonce)
};
dleqs = Some([dleq(&nonce.0[0]), dleq(&nonce.0[1])]);
}
(nonce, NonceCommitments { generators: commitments, dleqs })
(nonce, NonceCommitments { generators: commitments })
}
fn read<R: Read, T: Transcript>(
reader: &mut R,
generators: &[C::G],
context: &[u8],
) -> io::Result<NonceCommitments<C>> {
let commitments: Vec<GeneratorCommitments<C>> = (0 .. generators.len())
.map(|_| GeneratorCommitments::read(reader))
.collect::<Result<_, _>>()?;
let mut dleqs = None;
if generators.len() >= 2 {
let mut verify = |i| -> io::Result<_> {
let dleq = DLEqProof::read(reader)?;
dleq
.verify(
&mut dleq_transcript::<T>(context),
generators,
&commitments.iter().map(|commitments| commitments.0[i]).collect::<Vec<_>>(),
)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
Ok(dleq)
};
dleqs = Some([verify(0)?, verify(1)?]);
}
Ok(NonceCommitments { generators: commitments, dleqs })
Ok(NonceCommitments {
generators: (0 .. generators.len())
.map(|_| GeneratorCommitments::read(reader))
.collect::<Result<_, _>>()?,
})
}
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
for generator in &self.generators {
generator.write(writer)?;
}
if let Some(dleqs) = &self.dleqs {
dleqs[0].write(writer)?;
dleqs[1].write(writer)?;
}
Ok(())
}
fn transcript<T: Transcript>(&self, t: &mut T) {
t.domain_separate(b"nonce");
for commitments in &self.generators {
t.append_message(b"commitment_D", commitments.0[0].to_bytes());
t.append_message(b"commitment_E", commitments.0[1].to_bytes());
}
}
fn aggregation_factor<T: Transcript>(&self, context: &[u8]) -> C::F {
let mut transcript = aggregation_transcript::<T>(context);
self.transcript(&mut transcript);
<C as Curve>::hash_to_F(b"dleq_aggregation", transcript.challenge(b"binding").as_ref())
}
}
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct Commitments<C: Curve> {
// Called nonces as these commitments are indexed by nonce
pub(crate) nonces: Vec<NonceCommitments<C>>,
// DLEq Proof proving that each set of commitments was generated using a single pair of discrete
// logarithms
pub(crate) dleq: Option<MultiDLEqProof<C::G>>,
}
impl<C: Curve> Commitments<C> {
@@ -162,53 +148,96 @@ impl<C: Curve> Commitments<C> {
) -> (Vec<Nonce<C>>, Commitments<C>) {
let mut nonces = vec![];
let mut commitments = vec![];
let mut dleq_generators = vec![];
let mut dleq_nonces = vec![];
for generators in planned_nonces {
let (nonce, these_commitments) =
NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators, context);
let (nonce, these_commitments): (Nonce<C>, _) =
NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators);
if generators.len() > 1 {
dleq_generators.push(generators.clone());
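// Aggregate the two nonces with the binding factor: d + (b * e); read() rebuilds the matching
// per-generator point as D + (E * b) before verifying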
dleq_nonces.push(Zeroizing::new(
(these_commitments.aggregation_factor::<T>(context) * nonce.0[1].deref()) +
nonce.0[0].deref(),
));
}
nonces.push(nonce);
commitments.push(these_commitments);
}
(nonces, Commitments { nonces: commitments })
let dleq = if !dleq_generators.is_empty() {
Some(MultiDLEqProof::prove(
rng,
&mut dleq_transcript::<T>(context),
&dleq_generators,
&dleq_nonces,
))
} else {
None
};
(nonces, Commitments { nonces: commitments, dleq })
}
pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
t.domain_separate(b"commitments");
for nonce in &self.nonces {
for commitments in &nonce.generators {
t.append_message(b"commitment_D", commitments.0[0].to_bytes());
t.append_message(b"commitment_E", commitments.0[1].to_bytes());
}
nonce.transcript(t);
}
// Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce
// This means it shouldn't be possible for variadic generators to cause conflicts as they're
// committed to as their entire series per-nonce, not as isolates
if let Some(dleqs) = &nonce.dleqs {
let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
let mut buf = vec![];
dleq.write(&mut buf).unwrap();
t.append_message(label, &buf);
};
transcript_dleq(b"dleq_D", &dleqs[0]);
transcript_dleq(b"dleq_E", &dleqs[1]);
}
// Transcripting the DLEqs implicitly transcripts the exact generators used for the nonces in
// an exact order
// This means it shouldn't be possible for variadic generators to cause conflicts
if let Some(dleq) = &self.dleq {
t.append_message(b"dleq", dleq.serialize());
}
}
pub(crate) fn read<R: Read, T: Transcript>(
reader: &mut R,
nonces: &[Vec<C::G>],
generators: &[Vec<C::G>],
context: &[u8],
) -> io::Result<Self> {
Ok(Commitments {
nonces: (0 .. nonces.len())
.map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i], context))
.collect::<Result<_, _>>()?,
})
let nonces = (0 .. generators.len())
.map(|i| NonceCommitments::read::<_, T>(reader, &generators[i]))
.collect::<Result<Vec<NonceCommitments<C>>, _>>()?;
let mut dleq_generators = vec![];
let mut dleq_nonces = vec![];
for (generators, nonce) in generators.iter().cloned().zip(&nonces) {
if generators.len() > 1 {
let binding = nonce.aggregation_factor::<T>(context);
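// Rebuild the per-generator aggregate D + (E * b), which must share its discrete log with the
// prover's aggregated nonce d + (b * e)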
let mut aggregated = vec![];
for commitments in &nonce.generators {
aggregated.push(commitments.0[0] + (commitments.0[1] * binding));
}
dleq_generators.push(generators);
dleq_nonces.push(aggregated);
}
}
let dleq = if !dleq_generators.is_empty() {
let dleq = MultiDLEqProof::read(reader, dleq_generators.len())?;
dleq
.verify(&mut dleq_transcript::<T>(context), &dleq_generators, &dleq_nonces)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
Some(dleq)
} else {
None
};
Ok(Commitments { nonces, dleq })
}
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
for nonce in &self.nonces {
nonce.write(writer)?;
}
if let Some(dleq) = &self.dleq {
dleq.write(writer)?;
}
Ok(())
}
}
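
A sketch of why the aggregated DLEq above covers both halves of each binomial nonce, with
d and e the two nonce scalars, D_G and E_G their commitments over a generator G, and b the
binding factor from aggregation_factor (derived only after the commitments are transcripted):

a = d + (b * e)                              // prover's aggregate, pushed into dleq_nonces
A_G = D_G + (E_G * b) = (d + (b * e)) * G    // verifier's aggregate, rebuilt per generator in read()

Proving a single discrete log a for every (G, A_G) pair ties both d and e to the same scalars
across all generators, so each binomial nonce contributes just one entry to the MultiDLEqProof:
one response plus the shared challenge, the 2-element (not 1+2) proof described in the commit
message.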

View File

@@ -182,8 +182,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
commitments: Commitments {
nonces: vec![NonceCommitments {
generators: vec![GeneratorCommitments(these_commitments)],
dleqs: None,
}],
dleq: None,
},
addendum: (),
},