Remove DLEq proofs from CLSAG multisig

1) Removes the key image DLEq proof on the Monero side, as the produced
   signature share itself serves as a DLEq proof for the key image.
2) Removes the nonce DLEqs from modular-frost, as they're unnecessary for
   monero-serai. Updates the documentation accordingly.

Without the proof that the nonces are internally consistent, the produced
signatures from modular-frost can still be argued as a batch-verifiable CP93
DLEq (R0, R1, s), or as a GSP for the CP93 DLEq statement (which naturally
produces (R0, R1, s)).

Not proving the nonces consistent does make the process weaker, yet such a
proof is also unnecessary for the class of protocols this is intended to
service. Providing DLEqs for the nonces would amount to providing PoKs for the
nonce commitments (in the traditional Schnorr case).
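
For reference, the CP93 (Chaum-Pedersen) DLEq statement referenced above has
the following shape, in assumed notation where x is the signer's secret share,
A = xG its public share, I = xH its key image share, r the nonce, and c the
challenge:

\[
  R_0 = rG, \qquad R_1 = rH, \qquad s = r + cx
\]
\[
  \text{verify:} \quad sG \stackrel{?}{=} R_0 + cA, \qquad sH \stackrel{?}{=} R_1 + cI
\]

Many such (R0, R1, s) tuples can be checked at once under random weights, which
is the batch-verifiable reading; viewing the same equations as a sigma protocol
for the relation log_G A = log_H I gives the GSP reading.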
Luke Parker
2024-04-21 22:50:07 -04:00
parent 558a2bfa46
commit a25e6330bd
12 changed files with 131 additions and 306 deletions

View File

@@ -38,7 +38,6 @@ ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features =
multiexp = { path = "../multiexp", version = "0.4", default-features = false, features = ["std", "batch"] }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] }
dleq = { path = "../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] }
dkg = { path = "../dkg", version = "^0.5.1", default-features = false, features = ["std"] }

View File

@@ -39,6 +39,13 @@ pub trait Algorithm<C: Curve>: Send + Sync + Clone {
/// Obtain the list of nonces to generate, as specified by the generators to create commitments
/// against per-nonce.
///
/// The Algorithm is responsible for all transcripting of these nonce specifications/generators.
///
/// The prover will be passed the commitments, and the commitments will be sent to all other
/// participants. No guarantees the commitments are internally consistent (have the same discrete
/// logarithm across generators) are made. Any Algorithm which specifies multiple generators for
/// a single nonce must handle that itself.
fn nonces(&self) -> Vec<Vec<C::G>>;
/// Generate an addendum to FROST's preprocessing stage.
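
As an illustration of the nonce specification described above, a CLSAG-style
Algorithm would ask for a single nonce whose commitments are provided over two
generators. This is a hypothetical sketch using curve25519-dalek types, not
monero-serai's actual code:

use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, edwards::EdwardsPoint};

// One nonce (one outer entry), committed against two generators: the basepoint
// and the key-image generator H_p(P) for the signing key P. No cross-generator
// consistency proof accompanies these commitments; any Algorithm specifying
// multiple generators for a nonce must account for that itself.
fn clsag_style_nonce_spec(key_image_generator: EdwardsPoint) -> Vec<Vec<EdwardsPoint>> {
  vec![vec![ED25519_BASEPOINT_POINT, key_image_generator]]
}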

View File

@@ -1,13 +1,9 @@
// FROST defines its nonce as sum(Di, Ei * bi)
// Monero needs not just the nonce over G however, yet also over H
// Then there is a signature (a modified Chaum Pedersen proof) using multiple nonces at once
//
// Accordingly, in order for this library to be robust, it supports generating an arbitrary amount
// of nonces, each against an arbitrary list of generators
// In order for this library to be robust, it supports generating an arbitrary amount of nonces,
// each against an arbitrary list of generators
//
// Each nonce remains of the form (d, e) and made into a proper nonce with d + (e * b)
// When representations across multiple generators are provided, a DLEq proof is also provided to
// confirm their integrity
use core::ops::Deref;
use std::{
@@ -24,32 +20,8 @@ use transcript::Transcript;
use ciphersuite::group::{ff::PrimeField, Group, GroupEncoding};
use multiexp::multiexp_vartime;
use dleq::MultiDLEqProof;
use crate::{curve::Curve, Participant};
// Transcript used to aggregate binomial nonces for usage within a single DLEq proof.
fn aggregation_transcript<T: Transcript>(context: &[u8]) -> T {
let mut transcript = T::new(b"FROST DLEq Aggregation v0.5");
transcript.append_message(b"context", context);
transcript
}
// Every participant proves for their commitments at the start of the protocol
// These proofs are verified sequentially, requiring independent transcripts
// In order to make these transcripts more robust, the FROST transcript (at time of preprocess) is
// challenged in order to create a commitment to it, carried in each independent transcript
// (effectively forking the original transcript)
//
// For FROST, as defined by the IETF, this will do nothing (and this transcript will never even be
// constructed). For higher level protocols, the transcript may have contextual info these proofs
// will then be bound to
fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
let mut transcript = T::new(b"FROST Commitments DLEq v0.5");
transcript.append_message(b"context", context);
transcript
}
// Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper
// This is considered a single nonce as r = d + be
#[derive(Clone, Zeroize)]
@@ -69,7 +41,7 @@ impl<C: Curve> GeneratorCommitments<C> {
}
}
// A single nonce's commitments and relevant proofs
// A single nonce's commitments
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct NonceCommitments<C: Curve> {
// Called generators as these commitments are indexed by generator later on
@@ -121,12 +93,6 @@ impl<C: Curve> NonceCommitments<C> {
t.append_message(b"commitment_E", commitments.0[1].to_bytes());
}
}
fn aggregation_factor<T: Transcript>(&self, context: &[u8]) -> C::F {
let mut transcript = aggregation_transcript::<T>(context);
self.transcript(&mut transcript);
<C as Curve>::hash_to_F(b"dleq_aggregation", transcript.challenge(b"binding").as_ref())
}
}
/// Commitments for all the nonces across all their generators.
@@ -135,51 +101,26 @@ pub(crate) struct Commitments<C: Curve> {
// Called nonces as these commitments are indexed by nonce
// So to get the commitments for the first nonce, it'd be commitments.nonces[0]
pub(crate) nonces: Vec<NonceCommitments<C>>,
// DLEq Proof proving that each set of commitments were generated using a single pair of discrete
// logarithms
pub(crate) dleq: Option<MultiDLEqProof<C::G>>,
}
impl<C: Curve> Commitments<C> {
pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
pub(crate) fn new<R: RngCore + CryptoRng>(
rng: &mut R,
secret_share: &Zeroizing<C::F>,
planned_nonces: &[Vec<C::G>],
context: &[u8],
) -> (Vec<Nonce<C>>, Commitments<C>) {
let mut nonces = vec![];
let mut commitments = vec![];
let mut dleq_generators = vec![];
let mut dleq_nonces = vec![];
for generators in planned_nonces {
let (nonce, these_commitments): (Nonce<C>, _) =
NonceCommitments::new(&mut *rng, secret_share, generators);
if generators.len() > 1 {
dleq_generators.push(generators.clone());
dleq_nonces.push(Zeroizing::new(
(these_commitments.aggregation_factor::<T>(context) * nonce.0[1].deref()) +
nonce.0[0].deref(),
));
}
nonces.push(nonce);
commitments.push(these_commitments);
}
let dleq = if !dleq_generators.is_empty() {
Some(MultiDLEqProof::prove(
rng,
&mut dleq_transcript::<T>(context),
&dleq_generators,
&dleq_nonces,
))
} else {
None
};
(nonces, Commitments { nonces: commitments, dleq })
(nonces, Commitments { nonces: commitments })
}
pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
@@ -187,58 +128,20 @@ impl<C: Curve> Commitments<C> {
for nonce in &self.nonces {
nonce.transcript(t);
}
// Transcripting the DLEqs implicitly transcripts the exact generators used for the nonces in
// an exact order
// This means it shouldn't be possible for variadic generators to cause conflicts
if let Some(dleq) = &self.dleq {
t.append_message(b"dleq", dleq.serialize());
}
}
pub(crate) fn read<R: Read, T: Transcript>(
reader: &mut R,
generators: &[Vec<C::G>],
context: &[u8],
) -> io::Result<Self> {
pub(crate) fn read<R: Read>(reader: &mut R, generators: &[Vec<C::G>]) -> io::Result<Self> {
let nonces = (0 .. generators.len())
.map(|i| NonceCommitments::read(reader, &generators[i]))
.collect::<Result<Vec<NonceCommitments<C>>, _>>()?;
let mut dleq_generators = vec![];
let mut dleq_nonces = vec![];
for (generators, nonce) in generators.iter().cloned().zip(&nonces) {
if generators.len() > 1 {
let binding = nonce.aggregation_factor::<T>(context);
let mut aggregated = vec![];
for commitments in &nonce.generators {
aggregated.push(commitments.0[0] + (commitments.0[1] * binding));
}
dleq_generators.push(generators);
dleq_nonces.push(aggregated);
}
}
let dleq = if !dleq_generators.is_empty() {
let dleq = MultiDLEqProof::read(reader, dleq_generators.len())?;
dleq
.verify(&mut dleq_transcript::<T>(context), &dleq_generators, &dleq_nonces)
.map_err(|_| io::Error::other("invalid DLEq proof"))?;
Some(dleq)
} else {
None
};
Ok(Commitments { nonces, dleq })
Ok(Commitments { nonces })
}
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
for nonce in &self.nonces {
nonce.write(writer)?;
}
if let Some(dleq) = &self.dleq {
dleq.write(writer)?;
}
Ok(())
}
}
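
A minimal sketch of the binomial nonce construction described in the comments
above, using assumed curve25519-dalek types rather than modular-frost's generic
ones: each nonce is a pair of scalars (d, e), committed to over every specified
generator, and collapsed into one effective nonce via the binding factor b.

use rand_core::OsRng;
use curve25519_dalek::{edwards::EdwardsPoint, scalar::Scalar};

// Sample (d, e) and commit to both scalars over each of the nonce's generators.
fn sample_and_commit(generators: &[EdwardsPoint]) -> ((Scalar, Scalar), Vec<[EdwardsPoint; 2]>) {
  let (d, e) = (Scalar::random(&mut OsRng), Scalar::random(&mut OsRng));
  let commitments = generators.iter().map(|g| [d * g, e * g]).collect();
  ((d, e), commitments)
}

// With binding factor b, the effective nonce over a generator is D + (b * E),
// matching the scalar nonce r = d + (b * e) used when producing the signature
// share. Nothing here proves the pairs share (d, e) across generators; that is
// exactly the DLEq this commit removes.
fn effective_nonce(commitments: &[EdwardsPoint; 2], b: Scalar) -> EdwardsPoint {
  commitments[0] + (b * commitments[1])
}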

View File

@@ -125,14 +125,8 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
let mut params = self.params;
let mut rng = ChaCha20Rng::from_seed(*seed.0);
// Get a challenge to the existing transcript for use when proving for the commitments
let commitments_challenge = params.algorithm.transcript().challenge(b"commitments");
let (nonces, commitments) = Commitments::new::<_, A::Transcript>(
&mut rng,
params.keys.secret_share(),
&params.algorithm.nonces(),
commitments_challenge.as_ref(),
);
let (nonces, commitments) =
Commitments::new::<_>(&mut rng, params.keys.secret_share(), &params.algorithm.nonces());
let addendum = params.algorithm.preprocess_addendum(&mut rng, &params.keys);
let preprocess = Preprocess { commitments, addendum };
@@ -141,27 +135,18 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
let mut blame_entropy = [0; 32];
rng.fill_bytes(&mut blame_entropy);
(
AlgorithmSignMachine {
params,
seed,
commitments_challenge,
nonces,
preprocess: preprocess.clone(),
blame_entropy,
},
AlgorithmSignMachine { params, seed, nonces, preprocess: preprocess.clone(), blame_entropy },
preprocess,
)
}
#[cfg(any(test, feature = "tests"))]
pub(crate) fn unsafe_override_preprocess(
mut self,
self,
nonces: Vec<Nonce<C>>,
preprocess: Preprocess<C, A::Addendum>,
) -> AlgorithmSignMachine<C, A> {
AlgorithmSignMachine {
commitments_challenge: self.params.algorithm.transcript().challenge(b"commitments"),
params: self.params,
seed: CachedPreprocess(Zeroizing::new([0; 32])),
@@ -255,8 +240,6 @@ pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>,
seed: CachedPreprocess,
#[zeroize(skip)]
commitments_challenge: <A::Transcript as Transcript>::Challenge,
pub(crate) nonces: Vec<Nonce<C>>,
// Skips the preprocess due to being too large a bound to feasibly enforce on users
#[zeroize(skip)]
@@ -285,11 +268,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
Ok(Preprocess {
commitments: Commitments::read::<_, A::Transcript>(
reader,
&self.params.algorithm.nonces(),
self.commitments_challenge.as_ref(),
)?,
commitments: Commitments::read::<_>(reader, &self.params.algorithm.nonces())?,
addendum: self.params.algorithm.read_addendum(reader)?,
})
}

View File

@@ -12,7 +12,7 @@ use crate::{
/// Tests for the nonce handling code.
pub mod nonces;
use nonces::{test_multi_nonce, test_invalid_commitment, test_invalid_dleq_proof};
use nonces::test_multi_nonce;
/// Vectorized test suite to ensure consistency.
pub mod vectors;
@@ -267,6 +267,4 @@ pub fn test_ciphersuite<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &mut
test_schnorr_blame::<R, C, H>(rng);
test_multi_nonce::<R, C>(rng);
test_invalid_commitment::<R, C>(rng);
test_invalid_dleq_proof::<R, C>(rng);
}

View File

@@ -9,14 +9,12 @@ use transcript::{Transcript, RecommendedTranscript};
use ciphersuite::group::{ff::Field, Group, GroupEncoding};
use dleq::MultiDLEqProof;
pub use dkg::tests::{key_gen, recover_key};
use crate::{
Curve, Participant, ThresholdView, ThresholdKeys, FrostError,
algorithm::Algorithm,
sign::{Writable, SignMachine},
tests::{algorithm_machines, preprocess, sign},
tests::{algorithm_machines, sign},
};
#[derive(Clone)]
@@ -157,75 +155,3 @@ pub fn test_multi_nonce<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let machines = algorithm_machines(&mut *rng, &MultiNonce::<C>::new(), &keys);
sign(&mut *rng, &MultiNonce::<C>::new(), keys.clone(), machines, &[]);
}
/// Test malleating a commitment for a nonce across generators causes the preprocess to error.
pub fn test_invalid_commitment<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let keys = key_gen::<R, C>(&mut *rng);
let machines = algorithm_machines(&mut *rng, &MultiNonce::<C>::new(), &keys);
let (machines, mut preprocesses) = preprocess(&mut *rng, machines, |_, _| {});
// Select a random participant to give an invalid commitment
let participants = preprocesses.keys().collect::<Vec<_>>();
let faulty = *participants
[usize::try_from(rng.next_u64() % u64::try_from(participants.len()).unwrap()).unwrap()];
// Grab their preprocess
let mut preprocess = preprocesses.remove(&faulty).unwrap();
// Mutate one of the commitments
let nonce =
preprocess.commitments.nonces.get_mut(usize::try_from(rng.next_u64()).unwrap() % 2).unwrap();
let generators_len = nonce.generators.len();
nonce.generators[usize::try_from(rng.next_u64()).unwrap() % generators_len].0
[usize::try_from(rng.next_u64()).unwrap() % 2] = C::G::random(&mut *rng);
// The commitments are validated at time of deserialization (read_preprocess)
// Accordingly, serialize it and read it again to make sure that errors
assert!(machines
.iter()
.next()
.unwrap()
.1
.read_preprocess::<&[u8]>(&mut preprocess.serialize().as_ref())
.is_err());
}
/// Test malleating the DLEq proof for a preprocess causes it to error.
pub fn test_invalid_dleq_proof<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let keys = key_gen::<R, C>(&mut *rng);
let machines = algorithm_machines(&mut *rng, &MultiNonce::<C>::new(), &keys);
let (machines, mut preprocesses) = preprocess(&mut *rng, machines, |_, _| {});
// Select a random participant to give an invalid DLEq proof
let participants = preprocesses.keys().collect::<Vec<_>>();
let faulty = *participants
[usize::try_from(rng.next_u64() % u64::try_from(participants.len()).unwrap()).unwrap()];
// Invalidate it by replacing it with a completely different proof
let dlogs = [Zeroizing::new(C::F::random(&mut *rng)), Zeroizing::new(C::F::random(&mut *rng))];
let mut preprocess = preprocesses.remove(&faulty).unwrap();
preprocess.commitments.dleq = Some(MultiDLEqProof::prove(
&mut *rng,
&mut RecommendedTranscript::new(b"Invalid DLEq Proof"),
&nonces::<C>(),
&dlogs,
));
assert!(machines
.iter()
.next()
.unwrap()
.1
.read_preprocess::<&[u8]>(&mut preprocess.serialize().as_ref())
.is_err());
// Also test None for a proof will cause an error
preprocess.commitments.dleq = None;
assert!(machines
.iter()
.next()
.unwrap()
.1
.read_preprocess::<&[u8]>(&mut preprocess.serialize().as_ref())
.is_err());
}

View File

@@ -14,7 +14,7 @@ use ciphersuite::group::{ff::PrimeField, GroupEncoding};
use crate::{
curve::Curve,
Participant, ThresholdCore, ThresholdKeys,
algorithm::{IetfTranscript, Hram, IetfSchnorr},
algorithm::{Hram, IetfSchnorr},
sign::{
Writable, Nonce, GeneratorCommitments, NonceCommitments, Commitments, Preprocess,
PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine,
@@ -191,7 +191,6 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
nonces: vec![NonceCommitments {
generators: vec![GeneratorCommitments(these_commitments)],
}],
dleq: None,
},
addendum: (),
};
@@ -301,12 +300,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
}
// Also test it at the Commitments level
let (generated_nonces, commitments) = Commitments::<C>::new::<_, IetfTranscript>(
&mut TransparentRng(randomness),
&share,
&[vec![C::generator()]],
&[],
);
let (generated_nonces, commitments) =
Commitments::<C>::new::<_>(&mut TransparentRng(randomness), &share, &[vec![C::generator()]]);
assert_eq!(generated_nonces.len(), 1);
assert_eq!(generated_nonces[0].0, [nonces[0].clone(), nonces[1].clone()]);