mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00
Create dedicated message structures for FROST messages (#140)
* Create message types for FROST key gen Taking in reader borrows absolutely wasn't feasible. Now, proper types which can be read (and then passed directly, without a mutable borrow) exist for key_gen. sign coming next. * Move FROST signing to messages, not Readers/Writers/Vec<u8> Also takes the nonce handling code and makes a dedicated file for it, aiming to resolve complex types and make the code more legible by replacing its previously inlined state. * clippy * Update FROST tests * read_signature_share * Update the Monero library to the new FROST packages * Update processor to latest FROST * Tweaks to terminology and documentation
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "dleq"
|
||||
version = "0.1.1"
|
||||
version = "0.1.2"
|
||||
description = "Implementation of single and cross-curve Discrete Log Equality proofs"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq"
|
||||
|
||||
@@ -61,7 +61,7 @@ pub enum DLEqError {
|
||||
InvalidProof,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct DLEqProof<G: PrimeGroup> {
|
||||
c: G::Scalar,
|
||||
s: G::Scalar,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "modular-frost"
|
||||
version = "0.2.4"
|
||||
version = "0.3.0"
|
||||
description = "Modular implementation of FROST over ff/group"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost"
|
||||
@@ -40,7 +40,7 @@ transcript = { package = "flexible-transcript", path = "../transcript", features
|
||||
|
||||
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
|
||||
|
||||
dleq = { path = "../dleq", version = "0.1", features = ["serialize"] }
|
||||
dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] }
|
||||
|
||||
[dev-dependencies]
|
||||
sha2 = "0.10"
|
||||
|
||||
@@ -1,26 +1,45 @@
|
||||
use core::{marker::PhantomData, fmt::Debug};
|
||||
use std::io::Read;
|
||||
use std::io::{self, Read, Write};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use crate::{Curve, FrostError, FrostView, schnorr};
|
||||
pub use schnorr::SchnorrSignature;
|
||||
|
||||
/// Serialize an addendum to a writer.
|
||||
pub trait AddendumSerialize {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
|
||||
}
|
||||
|
||||
impl AddendumSerialize for () {
|
||||
fn write<W: Write>(&self, _: &mut W) -> io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait alias for the requirements to be used as an addendum.
|
||||
pub trait Addendum: Clone + PartialEq + Debug + Zeroize + AddendumSerialize {}
|
||||
impl<A: Clone + PartialEq + Debug + Zeroize + AddendumSerialize> Addendum for A {}
|
||||
|
||||
/// Algorithm trait usable by the FROST signing machine to produce signatures..
|
||||
pub trait Algorithm<C: Curve>: Clone {
|
||||
/// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible
|
||||
/// transcript included in this crate.
|
||||
type Transcript: Transcript + Clone + Debug;
|
||||
type Transcript: Clone + Debug + Transcript;
|
||||
/// Serializable addendum, used in algorithms requiring more data than just the nonces.
|
||||
type Addendum: Addendum;
|
||||
/// The resulting type of the signatures this algorithm will produce.
|
||||
type Signature: Clone + PartialEq + Debug;
|
||||
|
||||
/// Obtain a mutable borrow of the underlying transcript.
|
||||
fn transcript(&mut self) -> &mut Self::Transcript;
|
||||
|
||||
/// Obtain the list of nonces to generate, as specified by the basepoints to create commitments.
|
||||
/// against per-nonce. These are not committed to by FROST on the underlying transcript.
|
||||
/// Obtain the list of nonces to generate, as specified by the generators to create commitments
|
||||
/// against per-nonce
|
||||
fn nonces(&self) -> Vec<Vec<C::G>>;
|
||||
|
||||
/// Generate an addendum to FROST"s preprocessing stage.
|
||||
@@ -28,14 +47,17 @@ pub trait Algorithm<C: Curve>: Clone {
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
params: &FrostView<C>,
|
||||
) -> Vec<u8>;
|
||||
) -> Self::Addendum;
|
||||
|
||||
/// Proccess the addendum for the specified participant. Guaranteed to be ordered.
|
||||
fn process_addendum<Re: Read>(
|
||||
/// Read an addendum from a reader.
|
||||
fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<Self::Addendum>;
|
||||
|
||||
/// Proccess the addendum for the specified participant. Guaranteed to be called in order.
|
||||
fn process_addendum(
|
||||
&mut self,
|
||||
params: &FrostView<C>,
|
||||
l: u16,
|
||||
reader: &mut Re,
|
||||
reader: Self::Addendum,
|
||||
) -> Result<(), FrostError>;
|
||||
|
||||
/// Sign a share with the given secret/nonce.
|
||||
@@ -116,6 +138,7 @@ impl<C: Curve, H: Hram<C>> Schnorr<C, H> {
|
||||
|
||||
impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
|
||||
type Transcript = IetfTranscript;
|
||||
type Addendum = ();
|
||||
type Signature = SchnorrSignature<C>;
|
||||
|
||||
fn transcript(&mut self) -> &mut Self::Transcript {
|
||||
@@ -126,20 +149,13 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
|
||||
vec![vec![C::generator()]]
|
||||
}
|
||||
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
_: &mut R,
|
||||
_: &FrostView<C>,
|
||||
) -> Vec<u8> {
|
||||
vec![]
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(&mut self, _: &mut R, _: &FrostView<C>) {}
|
||||
|
||||
fn read_addendum<R: Read>(&self, _: &mut R) -> io::Result<Self::Addendum> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_addendum<Re: Read>(
|
||||
&mut self,
|
||||
_: &FrostView<C>,
|
||||
_: u16,
|
||||
_: &mut Re,
|
||||
) -> Result<(), FrostError> {
|
||||
fn process_addendum(&mut self, _: &FrostView<C>, _: u16, _: ()) -> Result<(), FrostError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
use core::fmt::Debug;
|
||||
use std::io::Read;
|
||||
|
||||
use thiserror::Error;
|
||||
use std::io::{self, Read};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
@@ -30,15 +28,6 @@ mod ed448;
|
||||
#[cfg(feature = "ed448")]
|
||||
pub use ed448::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram};
|
||||
|
||||
/// Set of errors for curve-related operations, namely encoding and decoding.
|
||||
#[derive(Clone, Error, Debug)]
|
||||
pub enum CurveError {
|
||||
#[error("invalid scalar")]
|
||||
InvalidScalar,
|
||||
#[error("invalid point")]
|
||||
InvalidPoint,
|
||||
}
|
||||
|
||||
/// Unified trait to manage an elliptic curve.
|
||||
// This should be moved into its own crate if the need for generic cryptography over ff/group
|
||||
// continues, which is the exact reason ff/group exists (to provide a generic interface)
|
||||
@@ -127,13 +116,13 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn read_F<R: Read>(r: &mut R) -> Result<Self::F, CurveError> {
|
||||
fn read_F<R: Read>(r: &mut R) -> io::Result<Self::F> {
|
||||
let mut encoding = <Self::F as PrimeField>::Repr::default();
|
||||
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidScalar)?;
|
||||
r.read_exact(encoding.as_mut())?;
|
||||
|
||||
// ff mandates this is canonical
|
||||
let res =
|
||||
Option::<Self::F>::from(Self::F::from_repr(encoding)).ok_or(CurveError::InvalidScalar);
|
||||
let res = Option::<Self::F>::from(Self::F::from_repr(encoding))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar"));
|
||||
for b in encoding.as_mut() {
|
||||
b.zeroize();
|
||||
}
|
||||
@@ -141,15 +130,15 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn read_G<R: Read>(r: &mut R) -> Result<Self::G, CurveError> {
|
||||
fn read_G<R: Read>(r: &mut R) -> io::Result<Self::G> {
|
||||
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
|
||||
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidPoint)?;
|
||||
r.read_exact(encoding.as_mut())?;
|
||||
|
||||
let point =
|
||||
Option::<Self::G>::from(Self::G::from_bytes(&encoding)).ok_or(CurveError::InvalidPoint)?;
|
||||
let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
|
||||
// Ban the identity, per the FROST spec, and non-canonical points
|
||||
if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) {
|
||||
Err(CurveError::InvalidPoint)?;
|
||||
Err(io::Error::new(io::ErrorKind::Other, "non-canonical or identity point"))?;
|
||||
}
|
||||
Ok(point)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{
|
||||
marker::PhantomData,
|
||||
io::{Read, Cursor},
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
@@ -34,101 +34,97 @@ fn challenge<C: Curve>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
|
||||
C::hash_to_F(DST, &transcript)
|
||||
}
|
||||
|
||||
/// Commitments message to be broadcast to all other parties.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct Commitments<C: Curve>(Vec<C::G>, Vec<u8>, SchnorrSignature<C>);
|
||||
impl<C: Curve> Commitments<C> {
|
||||
pub fn read<R: Read>(reader: &mut R, params: FrostParams) -> io::Result<Self> {
|
||||
let mut commitments = Vec::with_capacity(params.t().into());
|
||||
let mut serialized = Vec::with_capacity(usize::from(params.t()) * C::G_len());
|
||||
for _ in 0 .. params.t() {
|
||||
let mut buf = <C::G as GroupEncoding>::Repr::default();
|
||||
reader.read_exact(buf.as_mut())?;
|
||||
|
||||
commitments.push(C::read_G(&mut buf.as_ref())?);
|
||||
serialized.extend(buf.as_ref());
|
||||
}
|
||||
|
||||
Ok(Commitments(commitments, serialized, SchnorrSignature::read(reader)?))
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(&self.1)?;
|
||||
self.2.write(writer)
|
||||
}
|
||||
}
|
||||
|
||||
// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
|
||||
// the serialized commitments to be broadcasted over an authenticated channel to all parties
|
||||
// the commitments to be broadcasted over an authenticated channel to all parties
|
||||
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: &FrostParams,
|
||||
context: &str,
|
||||
) -> (Vec<C::F>, Vec<C::G>, Vec<u8>) {
|
||||
) -> (Vec<C::F>, Vec<C::G>, Commitments<C>) {
|
||||
let t = usize::from(params.t);
|
||||
let mut coefficients = Vec::with_capacity(t);
|
||||
let mut commitments = Vec::with_capacity(t);
|
||||
let mut serialized = Vec::with_capacity((C::G_len() * t) + C::G_len() + C::F_len());
|
||||
let mut serialized = Vec::with_capacity(t * C::G_len());
|
||||
|
||||
for i in 0 .. t {
|
||||
// Step 1: Generate t random values to form a polynomial with
|
||||
coefficients.push(C::random_F(&mut *rng));
|
||||
// Step 3: Generate public commitments
|
||||
commitments.push(C::generator() * coefficients[i]);
|
||||
// Serialize them for publication
|
||||
serialized.extend(commitments[i].to_bytes().as_ref());
|
||||
}
|
||||
|
||||
// Step 2: Provide a proof of knowledge
|
||||
let mut r = C::random_F(rng);
|
||||
serialized.extend(
|
||||
schnorr::sign::<C>(
|
||||
coefficients[0],
|
||||
// This could be deterministic as the PoK is a singleton never opened up to cooperative
|
||||
// discussion
|
||||
// There's no reason to spend the time and effort to make this deterministic besides a
|
||||
// general obsession with canonicity and determinism though
|
||||
r,
|
||||
challenge::<C>(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized),
|
||||
)
|
||||
.serialize(),
|
||||
let sig = schnorr::sign::<C>(
|
||||
coefficients[0],
|
||||
// This could be deterministic as the PoK is a singleton never opened up to cooperative
|
||||
// discussion
|
||||
// There's no reason to spend the time and effort to make this deterministic besides a
|
||||
// general obsession with canonicity and determinism though
|
||||
r,
|
||||
challenge::<C>(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized),
|
||||
);
|
||||
r.zeroize();
|
||||
|
||||
// Step 4: Broadcast
|
||||
(coefficients, commitments, serialized)
|
||||
(coefficients, commitments.clone(), Commitments(commitments, serialized, sig))
|
||||
}
|
||||
|
||||
// Verify the received data from the first round of key generation
|
||||
fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>(
|
||||
fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: &FrostParams,
|
||||
context: &str,
|
||||
our_commitments: Vec<C::G>,
|
||||
mut serialized: HashMap<u16, Re>,
|
||||
mut msgs: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
|
||||
validate_map(&serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
|
||||
|
||||
let mut commitments = HashMap::new();
|
||||
commitments.insert(params.i, our_commitments);
|
||||
validate_map(&msgs, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
|
||||
|
||||
let mut signatures = Vec::with_capacity(usize::from(params.n() - 1));
|
||||
for l in 1 ..= params.n() {
|
||||
if l == params.i {
|
||||
continue;
|
||||
}
|
||||
|
||||
let invalid = FrostError::InvalidCommitment(l);
|
||||
|
||||
// Read the entire list of commitments as the key we're providing a PoK for (A) and the message
|
||||
#[allow(non_snake_case)]
|
||||
let mut Am = vec![0; usize::from(params.t()) * C::G_len()];
|
||||
serialized.get_mut(&l).unwrap().read_exact(&mut Am).map_err(|_| invalid)?;
|
||||
|
||||
let mut these_commitments = vec![];
|
||||
let mut cursor = Cursor::new(&Am);
|
||||
for _ in 0 .. usize::from(params.t()) {
|
||||
these_commitments.push(C::read_G(&mut cursor).map_err(|_| invalid)?);
|
||||
}
|
||||
|
||||
// Don't bother validating our own proof of knowledge
|
||||
if l != params.i() {
|
||||
let cursor = serialized.get_mut(&l).unwrap();
|
||||
#[allow(non_snake_case)]
|
||||
let R = C::read_G(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
|
||||
let s = C::read_F(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
|
||||
|
||||
let mut commitments = msgs
|
||||
.drain()
|
||||
.map(|(l, msg)| {
|
||||
// Step 5: Validate each proof of knowledge
|
||||
// This is solely the prep step for the latter batch verification
|
||||
signatures.push((
|
||||
l,
|
||||
these_commitments[0],
|
||||
challenge::<C>(context, l, R.to_bytes().as_ref(), &Am),
|
||||
SchnorrSignature::<C> { R, s },
|
||||
msg.0[0],
|
||||
challenge::<C>(context, l, msg.2.R.to_bytes().as_ref(), &msg.1),
|
||||
msg.2,
|
||||
));
|
||||
}
|
||||
|
||||
commitments.insert(l, these_commitments);
|
||||
}
|
||||
(l, msg.0)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?;
|
||||
|
||||
commitments.insert(params.i, our_commitments);
|
||||
Ok(commitments)
|
||||
}
|
||||
|
||||
@@ -144,18 +140,39 @@ fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
|
||||
share
|
||||
}
|
||||
|
||||
// Implements round 1, step 5 and round 2, step 1 of FROST key generation
|
||||
/// Secret share, to be sent only to the party it's intended for, over an encrypted and
|
||||
/// authenticated channel.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct SecretShare<C: Curve>(C::F);
|
||||
impl<C: Curve> SecretShare<C> {
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Ok(SecretShare(C::read_F(reader)?))
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.0.to_repr().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for SecretShare<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for SecretShare<C> {}
|
||||
|
||||
// Calls round 1, step 5 and implements round 2, step 1 of FROST key generation
|
||||
// Returns our secret share part, commitments for the next step, and a vector for each
|
||||
// counterparty to receive
|
||||
fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
|
||||
fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: &FrostParams,
|
||||
context: &str,
|
||||
coefficients: &mut Vec<C::F>,
|
||||
our_commitments: Vec<C::G>,
|
||||
commitments: HashMap<u16, Re>,
|
||||
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> {
|
||||
let commitments = verify_r1::<_, _, C>(rng, params, context, our_commitments, commitments)?;
|
||||
msgs: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, SecretShare<C>>), FrostError> {
|
||||
let commitments = verify_r1::<_, C>(rng, params, context, our_commitments, msgs)?;
|
||||
|
||||
// Step 1: Generate secret shares for all other parties
|
||||
let mut res = HashMap::new();
|
||||
@@ -166,7 +183,7 @@ fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
|
||||
continue;
|
||||
}
|
||||
|
||||
res.insert(l, polynomial(coefficients, l).to_repr().as_ref().to_vec());
|
||||
res.insert(l, SecretShare(polynomial(coefficients, l)));
|
||||
}
|
||||
|
||||
// Calculate our own share
|
||||
@@ -177,24 +194,17 @@ fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
|
||||
Ok((share, commitments, res))
|
||||
}
|
||||
|
||||
/// Finishes round 2 and returns both the secret share and the serialized public key.
|
||||
/// This key MUST NOT be considered usable until all parties confirm they have completed the
|
||||
/// protocol without issue.
|
||||
fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
|
||||
// Finishes round 2 and returns the keys.
|
||||
// This key MUST NOT be considered usable until all parties confirm they have completed the
|
||||
// protocol without issue.
|
||||
fn complete_r2<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: FrostParams,
|
||||
mut secret_share: C::F,
|
||||
commitments: &mut HashMap<u16, Vec<C::G>>,
|
||||
mut serialized: HashMap<u16, Re>,
|
||||
mut shares: HashMap<u16, SecretShare<C>>,
|
||||
) -> Result<FrostCore<C>, FrostError> {
|
||||
validate_map(&serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
|
||||
|
||||
// Step 2. Verify each share
|
||||
let mut shares = HashMap::new();
|
||||
// TODO: Clear serialized
|
||||
for (l, share) in serialized.iter_mut() {
|
||||
shares.insert(*l, C::read_F(share).map_err(|_| FrostError::InvalidShare(*l))?);
|
||||
}
|
||||
validate_map(&shares, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
|
||||
|
||||
// Calculate the exponent for a given participant and apply it to a series of commitments
|
||||
// Initially used with the actual commitments to verify the secret share, later used with stripes
|
||||
@@ -210,22 +220,18 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
|
||||
};
|
||||
|
||||
let mut batch = BatchVerifier::new(shares.len());
|
||||
for (l, share) in shares.iter_mut() {
|
||||
if *l == params.i() {
|
||||
continue;
|
||||
}
|
||||
|
||||
secret_share += *share;
|
||||
for (l, mut share) in shares.drain() {
|
||||
secret_share += share.0;
|
||||
|
||||
// This can be insecurely linearized from n * t to just n using the below sums for a given
|
||||
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
|
||||
// ensure that malleability isn't present is to use this n * t algorithm, which runs
|
||||
// per sender and not as an aggregate of all senders, which also enables blame
|
||||
let mut values = exponential(params.i, &commitments[l]);
|
||||
values.push((-*share, C::generator()));
|
||||
let mut values = exponential(params.i, &commitments[&l]);
|
||||
values.push((-share.0, C::generator()));
|
||||
share.zeroize();
|
||||
|
||||
batch.queue(rng, *l, values);
|
||||
batch.queue(rng, l, values);
|
||||
}
|
||||
batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?;
|
||||
|
||||
@@ -299,14 +305,14 @@ impl<C: Curve> KeyGenMachine<C> {
|
||||
}
|
||||
|
||||
/// Start generating a key according to the FROST DKG spec.
|
||||
/// Returns a serialized list of commitments to be sent to all parties over an authenticated
|
||||
/// Returns a commitments message to be sent to all parties over an authenticated
|
||||
/// channel. If any party submits multiple sets of commitments, they MUST be treated as
|
||||
/// malicious.
|
||||
pub fn generate_coefficients<R: RngCore + CryptoRng>(
|
||||
self,
|
||||
rng: &mut R,
|
||||
) -> (SecretShareMachine<C>, Vec<u8>) {
|
||||
let (coefficients, our_commitments, serialized) =
|
||||
) -> (SecretShareMachine<C>, Commitments<C>) {
|
||||
let (coefficients, our_commitments, commitments) =
|
||||
generate_key_r1::<_, C>(rng, &self.params, &self.context);
|
||||
|
||||
(
|
||||
@@ -316,21 +322,21 @@ impl<C: Curve> KeyGenMachine<C> {
|
||||
coefficients,
|
||||
our_commitments,
|
||||
},
|
||||
serialized,
|
||||
commitments,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> SecretShareMachine<C> {
|
||||
/// Continue generating a key.
|
||||
/// Takes in everyone else's commitments. Returns a HashMap of byte vectors representing secret
|
||||
/// shares. These MUST be encrypted and only then sent to their respective participants.
|
||||
pub fn generate_secret_shares<Re: Read, R: RngCore + CryptoRng>(
|
||||
/// Takes in everyone else's commitments. Returns a HashMap of secret shares.
|
||||
/// These MUST be encrypted and only then sent to their respective participants.
|
||||
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
commitments: HashMap<u16, Re>,
|
||||
) -> Result<(KeyMachine<C>, HashMap<u16, Vec<u8>>), FrostError> {
|
||||
let (secret, commitments, shares) = generate_key_r2::<_, _, C>(
|
||||
commitments: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<(KeyMachine<C>, HashMap<u16, SecretShare<C>>), FrostError> {
|
||||
let (secret, commitments, shares) = generate_key_r2::<_, C>(
|
||||
rng,
|
||||
&self.params,
|
||||
&self.context,
|
||||
@@ -347,10 +353,10 @@ impl<C: Curve> KeyMachine<C> {
|
||||
/// Takes in everyone elses' shares submitted to us. Returns a FrostCore object representing the
|
||||
/// generated keys. Successful protocol completion MUST be confirmed by all parties before these
|
||||
/// keys may be safely used.
|
||||
pub fn complete<Re: Read, R: RngCore + CryptoRng>(
|
||||
pub fn complete<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
shares: HashMap<u16, Re>,
|
||||
shares: HashMap<u16, SecretShare<C>>,
|
||||
) -> Result<FrostCore<C>, FrostError> {
|
||||
complete_r2(rng, self.params, self.secret, &mut self.commitments, shares)
|
||||
}
|
||||
|
||||
@@ -38,6 +38,7 @@ pub mod promote;
|
||||
|
||||
/// Algorithm for the signing process.
|
||||
pub mod algorithm;
|
||||
mod nonce;
|
||||
/// Threshold signing protocol.
|
||||
pub mod sign;
|
||||
|
||||
@@ -45,7 +46,7 @@ pub mod sign;
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
pub mod tests;
|
||||
|
||||
// Validate a map of serialized values to have the expected included participants
|
||||
// Validate a map of values to have the expected included participants
|
||||
pub(crate) fn validate_map<T>(
|
||||
map: &HashMap<u16, T>,
|
||||
included: &[u16],
|
||||
@@ -136,6 +137,8 @@ pub enum FrostError {
|
||||
InvalidCommitment(u16),
|
||||
#[error("invalid proof of knowledge (participant {0})")]
|
||||
InvalidProofOfKnowledge(u16),
|
||||
#[error("invalid preprocess (participant {0})")]
|
||||
InvalidPreprocess(u16),
|
||||
#[error("invalid share (participant {0})")]
|
||||
InvalidShare(u16),
|
||||
|
||||
|
||||
271
crypto/frost/src/nonce.rs
Normal file
271
crypto/frost/src/nonce.rs
Normal file
@@ -0,0 +1,271 @@
|
||||
// FROST defines its nonce as sum(Di, Ei * bi)
|
||||
// Monero needs not just the nonce over G however, yet also over H
|
||||
// Then there is a signature (a modified Chaum Pedersen proof) using multiple nonces at once
|
||||
//
|
||||
// Accordingly, in order for this library to be robust, it supports generating an arbitrary amount
|
||||
// of nonces, each against an arbitrary list of basepoints
|
||||
//
|
||||
// Each nonce remains of the form (d, e) and made into a proper nonce with d + (e * b)
|
||||
// When multiple D, E pairs are provided, a DLEq proof is also provided to confirm their integrity
|
||||
|
||||
use std::{
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use group::{ff::PrimeField, Group, GroupEncoding};
|
||||
use multiexp::multiexp_vartime;
|
||||
|
||||
use dleq::DLEqProof;
|
||||
|
||||
use crate::curve::Curve;
|
||||
|
||||
fn dleq_transcript<T: Transcript>() -> T {
|
||||
T::new(b"FROST_nonce_dleq")
|
||||
}
|
||||
|
||||
// Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper
|
||||
// This is considered a single nonce as r = d + be
|
||||
#[derive(Clone, Zeroize)]
|
||||
pub(crate) struct Nonce<C: Curve>(pub(crate) [C::F; 2]);
|
||||
|
||||
// Commitments to a specific generator for this nonce
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Zeroize)]
|
||||
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
|
||||
impl<C: Curve> GeneratorCommitments<C> {
|
||||
fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
|
||||
Ok(GeneratorCommitments([C::read_G(reader)?, C::read_G(reader)?]))
|
||||
}
|
||||
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.0[0].to_bytes().as_ref())?;
|
||||
writer.write_all(self.0[1].to_bytes().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
// A single nonce's commitments and relevant proofs
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
pub(crate) struct NonceCommitments<C: Curve> {
|
||||
// Called generators as these commitments are indexed by generator
|
||||
pub(crate) generators: Vec<GeneratorCommitments<C>>,
|
||||
// DLEq Proofs proving that these commitments are generated using the same scalar pair
|
||||
// This could be further optimized with a multi-nonce proof, offering just one proof for all
|
||||
// nonces. See https://github.com/serai-dex/serai/issues/38
|
||||
// TODO
|
||||
pub(crate) dleqs: Option<[DLEqProof<C::G>; 2]>,
|
||||
}
|
||||
|
||||
impl<C: Curve> NonceCommitments<C> {
|
||||
pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
|
||||
rng: &mut R,
|
||||
mut secret_share: C::F,
|
||||
generators: &[C::G],
|
||||
) -> (Nonce<C>, NonceCommitments<C>) {
|
||||
let nonce =
|
||||
Nonce([C::random_nonce(secret_share, &mut *rng), C::random_nonce(secret_share, &mut *rng)]);
|
||||
secret_share.zeroize();
|
||||
|
||||
let mut commitments = Vec::with_capacity(generators.len());
|
||||
for generator in generators {
|
||||
commitments.push(GeneratorCommitments([*generator * nonce.0[0], *generator * nonce.0[1]]));
|
||||
}
|
||||
|
||||
let mut dleqs = None;
|
||||
if generators.len() >= 2 {
|
||||
let mut dleq = |nonce| {
|
||||
// Uses an independent transcript as each signer must prove this with their commitments,
|
||||
// yet they're validated while processing everyone's data sequentially, by the global order
|
||||
// This avoids needing to clone and fork the transcript around
|
||||
// TODO: At least include a challenge from the existing transcript
|
||||
DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(), generators, nonce)
|
||||
};
|
||||
dleqs = Some([dleq(nonce.0[0]), dleq(nonce.0[1])]);
|
||||
}
|
||||
|
||||
(nonce, NonceCommitments { generators: commitments, dleqs })
|
||||
}
|
||||
|
||||
fn read<R: Read, T: Transcript>(
|
||||
reader: &mut R,
|
||||
generators: &[C::G],
|
||||
) -> io::Result<NonceCommitments<C>> {
|
||||
let commitments: Vec<GeneratorCommitments<C>> = (0 .. generators.len())
|
||||
.map(|_| GeneratorCommitments::read(reader))
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
let mut dleqs = None;
|
||||
if generators.len() >= 2 {
|
||||
let mut verify = |i| -> io::Result<_> {
|
||||
let dleq = DLEqProof::deserialize(reader)?;
|
||||
dleq
|
||||
.verify(
|
||||
&mut dleq_transcript::<T>(),
|
||||
generators,
|
||||
&commitments.iter().map(|commitments| commitments.0[i]).collect::<Vec<_>>(),
|
||||
)
|
||||
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
|
||||
Ok(dleq)
|
||||
};
|
||||
dleqs = Some([verify(0)?, verify(1)?]);
|
||||
}
|
||||
|
||||
Ok(NonceCommitments { generators: commitments, dleqs })
|
||||
}
|
||||
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
for generator in &self.generators {
|
||||
generator.write(writer)?;
|
||||
}
|
||||
if let Some(dleqs) = &self.dleqs {
|
||||
dleqs[0].serialize(writer)?;
|
||||
dleqs[1].serialize(writer)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
pub(crate) struct Commitments<C: Curve> {
|
||||
// Called nonces as these commitments are indexed by nonce
|
||||
pub(crate) nonces: Vec<NonceCommitments<C>>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Commitments<C> {
|
||||
pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
|
||||
rng: &mut R,
|
||||
secret_share: C::F,
|
||||
planned_nonces: &[Vec<C::G>],
|
||||
) -> (Vec<Nonce<C>>, Commitments<C>) {
|
||||
let mut nonces = vec![];
|
||||
let mut commitments = vec![];
|
||||
for generators in planned_nonces {
|
||||
let (nonce, these_commitments) =
|
||||
NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators);
|
||||
nonces.push(nonce);
|
||||
commitments.push(these_commitments);
|
||||
}
|
||||
(nonces, Commitments { nonces: commitments })
|
||||
}
|
||||
|
||||
pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
|
||||
for nonce in &self.nonces {
|
||||
for commitments in &nonce.generators {
|
||||
t.append_message(b"commitment_D", commitments.0[0].to_bytes().as_ref());
|
||||
t.append_message(b"commitment_E", commitments.0[1].to_bytes().as_ref());
|
||||
}
|
||||
|
||||
// Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce
|
||||
// This means it shouldn't be possible for variadic generators to cause conflicts as they're
|
||||
// committed to as their entire series per-nonce, not as isolates
|
||||
if let Some(dleqs) = &nonce.dleqs {
|
||||
let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
|
||||
let mut buf = Vec::with_capacity(C::G_len() + C::F_len());
|
||||
dleq.serialize(&mut buf).unwrap();
|
||||
t.append_message(label, &buf);
|
||||
};
|
||||
transcript_dleq(b"dleq_D", &dleqs[0]);
|
||||
transcript_dleq(b"dleq_E", &dleqs[1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn read<R: Read, T: Transcript>(
|
||||
reader: &mut R,
|
||||
nonces: &[Vec<C::G>],
|
||||
) -> io::Result<Self> {
|
||||
Ok(Commitments {
|
||||
nonces: (0 .. nonces.len())
|
||||
.map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i]))
|
||||
.collect::<Result<_, _>>()?,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
for nonce in &self.nonces {
|
||||
nonce.write(writer)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Zeroize)]
|
||||
pub(crate) struct IndividualBinding<C: Curve> {
|
||||
commitments: Commitments<C>,
|
||||
binding_factors: Option<Vec<C::F>>,
|
||||
}
|
||||
|
||||
pub(crate) struct BindingFactor<C: Curve>(pub(crate) HashMap<u16, IndividualBinding<C>>);
|
||||
|
||||
impl<C: Curve> Zeroize for BindingFactor<C> {
|
||||
fn zeroize(&mut self) {
|
||||
for (mut validator, mut binding) in self.0.drain() {
|
||||
validator.zeroize();
|
||||
binding.zeroize();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> BindingFactor<C> {
|
||||
pub(crate) fn insert(&mut self, i: u16, commitments: Commitments<C>) {
|
||||
self.0.insert(i, IndividualBinding { commitments, binding_factors: None });
|
||||
}
|
||||
|
||||
pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &mut T) {
|
||||
for (l, binding) in self.0.iter_mut() {
|
||||
let mut transcript = transcript.clone();
|
||||
transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
|
||||
// It *should* be perfectly fine to reuse a binding factor for multiple nonces
|
||||
// This generates a binding factor per nonce just to ensure it never comes up as a question
|
||||
binding.binding_factors = Some(
|
||||
(0 .. binding.commitments.nonces.len())
|
||||
.map(|_| C::hash_binding_factor(transcript.challenge(b"rho").as_ref()))
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn binding_factors(&self, i: u16) -> &[C::F] {
|
||||
self.0[&i].binding_factors.as_ref().unwrap()
|
||||
}
|
||||
|
||||
// Get the bound nonces for a specific party
|
||||
pub(crate) fn bound(&self, l: u16) -> Vec<Vec<C::G>> {
|
||||
let mut res = vec![];
|
||||
for (i, (nonce, rho)) in
|
||||
self.0[&l].commitments.nonces.iter().zip(self.binding_factors(l).iter()).enumerate()
|
||||
{
|
||||
res.push(vec![]);
|
||||
for generator in &nonce.generators {
|
||||
res[i].push(generator.0[0] + (generator.0[1] * rho));
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
// Get the nonces for this signing session
|
||||
pub(crate) fn nonces(&self, planned_nonces: &[Vec<C::G>]) -> Vec<Vec<C::G>> {
|
||||
let mut nonces = Vec::with_capacity(planned_nonces.len());
|
||||
for n in 0 .. planned_nonces.len() {
|
||||
nonces.push(Vec::with_capacity(planned_nonces[n].len()));
|
||||
for g in 0 .. planned_nonces[n].len() {
|
||||
#[allow(non_snake_case)]
|
||||
let mut D = C::G::identity();
|
||||
let mut statements = Vec::with_capacity(self.0.len());
|
||||
#[allow(non_snake_case)]
|
||||
for IndividualBinding { commitments, binding_factors } in self.0.values() {
|
||||
D += commitments.nonces[n].generators[g].0[0];
|
||||
statements
|
||||
.push((binding_factors.as_ref().unwrap()[n], commitments.nonces[n].generators[g].0[1]));
|
||||
}
|
||||
nonces[n].push(D + multiexp_vartime(&statements));
|
||||
}
|
||||
}
|
||||
nonces
|
||||
}
|
||||
}
|
||||
@@ -12,10 +12,7 @@ use group::GroupEncoding;
|
||||
use transcript::{Transcript, RecommendedTranscript};
|
||||
use dleq::DLEqProof;
|
||||
|
||||
use crate::{
|
||||
curve::{CurveError, Curve},
|
||||
FrostError, FrostCore, FrostKeys, validate_map,
|
||||
};
|
||||
use crate::{curve::Curve, FrostError, FrostCore, FrostKeys, validate_map};
|
||||
|
||||
/// Promote a set of keys to another Curve definition.
|
||||
pub trait CurvePromote<C2: Curve> {
|
||||
@@ -73,11 +70,8 @@ impl<C: Curve> GeneratorProof<C> {
|
||||
self.proof.serialize(writer)
|
||||
}
|
||||
|
||||
pub fn deserialize<R: Read>(reader: &mut R) -> Result<GeneratorProof<C>, CurveError> {
|
||||
Ok(GeneratorProof {
|
||||
share: C::read_G(reader)?,
|
||||
proof: DLEqProof::deserialize(reader).map_err(|_| CurveError::InvalidScalar)?,
|
||||
})
|
||||
pub fn deserialize<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
|
||||
Ok(GeneratorProof { share: C::read_G(reader)?, proof: DLEqProof::deserialize(reader)? })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::io::{self, Read, Write};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
@@ -9,7 +11,7 @@ use group::{
|
||||
|
||||
use multiexp::BatchVerifier;
|
||||
|
||||
use crate::Curve;
|
||||
use crate::curve::Curve;
|
||||
|
||||
/// A Schnorr signature of the form (R, s) where s = r + cx.
|
||||
#[allow(non_snake_case)]
|
||||
@@ -20,11 +22,13 @@ pub struct SchnorrSignature<C: Curve> {
|
||||
}
|
||||
|
||||
impl<C: Curve> SchnorrSignature<C> {
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut res = Vec::with_capacity(C::G_len() + C::F_len());
|
||||
res.extend(self.R.to_bytes().as_ref());
|
||||
res.extend(self.s.to_repr().as_ref());
|
||||
res
|
||||
pub(crate) fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Ok(SchnorrSignature { R: C::read_G(reader)?, s: C::read_F(reader)? })
|
||||
}
|
||||
|
||||
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.R.to_bytes().as_ref())?;
|
||||
writer.write_all(self.s.to_repr().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,28 +1,40 @@
|
||||
use core::fmt;
|
||||
use std::{
|
||||
io::{Read, Cursor},
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
use subtle::ConstantTimeEq;
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
Group, GroupEncoding,
|
||||
};
|
||||
use multiexp::multiexp_vartime;
|
||||
|
||||
use dleq::DLEqProof;
|
||||
use group::{ff::PrimeField, GroupEncoding};
|
||||
|
||||
use crate::{
|
||||
curve::Curve, FrostError, FrostParams, FrostKeys, FrostView, algorithm::Algorithm, validate_map,
|
||||
curve::Curve,
|
||||
FrostError, FrostParams, FrostKeys, FrostView,
|
||||
algorithm::{AddendumSerialize, Addendum, Algorithm},
|
||||
validate_map,
|
||||
};
|
||||
|
||||
pub(crate) use crate::nonce::*;
|
||||
|
||||
/// Trait enabling writing preprocesses and signature shares.
|
||||
pub trait Writable {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
|
||||
}
|
||||
|
||||
impl<T: Writable> Writable for Vec<T> {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
for w in self {
|
||||
w.write(writer)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Pairing of an Algorithm with a FrostKeys instance and this specific signing set.
|
||||
#[derive(Clone)]
|
||||
pub struct Params<C: Curve, A: Algorithm<C>> {
|
||||
@@ -31,7 +43,6 @@ pub struct Params<C: Curve, A: Algorithm<C>> {
|
||||
view: FrostView<C>,
|
||||
}
|
||||
|
||||
// Currently public to enable more complex operations as desired, yet solely used in testing
|
||||
impl<C: Curve, A: Algorithm<C>> Params<C, A> {
|
||||
pub fn new(
|
||||
algorithm: A,
|
||||
@@ -79,104 +90,75 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
|
||||
}
|
||||
}
|
||||
|
||||
fn nonce_transcript<T: Transcript>() -> T {
|
||||
T::new(b"FROST_nonce_dleq")
|
||||
/// Preprocess for an instance of the FROST signing protocol.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
pub struct Preprocess<C: Curve, A: Addendum> {
|
||||
pub(crate) commitments: Commitments<C>,
|
||||
pub addendum: A,
|
||||
}
|
||||
|
||||
impl<C: Curve, A: Addendum> Writable for Preprocess<C, A> {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
self.commitments.write(writer)?;
|
||||
self.addendum.write(writer)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Zeroize)]
|
||||
pub(crate) struct PreprocessPackage<C: Curve> {
|
||||
pub(crate) nonces: Vec<[C::F; 2]>,
|
||||
#[zeroize(skip)]
|
||||
pub(crate) commitments: Vec<Vec<[C::G; 2]>>,
|
||||
pub(crate) addendum: Vec<u8>,
|
||||
pub(crate) struct PreprocessData<C: Curve, A: Addendum> {
|
||||
pub(crate) nonces: Vec<Nonce<C>>,
|
||||
pub(crate) preprocess: Preprocess<C, A>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for PreprocessPackage<C> {
|
||||
impl<C: Curve, A: Addendum> Drop for PreprocessData<C, A> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for PreprocessPackage<C> {}
|
||||
impl<C: Curve, A: Addendum> ZeroizeOnDrop for PreprocessData<C, A> {}
|
||||
|
||||
fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
|
||||
rng: &mut R,
|
||||
params: &mut Params<C, A>,
|
||||
) -> (PreprocessPackage<C>, Vec<u8>) {
|
||||
let mut serialized = Vec::with_capacity(2 * C::G_len());
|
||||
let (nonces, commitments) = params
|
||||
.algorithm
|
||||
.nonces()
|
||||
.iter()
|
||||
.map(|generators| {
|
||||
let nonces = [
|
||||
C::random_nonce(params.view().secret_share(), &mut *rng),
|
||||
C::random_nonce(params.view().secret_share(), &mut *rng),
|
||||
];
|
||||
|
||||
let commit = |generator: C::G, buf: &mut Vec<u8>| {
|
||||
let commitments = [generator * nonces[0], generator * nonces[1]];
|
||||
buf.extend(commitments[0].to_bytes().as_ref());
|
||||
buf.extend(commitments[1].to_bytes().as_ref());
|
||||
commitments
|
||||
};
|
||||
|
||||
let mut commitments = Vec::with_capacity(generators.len());
|
||||
for generator in generators.iter() {
|
||||
commitments.push(commit(*generator, &mut serialized));
|
||||
}
|
||||
|
||||
// Provide a DLEq proof to verify these commitments are for the same nonce
|
||||
if generators.len() >= 2 {
|
||||
// Uses an independent transcript as each signer must do this now, yet we validate them
|
||||
// sequentially by the global order. Avoids needing to clone and fork the transcript around
|
||||
let mut transcript = nonce_transcript::<A::Transcript>();
|
||||
|
||||
// This could be further optimized with a multi-nonce proof.
|
||||
// See https://github.com/serai-dex/serai/issues/38
|
||||
for mut nonce in nonces {
|
||||
DLEqProof::prove(&mut *rng, &mut transcript, generators, nonce)
|
||||
.serialize(&mut serialized)
|
||||
.unwrap();
|
||||
nonce.zeroize();
|
||||
}
|
||||
}
|
||||
|
||||
(nonces, commitments)
|
||||
})
|
||||
.unzip();
|
||||
|
||||
) -> (PreprocessData<C, A::Addendum>, Preprocess<C, A::Addendum>) {
|
||||
let (nonces, commitments) = Commitments::new::<_, A::Transcript>(
|
||||
&mut *rng,
|
||||
params.view().secret_share(),
|
||||
¶ms.algorithm.nonces(),
|
||||
);
|
||||
let addendum = params.algorithm.preprocess_addendum(rng, ¶ms.view);
|
||||
serialized.extend(&addendum);
|
||||
|
||||
(PreprocessPackage { nonces, commitments, addendum }, serialized)
|
||||
let preprocess = Preprocess { commitments, addendum };
|
||||
(PreprocessData { nonces, preprocess: preprocess.clone() }, preprocess)
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn read_D_E<Re: Read, C: Curve>(cursor: &mut Re, l: u16) -> Result<[C::G; 2], FrostError> {
|
||||
Ok([
|
||||
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
|
||||
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
|
||||
])
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
struct Package<C: Curve> {
|
||||
B: HashMap<u16, (Vec<Vec<[C::G; 2]>>, C::F)>,
|
||||
struct SignData<C: Curve> {
|
||||
B: BindingFactor<C>,
|
||||
Rs: Vec<Vec<C::G>>,
|
||||
share: C::F,
|
||||
}
|
||||
|
||||
/// Share of a signature produced via FROST.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
pub struct SignatureShare<C: Curve>(C::F);
|
||||
impl<C: Curve> Writable for SignatureShare<C> {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.0.to_repr().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
// Has every signer perform the role of the signature aggregator
|
||||
// Step 1 was already deprecated by performing nonce generation as needed
|
||||
// Step 2 is simply the broadcast round from step 1
|
||||
fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
fn sign_with_share<C: Curve, A: Algorithm<C>>(
|
||||
params: &mut Params<C, A>,
|
||||
our_preprocess: PreprocessPackage<C>,
|
||||
mut commitments: HashMap<u16, Re>,
|
||||
mut our_preprocess: PreprocessData<C, A::Addendum>,
|
||||
mut preprocesses: HashMap<u16, Preprocess<C, A::Addendum>>,
|
||||
msg: &[u8],
|
||||
) -> Result<(Package<C>, Vec<u8>), FrostError> {
|
||||
) -> Result<(SignData<C>, SignatureShare<C>), FrostError> {
|
||||
let multisig_params = params.multisig_params();
|
||||
validate_map(&commitments, ¶ms.view.included, multisig_params.i)?;
|
||||
validate_map(&preprocesses, ¶ms.view.included, multisig_params.i)?;
|
||||
|
||||
{
|
||||
// Domain separate FROST
|
||||
@@ -185,9 +167,9 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
|
||||
let nonces = params.algorithm.nonces();
|
||||
#[allow(non_snake_case)]
|
||||
let mut B = HashMap::<u16, _>::with_capacity(params.view.included.len());
|
||||
let mut B = BindingFactor(HashMap::<u16, _>::with_capacity(params.view.included.len()));
|
||||
{
|
||||
// Parse the commitments
|
||||
// Parse the preprocesses
|
||||
for l in ¶ms.view.included {
|
||||
{
|
||||
params
|
||||
@@ -196,73 +178,39 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
|
||||
}
|
||||
|
||||
// While this doesn't note which nonce/basepoint this is for, those are expected to be
|
||||
// static. Beyond that, they're committed to in the DLEq proof transcripts, ensuring
|
||||
// consistency. While this is suboptimal, it maintains IETF compliance, and Algorithm is
|
||||
// documented accordingly
|
||||
let transcript = |t: &mut A::Transcript, commitments: [C::G; 2]| {
|
||||
if commitments[0].ct_eq(&C::G::identity()).into() ||
|
||||
commitments[1].ct_eq(&C::G::identity()).into()
|
||||
{
|
||||
Err(FrostError::InvalidCommitment(*l))?;
|
||||
}
|
||||
t.append_message(b"commitment_D", commitments[0].to_bytes().as_ref());
|
||||
t.append_message(b"commitment_E", commitments[1].to_bytes().as_ref());
|
||||
Ok(())
|
||||
};
|
||||
|
||||
if *l == params.keys.params().i {
|
||||
for nonce_commitments in &our_preprocess.commitments {
|
||||
for commitments in nonce_commitments {
|
||||
transcript(params.algorithm.transcript(), *commitments).unwrap();
|
||||
}
|
||||
let commitments = our_preprocess.preprocess.commitments.clone();
|
||||
commitments.transcript(params.algorithm.transcript());
|
||||
|
||||
let addendum = our_preprocess.preprocess.addendum.clone();
|
||||
{
|
||||
let mut buf = vec![];
|
||||
addendum.write(&mut buf).unwrap();
|
||||
params.algorithm.transcript().append_message(b"addendum", &buf);
|
||||
}
|
||||
|
||||
B.insert(*l, (our_preprocess.commitments.clone(), C::F::zero()));
|
||||
params.algorithm.process_addendum(
|
||||
¶ms.view,
|
||||
*l,
|
||||
&mut Cursor::new(our_preprocess.addendum.clone()),
|
||||
)?;
|
||||
B.insert(*l, commitments);
|
||||
params.algorithm.process_addendum(¶ms.view, *l, addendum)?;
|
||||
} else {
|
||||
let mut cursor = commitments.remove(l).unwrap();
|
||||
|
||||
let mut commitments = Vec::with_capacity(nonces.len());
|
||||
for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() {
|
||||
commitments.push(Vec::with_capacity(nonce_generators.len()));
|
||||
for _ in 0 .. nonce_generators.len() {
|
||||
commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?);
|
||||
transcript(params.algorithm.transcript(), commitments[n][commitments[n].len() - 1])?;
|
||||
}
|
||||
|
||||
if nonce_generators.len() >= 2 {
|
||||
let mut transcript = nonce_transcript::<A::Transcript>();
|
||||
for de in 0 .. 2 {
|
||||
DLEqProof::deserialize(&mut cursor)
|
||||
.map_err(|_| FrostError::InvalidCommitment(*l))?
|
||||
.verify(
|
||||
&mut transcript,
|
||||
nonce_generators,
|
||||
&commitments[n].iter().map(|commitments| commitments[de]).collect::<Vec<_>>(),
|
||||
)
|
||||
.map_err(|_| FrostError::InvalidCommitment(*l))?;
|
||||
}
|
||||
}
|
||||
let preprocess = preprocesses.remove(l).unwrap();
|
||||
preprocess.commitments.transcript(params.algorithm.transcript());
|
||||
{
|
||||
let mut buf = vec![];
|
||||
preprocess.addendum.write(&mut buf).unwrap();
|
||||
params.algorithm.transcript().append_message(b"addendum", &buf);
|
||||
}
|
||||
|
||||
B.insert(*l, (commitments, C::F::zero()));
|
||||
params.algorithm.process_addendum(¶ms.view, *l, &mut cursor)?;
|
||||
B.insert(*l, preprocess.commitments);
|
||||
params.algorithm.process_addendum(¶ms.view, *l, preprocess.addendum)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Re-format into the FROST-expected rho transcript
|
||||
let mut rho_transcript = A::Transcript::new(b"FROST_rho");
|
||||
rho_transcript.append_message(b"message", &C::hash_msg(msg));
|
||||
// This won't just be the commitments, yet the full existing transcript if used in an extended
|
||||
// protocol
|
||||
rho_transcript.append_message(
|
||||
b"commitments",
|
||||
&C::hash_commitments(params.algorithm.transcript().challenge(b"commitments").as_ref()),
|
||||
b"preprocesses",
|
||||
&C::hash_commitments(params.algorithm.transcript().challenge(b"preprocesses").as_ref()),
|
||||
);
|
||||
|
||||
// Include the offset, if one exists
|
||||
@@ -280,14 +228,10 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
}
|
||||
|
||||
// Generate the per-signer binding factors
|
||||
for (l, commitments) in B.iter_mut() {
|
||||
let mut rho_transcript = rho_transcript.clone();
|
||||
rho_transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
|
||||
commitments.1 = C::hash_binding_factor(rho_transcript.challenge(b"rho").as_ref());
|
||||
}
|
||||
B.calculate_binding_factors(&mut rho_transcript);
|
||||
|
||||
// Merge the rho transcript back into the global one to ensure its advanced while committing to
|
||||
// everything
|
||||
// Merge the rho transcript back into the global one to ensure its advanced, while
|
||||
// simultaneously committing to everything
|
||||
params
|
||||
.algorithm
|
||||
.transcript()
|
||||
@@ -295,60 +239,44 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
let mut Rs = Vec::with_capacity(nonces.len());
|
||||
for n in 0 .. nonces.len() {
|
||||
Rs.push(vec![C::G::identity(); nonces[n].len()]);
|
||||
for g in 0 .. nonces[n].len() {
|
||||
#[allow(non_snake_case)]
|
||||
let mut D = C::G::identity();
|
||||
let mut statements = Vec::with_capacity(B.len());
|
||||
#[allow(non_snake_case)]
|
||||
for (B, binding) in B.values() {
|
||||
D += B[n][g][0];
|
||||
statements.push((*binding, B[n][g][1]));
|
||||
}
|
||||
Rs[n][g] = D + multiexp_vartime(&statements);
|
||||
}
|
||||
}
|
||||
let Rs = B.nonces(&nonces);
|
||||
|
||||
let our_binding_factors = B.binding_factors(multisig_params.i());
|
||||
let mut nonces = our_preprocess
|
||||
.nonces
|
||||
.iter()
|
||||
.map(|nonces| nonces[0] + (nonces[1] * B[¶ms.keys.params().i()].1))
|
||||
.enumerate()
|
||||
.map(|(n, nonces)| nonces.0[0] + (nonces.0[1] * our_binding_factors[n]))
|
||||
.collect::<Vec<_>>();
|
||||
our_preprocess.nonces.zeroize();
|
||||
|
||||
let share = params.algorithm.sign_share(¶ms.view, &Rs, &nonces, msg);
|
||||
nonces.zeroize();
|
||||
|
||||
Ok((Package { B, Rs, share }, share.to_repr().as_ref().to_vec()))
|
||||
Ok((SignData { B, Rs, share }, SignatureShare(share)))
|
||||
}
|
||||
|
||||
fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
fn complete<C: Curve, A: Algorithm<C>>(
|
||||
sign_params: &Params<C, A>,
|
||||
sign: Package<C>,
|
||||
mut shares: HashMap<u16, Re>,
|
||||
sign: SignData<C>,
|
||||
mut shares: HashMap<u16, SignatureShare<C>>,
|
||||
) -> Result<A::Signature, FrostError> {
|
||||
let params = sign_params.multisig_params();
|
||||
validate_map(&shares, &sign_params.view.included, params.i)?;
|
||||
|
||||
let mut responses = HashMap::new();
|
||||
let mut sum = C::F::zero();
|
||||
for l in &sign_params.view.included {
|
||||
let part = if *l == params.i {
|
||||
sign.share
|
||||
} else {
|
||||
C::read_F(shares.get_mut(l).unwrap()).map_err(|_| FrostError::InvalidShare(*l))?
|
||||
};
|
||||
sum += part;
|
||||
responses.insert(*l, part);
|
||||
responses.insert(params.i(), sign.share);
|
||||
let mut sum = sign.share;
|
||||
for (l, share) in shares.drain() {
|
||||
responses.insert(l, share.0);
|
||||
sum += share.0;
|
||||
}
|
||||
|
||||
// Perform signature validation instead of individual share validation
|
||||
// For the success route, which should be much more frequent, this should be faster
|
||||
// It also acts as an integrity check of this library's signing function
|
||||
let res = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum);
|
||||
if let Some(res) = res {
|
||||
return Ok(res);
|
||||
if let Some(sig) = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum) {
|
||||
return Ok(sig);
|
||||
}
|
||||
|
||||
// Find out who misbehaved. It may be beneficial to randomly sort this to have detection be
|
||||
@@ -356,13 +284,7 @@ fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
for l in &sign_params.view.included {
|
||||
if !sign_params.algorithm.verify_share(
|
||||
sign_params.view.verification_share(*l),
|
||||
&sign.B[l]
|
||||
.0
|
||||
.iter()
|
||||
.map(|nonces| {
|
||||
nonces.iter().map(|commitments| commitments[0] + (commitments[1] * sign.B[l].1)).collect()
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
&sign.B.bound(*l),
|
||||
responses[l],
|
||||
) {
|
||||
Err(FrostError::InvalidShare(*l))?;
|
||||
@@ -375,33 +297,53 @@ fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
|
||||
|
||||
/// Trait for the initial state machine of a two-round signing protocol.
|
||||
pub trait PreprocessMachine {
|
||||
/// Preprocess message for this machine.
|
||||
type Preprocess: Clone + PartialEq + Writable;
|
||||
/// Signature produced by this machine.
|
||||
type Signature: Clone + PartialEq + fmt::Debug;
|
||||
type SignMachine: SignMachine<Self::Signature>;
|
||||
/// SignMachine this PreprocessMachine turns into.
|
||||
type SignMachine: SignMachine<Self::Signature, Preprocess = Self::Preprocess>;
|
||||
|
||||
/// Perform the preprocessing round required in order to sign.
|
||||
/// Returns a byte vector to be broadcast to all participants, over an authenticated channel.
|
||||
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R) -> (Self::SignMachine, Vec<u8>);
|
||||
/// Returns a preprocess message to be broadcast to all participants, over an authenticated
|
||||
/// channel.
|
||||
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R)
|
||||
-> (Self::SignMachine, Self::Preprocess);
|
||||
}
|
||||
|
||||
/// Trait for the second machine of a two-round signing protocol.
|
||||
pub trait SignMachine<S> {
|
||||
type SignatureMachine: SignatureMachine<S>;
|
||||
/// Preprocess message for this machine.
|
||||
type Preprocess: Clone + PartialEq + Writable;
|
||||
/// SignatureShare message for this machine.
|
||||
type SignatureShare: Clone + PartialEq + Writable;
|
||||
/// SignatureMachine this SignMachine turns into.
|
||||
type SignatureMachine: SignatureMachine<S, SignatureShare = Self::SignatureShare>;
|
||||
|
||||
/// Read a Preprocess message.
|
||||
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess>;
|
||||
|
||||
/// Sign a message.
|
||||
/// Takes in the participants' preprocesses. Returns a byte vector representing a signature share
|
||||
/// to be broadcast to all participants, over an authenticated channel.
|
||||
fn sign<Re: Read>(
|
||||
/// Takes in the participants' preprocess messages. Returns the signature share to be broadcast
|
||||
/// to all participants, over an authenticated channel.
|
||||
fn sign(
|
||||
self,
|
||||
commitments: HashMap<u16, Re>,
|
||||
commitments: HashMap<u16, Self::Preprocess>,
|
||||
msg: &[u8],
|
||||
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError>;
|
||||
) -> Result<(Self::SignatureMachine, Self::SignatureShare), FrostError>;
|
||||
}
|
||||
|
||||
/// Trait for the final machine of a two-round signing protocol.
|
||||
pub trait SignatureMachine<S> {
|
||||
/// SignatureShare message for this machine.
|
||||
type SignatureShare: Clone + PartialEq + Writable;
|
||||
|
||||
/// Read a Signature Share message.
|
||||
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare>;
|
||||
|
||||
/// Complete signing.
|
||||
/// Takes in everyone elses' shares. Returns the signature.
|
||||
fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<S, FrostError>;
|
||||
fn complete(self, shares: HashMap<u16, Self::SignatureShare>) -> Result<S, FrostError>;
|
||||
}

/// State machine which manages signing for an arbitrary signature algorithm.
@@ -412,13 +354,13 @@ pub struct AlgorithmMachine<C: Curve, A: Algorithm<C>> {
/// Next step of the state machine for the signing process.
pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>,
preprocess: PreprocessPackage<C>,
preprocess: PreprocessData<C, A::Addendum>,
}

/// Final step of the state machine for the signing process.
pub struct AlgorithmSignatureMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>,
sign: Package<C>,
sign: SignData<C>,
}

impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
@@ -434,39 +376,58 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
#[cfg(any(test, feature = "tests"))]
pub(crate) fn unsafe_override_preprocess(
self,
preprocess: PreprocessPackage<C>,
preprocess: PreprocessData<C, A::Addendum>,
) -> AlgorithmSignMachine<C, A> {
AlgorithmSignMachine { params: self.params, preprocess }
}
}

impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> {
type Preprocess = Preprocess<C, A::Addendum>;
type Signature = A::Signature;
type SignMachine = AlgorithmSignMachine<C, A>;

fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R) -> (Self::SignMachine, Vec<u8>) {
fn preprocess<R: RngCore + CryptoRng>(
self,
rng: &mut R,
) -> (Self::SignMachine, Preprocess<C, A::Addendum>) {
let mut params = self.params;
let (preprocess, serialized) = preprocess::<R, C, A>(rng, &mut params);
(AlgorithmSignMachine { params, preprocess }, serialized)
let (preprocess, public) = preprocess::<R, C, A>(rng, &mut params);
(AlgorithmSignMachine { params, preprocess }, public)
}
}

impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachine<C, A> {
type Preprocess = Preprocess<C, A::Addendum>;
type SignatureShare = SignatureShare<C>;
type SignatureMachine = AlgorithmSignatureMachine<C, A>;

fn sign<Re: Read>(
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
Ok(Preprocess {
commitments: Commitments::read::<_, A::Transcript>(reader, &self.params.algorithm.nonces())?,
addendum: self.params.algorithm.read_addendum(reader)?,
})
}

fn sign(
self,
commitments: HashMap<u16, Re>,
commitments: HashMap<u16, Preprocess<C, A::Addendum>>,
msg: &[u8],
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError> {
) -> Result<(Self::SignatureMachine, SignatureShare<C>), FrostError> {
let mut params = self.params;
let (sign, serialized) = sign_with_share(&mut params, self.preprocess, commitments, msg)?;
Ok((AlgorithmSignatureMachine { params, sign }, serialized))
let (sign, public) = sign_with_share(&mut params, self.preprocess, commitments, msg)?;
Ok((AlgorithmSignatureMachine { params, sign }, public))
}
}

impl<C: Curve, A: Algorithm<C>> SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> {
fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<A::Signature, FrostError> {
type SignatureShare = SignatureShare<C>;

fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<SignatureShare<C>> {
Ok(SignatureShare(C::read_F(reader)?))
}

fn complete(self, shares: HashMap<u16, SignatureShare<C>>) -> Result<A::Signature, FrostError> {
complete(&self.params, self.sign, shares)
}
}

@@ -1,5 +1,3 @@
use std::io::Cursor;

use rand_core::{RngCore, CryptoRng};

use group::Group;
@@ -15,7 +13,10 @@ fn key_generation<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// Test serialization of generated keys
fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
for (_, keys) in core_gen::<_, C>(rng) {
assert_eq!(&FrostCore::<C>::deserialize(&mut Cursor::new(keys.serialize())).unwrap(), &keys);
assert_eq!(
&FrostCore::<C>::deserialize::<&[u8]>(&mut keys.serialize().as_ref()).unwrap(),
&keys
);
}
}

@@ -1,5 +1,3 @@
use std::io::Cursor;

use rand_core::OsRng;

use crate::{
@@ -13,32 +11,31 @@ fn ed448_8032_vector() {
let context = hex::decode("666f6f").unwrap();

#[allow(non_snake_case)]
let A = Ed448::read_G(&mut Cursor::new(
hex::decode(
let A = Ed448::read_G::<&[u8]>(
&mut hex::decode(
"43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c".to_owned() +
"6798c0866aea01eb00742802b8438ea4cb82169c235160627b4c3a94" +
"80",
)
.unwrap(),
))
.unwrap()
.as_ref(),
)
.unwrap();

let msg = hex::decode("03").unwrap();

let mut sig = Cursor::new(
hex::decode(
"d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b3".to_owned() +
"2a89f7d2151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea" +
"00" +
"0c85741de5c8da1144a6a1aba7f96de42505d7a7298524fda538fccb" +
"bb754f578c1cad10d54d0d5428407e85dcbc98a49155c13764e66c3c" +
"00",
)
.unwrap(),
);
let sig = hex::decode(
"d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b3".to_owned() +
"2a89f7d2151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea" +
"00" +
"0c85741de5c8da1144a6a1aba7f96de42505d7a7298524fda538fccb" +
"bb754f578c1cad10d54d0d5428407e85dcbc98a49155c13764e66c3c" +
"00",
)
.unwrap();
#[allow(non_snake_case)]
let R = Ed448::read_G(&mut sig).unwrap();
let s = Ed448::read_F(&mut sig).unwrap();
let R = Ed448::read_G::<&[u8]>(&mut sig.as_ref()).unwrap();
let s = Ed448::read_F::<&[u8]>(&mut &sig[57 ..]).unwrap();

assert!(verify(
A,

@@ -1,4 +1,4 @@
use std::{io::Cursor, collections::HashMap};
use std::collections::HashMap;

use rand_core::{RngCore, CryptoRng};

@@ -6,9 +6,9 @@ use group::ff::Field;

use crate::{
Curve, FrostParams, FrostCore, FrostKeys, lagrange,
key_gen::KeyGenMachine,
key_gen::{SecretShare, Commitments as KGCommitments, KeyGenMachine},
algorithm::Algorithm,
sign::{PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
};

/// Curve tests.
@@ -50,15 +50,32 @@ pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, F
);
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
commitments.insert(i, Cursor::new(these_commitments));

commitments.insert(i, {
let mut buf = vec![];
these_commitments.write(&mut buf).unwrap();
KGCommitments::read::<&[u8]>(
&mut buf.as_ref(),
FrostParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 },
)
.unwrap()
});
}

let mut secret_shares = HashMap::new();
let mut machines = machines
.drain()
.map(|(l, machine)| {
let (machine, shares) =
let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
let mut buf = vec![];
share.write(&mut buf).unwrap();
(l, SecretShare::<C>::read::<&[u8]>(&mut buf.as_ref()).unwrap())
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares);
(l, machine)
})
@@ -74,7 +91,7 @@ pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, F
if i == *l {
continue;
}
our_secret_shares.insert(*l, Cursor::new(shares[&i].clone()));
our_secret_shares.insert(*l, shares[&i].clone());
}
let these_keys = machine.complete(rng, our_secret_shares).unwrap();

@@ -154,7 +171,11 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
.drain()
.map(|(i, machine)| {
let (machine, preprocess) = machine.preprocess(rng);
commitments.insert(i, Cursor::new(preprocess));
commitments.insert(i, {
let mut buf = vec![];
preprocess.write(&mut buf).unwrap();
machine.read_preprocess::<&[u8]>(&mut buf.as_ref()).unwrap()
});
(i, machine)
})
.collect::<HashMap<_, _>>();
@@ -164,7 +185,11 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
.drain()
.map(|(i, machine)| {
let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap();
shares.insert(i, Cursor::new(share));
shares.insert(i, {
let mut buf = vec![];
share.write(&mut buf).unwrap();
machine.read_share::<&[u8]>(&mut buf.as_ref()).unwrap()
});
(i, machine)
})
.collect::<HashMap<_, _>>();

@@ -1,4 +1,4 @@
use std::{io::Cursor, collections::HashMap};
use std::collections::HashMap;
#[cfg(test)]
use std::str::FromStr;

@@ -10,7 +10,10 @@ use crate::{
curve::Curve,
FrostCore, FrostKeys,
algorithm::{Schnorr, Hram},
sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine},
sign::{
Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess,
PreprocessData, SignMachine, SignatureMachine, AlgorithmMachine,
},
tests::{
clone_without, curve::test_curve, schnorr::test_schnorr, promote::test_promotion, recover,
},
@@ -78,12 +81,13 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
let shares = vectors
.shares
.iter()
.map(|secret| C::read_F(&mut Cursor::new(hex::decode(secret).unwrap())).unwrap())
.map(|secret| C::read_F::<&[u8]>(&mut hex::decode(secret).unwrap().as_ref()).unwrap())
.collect::<Vec<_>>();
let verification_shares = shares.iter().map(|secret| C::generator() * secret).collect::<Vec<_>>();

let mut keys = HashMap::new();
for i in 1 ..= u16::try_from(shares.len()).unwrap() {
// Manually re-implement the serialization for FrostCore to import this data
let mut serialized = vec![];
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID);
@@ -95,7 +99,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
serialized.extend(share.to_bytes().as_ref());
}

let these_keys = FrostCore::<C>::deserialize(&mut Cursor::new(serialized)).unwrap();
let these_keys = FrostCore::<C>::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap();
assert_eq!(these_keys.params().t(), vectors.threshold);
assert_eq!(usize::from(these_keys.params().n()), shares.len());
assert_eq!(these_keys.params().i(), i);
@@ -118,8 +122,10 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(

// Test against the vectors
let keys = vectors_to_multisig_keys::<C>(&vectors);
let group_key = C::read_G(&mut Cursor::new(hex::decode(&vectors.group_key).unwrap())).unwrap();
let secret = C::read_F(&mut Cursor::new(hex::decode(&vectors.group_secret).unwrap())).unwrap();
let group_key =
C::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap();
let secret =
C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap();
assert_eq!(C::generator() * secret, group_key);
assert_eq!(recover(&keys), secret);

@@ -142,27 +148,36 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
.drain(..)
.map(|(i, machine)| {
let nonces = [
C::read_F(&mut Cursor::new(hex::decode(&vectors.nonces[c][0]).unwrap())).unwrap(),
C::read_F(&mut Cursor::new(hex::decode(&vectors.nonces[c][1]).unwrap())).unwrap(),
C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][0]).unwrap().as_ref()).unwrap(),
C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][1]).unwrap().as_ref()).unwrap(),
];
c += 1;
let these_commitments = vec![[C::generator() * nonces[0], C::generator() * nonces[1]]];
let machine = machine.unsafe_override_preprocess(PreprocessPackage {
nonces: vec![nonces],
commitments: vec![these_commitments.clone()],
addendum: vec![],
let these_commitments = [C::generator() * nonces[0], C::generator() * nonces[1]];
let machine = machine.unsafe_override_preprocess(PreprocessData {
nonces: vec![Nonce(nonces)],
preprocess: Preprocess {
commitments: Commitments {
nonces: vec![NonceCommitments {
generators: vec![GeneratorCommitments(these_commitments)],
dleqs: None,
}],
},
addendum: (),
},
});

commitments.insert(
*i,
Cursor::new(
[
these_commitments[0][0].to_bytes().as_ref(),
these_commitments[0][1].to_bytes().as_ref(),
]
.concat()
.to_vec(),
),
machine
.read_preprocess::<&[u8]>(
&mut [
these_commitments[0].to_bytes().as_ref(),
these_commitments[1].to_bytes().as_ref(),
]
.concat()
.as_ref(),
)
.unwrap(),
);
(i, machine)
})
@@ -176,10 +191,15 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
let (machine, share) =
machine.sign(clone_without(&commitments, i), &hex::decode(&vectors.msg).unwrap()).unwrap();

let share = {
let mut buf = vec![];
share.write(&mut buf).unwrap();
buf
};
assert_eq!(share, hex::decode(&vectors.sig_shares[c]).unwrap());
c += 1;

shares.insert(*i, Cursor::new(share));
shares.insert(*i, machine.read_share::<&[u8]>(&mut share.as_ref()).unwrap());
(i, machine)
})
.collect::<HashMap<_, _>>();