Create dedicated message structures for FROST messages (#140)

* Create message types for FROST key gen

Taking in reader borrows absolutely wasn't feasible. Now, proper types
which can be read (and then passed directly, without a mutable borrow)
exist for key_gen. sign coming next.

* Move FROST signing to messages, not Readers/Writers/Vec<u8>

Also takes the nonce handling code and makes a dedicated file for it, 
aiming to resolve complex types and make the code more legible by 
replacing its previously inlined state.

* clippy

* Update FROST tests

* read_signature_share

* Update the Monero library to the new FROST packages

* Update processor to latest FROST

* Tweaks to terminology and documentation
This commit is contained in:
Luke Parker
2022-10-25 23:17:25 -05:00
committed by GitHub
parent ccdb834e6e
commit cbceaff678
26 changed files with 874 additions and 591 deletions

4
Cargo.lock generated
View File

@@ -1614,7 +1614,7 @@ dependencies = [
[[package]] [[package]]
name = "dleq" name = "dleq"
version = "0.1.1" version = "0.1.2"
dependencies = [ dependencies = [
"blake2", "blake2",
"dalek-ff-group", "dalek-ff-group",
@@ -4507,7 +4507,7 @@ dependencies = [
[[package]] [[package]]
name = "modular-frost" name = "modular-frost"
version = "0.2.4" version = "0.3.0"
dependencies = [ dependencies = [
"dalek-ff-group", "dalek-ff-group",
"dleq", "dleq",

View File

@@ -34,7 +34,7 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] } multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.1", features = ["recommended"], optional = true } transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.1", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.2", features = ["ed25519"], optional = true } frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.3", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.1", features = ["serialize"], optional = true } dleq = { path = "../../crypto/dleq", version = "0.1", features = ["serialize"], optional = true }
monero-generators = { path = "generators", version = "0.1" } monero-generators = { path = "generators", version = "0.1" }
@@ -55,7 +55,7 @@ monero-generators = { path = "generators", version = "0.1" }
[dev-dependencies] [dev-dependencies]
tokio = { version = "1", features = ["full"] } tokio = { version = "1", features = ["full"] }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.2", features = ["ed25519", "tests"] } frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.3", features = ["ed25519", "tests"] }
[features] [features]
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"] multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]

View File

@@ -1,73 +0,0 @@
use std::io::Read;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use group::{Group, GroupEncoding};
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
/// Errors returned by the multisig DLEq helpers in this file.
#[derive(Clone, Error, Debug)]
pub(crate) enum MultisigError {
/// The DLEq proof tying a participant's key-image share to their
/// verification share failed; the u16 is that participant's FROST index.
#[error("invalid discrete log equality proof")]
InvalidDLEqProof(u16),
}
/// Domain-separated transcript used for the key-image DLEq proof. Every
/// prover and verifier must use this exact transcript for proofs to verify.
fn transcript() -> RecommendedTranscript {
RecommendedTranscript::new(b"monero_key_image_dleq")
}
/// Prove, in zero knowledge, that `xH` shares a discrete logarithm with the
/// prover's public key `xG` (over the generator), returning the serialized
/// proof. The secret scalar `x` is zeroized before returning.
#[allow(non_snake_case)]
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
rng: &mut R,
H: EdwardsPoint,
mut x: Scalar,
) -> Vec<u8> {
// 64 bytes: the proof's two scalars (c, s), 32 bytes each
let mut res = Vec::with_capacity(64);
DLEqProof::prove(
rng,
// Doesn't take in a larger transcript object due to the usage of this
// Every prover would immediately write their own DLEq proof, when they can only do so in
// the proper order if they want to reach consensus
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
// merge later in some form, when it should instead just merge xH (as it does)
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
dfg::Scalar(x),
)
// Serializing into a Vec is infallible, hence the unwrap
.serialize(&mut res)
.unwrap();
x.zeroize();
res
}
/// Read a key-image share (`xH`) and its DLEq proof from a reader, verifying
/// the proof against participant `l`'s verification share `xG`.
///
/// Returns the torsion-free, canonically-encoded `xH` on success. Every
/// decoding or verification failure maps to `MultisigError::InvalidDLEqProof(l)`.
#[allow(non_snake_case)]
pub(crate) fn read_dleq<Re: Read>(
serialized: &mut Re,
H: EdwardsPoint,
l: u16,
xG: dfg::EdwardsPoint,
) -> Result<dfg::EdwardsPoint, MultisigError> {
let mut bytes = [0; 32];
serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or(MultisigError::InvalidDLEqProof(l))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(MultisigError::InvalidDLEqProof(l))?;
}
// Deserialize the proof and verify xH shares a discrete log with xG
DLEqProof::<dfg::EdwardsPoint>::deserialize(serialized)
.map_err(|_| MultisigError::InvalidDLEqProof(l))?
.verify(&mut transcript(), &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)], &[xG, xH])
.map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(xH)
}

View File

@@ -33,9 +33,6 @@ use curve25519_dalek::{
pub use monero_generators::H; pub use monero_generators::H;
#[cfg(feature = "multisig")]
pub(crate) mod frost;
mod serialize; mod serialize;
/// RingCT structs and functionality. /// RingCT structs and functionality.

View File

@@ -22,7 +22,7 @@ use crate::{
#[cfg(feature = "multisig")] #[cfg(feature = "multisig")]
mod multisig; mod multisig;
#[cfg(feature = "multisig")] #[cfg(feature = "multisig")]
pub use multisig::{ClsagDetails, ClsagMultisig}; pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
lazy_static! { lazy_static! {
static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert(); static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert();

View File

@@ -1,6 +1,6 @@
use core::fmt::Debug; use core::fmt::Debug;
use std::{ use std::{
io::Read, io::{self, Read, Write},
sync::{Arc, RwLock}, sync::{Arc, RwLock},
}; };
@@ -16,20 +16,26 @@ use curve25519_dalek::{
edwards::EdwardsPoint, edwards::EdwardsPoint,
}; };
use group::Group; use group::{Group, GroupEncoding};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use frost::{curve::Ed25519, FrostError, FrostView, algorithm::Algorithm};
use dalek_ff_group as dfg; use dalek_ff_group as dfg;
use dleq::DLEqProof;
use crate::{ use frost::{
frost::{write_dleq, read_dleq}, curve::Ed25519,
ringct::{ FrostError, FrostView,
hash_to_point, algorithm::{AddendumSerialize, Algorithm},
clsag::{ClsagInput, Clsag},
},
}; };
use crate::ringct::{
hash_to_point,
clsag::{ClsagInput, Clsag},
};
fn dleq_transcript() -> RecommendedTranscript {
RecommendedTranscript::new(b"monero_key_image_dleq")
}
impl ClsagInput { impl ClsagInput {
fn transcript<T: Transcript>(&self, transcript: &mut T) { fn transcript<T: Transcript>(&self, transcript: &mut T) {
// Doesn't domain separate as this is considered part of the larger CLSAG proof // Doesn't domain separate as this is considered part of the larger CLSAG proof
@@ -54,7 +60,7 @@ impl ClsagInput {
} }
} }
/// CLSAG Input and the mask to use for it. /// CLSAG input and the mask to use for it.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)] #[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagDetails { pub struct ClsagDetails {
input: ClsagInput, input: ClsagInput,
@@ -67,6 +73,20 @@ impl ClsagDetails {
} }
} }
/// Addendum produced during the FROST signing process with relevant data.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct ClsagAddendum {
// The signer's key-image share: their secret share times the CLSAG's H point.
pub(crate) key_image: dfg::EdwardsPoint,
// Proof the key-image share uses the same discrete log as the signer's
// verification share.
dleq: DLEqProof<dfg::EdwardsPoint>,
}
impl AddendumSerialize for ClsagAddendum {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
// Compressed key-image point, then the DLEq proof's own serialization
writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
self.dleq.serialize(writer)
}
}
#[allow(non_snake_case)] #[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
struct Interim { struct Interim {
@@ -113,10 +133,6 @@ impl ClsagMultisig {
} }
} }
pub(crate) const fn serialized_len() -> usize {
32 + (2 * 32)
}
fn input(&self) -> ClsagInput { fn input(&self) -> ClsagInput {
(*self.details.read().unwrap()).as_ref().unwrap().input.clone() (*self.details.read().unwrap()).as_ref().unwrap().input.clone()
} }
@@ -128,6 +144,7 @@ impl ClsagMultisig {
impl Algorithm<Ed25519> for ClsagMultisig { impl Algorithm<Ed25519> for ClsagMultisig {
type Transcript = RecommendedTranscript; type Transcript = RecommendedTranscript;
type Addendum = ClsagAddendum;
type Signature = (Clsag, EdwardsPoint); type Signature = (Clsag, EdwardsPoint);
fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> { fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
@@ -138,18 +155,42 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&mut self, &mut self,
rng: &mut R, rng: &mut R,
view: &FrostView<Ed25519>, view: &FrostView<Ed25519>,
) -> Vec<u8> { ) -> ClsagAddendum {
let mut serialized = Vec::with_capacity(Self::serialized_len()); ClsagAddendum {
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes()); key_image: dfg::EdwardsPoint(self.H * view.secret_share().0),
serialized.extend(write_dleq(rng, self.H, view.secret_share().0)); dleq: DLEqProof::prove(
serialized rng,
// Doesn't take in a larger transcript object due to the usage of this
// Every prover would immediately write their own DLEq proof, when they can only do so in
// the proper order if they want to reach consensus
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
// try to merge later in some form, when it should instead just merge xH (as it does)
&mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
dfg::Scalar(view.secret_share().0),
),
}
} }
fn process_addendum<Re: Read>( fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
let mut bytes = [0; 32];
reader.read_exact(&mut bytes)?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
}
Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::deserialize(reader)? })
}
fn process_addendum(
&mut self, &mut self,
view: &FrostView<Ed25519>, view: &FrostView<Ed25519>,
l: u16, l: u16,
serialized: &mut Re, addendum: ClsagAddendum,
) -> Result<(), FrostError> { ) -> Result<(), FrostError> {
if self.image.is_identity() { if self.image.is_identity() {
self.transcript.domain_separate(b"CLSAG"); self.transcript.domain_separate(b"CLSAG");
@@ -158,11 +199,20 @@ impl Algorithm<Ed25519> for ClsagMultisig {
} }
self.transcript.append_message(b"participant", &l.to_be_bytes()); self.transcript.append_message(b"participant", &l.to_be_bytes());
let image = read_dleq(serialized, self.H, l, view.verification_share(l))
.map_err(|_| FrostError::InvalidCommitment(l))? addendum
.0; .dleq
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref()); .verify(
self.image += image; &mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
&[view.verification_share(l), addendum.key_image],
)
.map_err(|_| FrostError::InvalidPreprocess(l))?;
self
.transcript
.append_message(b"key_image_share", addendum.key_image.compress().to_bytes().as_ref());
self.image += addendum.key_image.0;
Ok(()) Ok(())
} }

View File

@@ -19,10 +19,7 @@ use crate::{
}, },
}; };
#[cfg(feature = "multisig")] #[cfg(feature = "multisig")]
use crate::{ use crate::ringct::clsag::{ClsagDetails, ClsagMultisig};
frost::MultisigError,
ringct::clsag::{ClsagDetails, ClsagMultisig},
};
#[cfg(feature = "multisig")] #[cfg(feature = "multisig")]
use frost::tests::{key_gen, algorithm_machines, sign}; use frost::tests::{key_gen, algorithm_machines, sign};
@@ -79,7 +76,7 @@ fn clsag() {
#[cfg(feature = "multisig")] #[cfg(feature = "multisig")]
#[test] #[test]
fn clsag_multisig() -> Result<(), MultisigError> { fn clsag_multisig() {
let keys = key_gen::<_, Ed25519>(&mut OsRng); let keys = key_gen::<_, Ed25519>(&mut OsRng);
let randomness = random_scalar(&mut OsRng); let randomness = random_scalar(&mut OsRng);
@@ -125,6 +122,4 @@ fn clsag_multisig() -> Result<(), MultisigError> {
), ),
&[1; 32], &[1; 32],
); );
Ok(())
} }

View File

@@ -1,5 +1,5 @@
use std::{ use std::{
io::{Read, Cursor}, io::{self, Read},
sync::{Arc, RwLock}, sync::{Arc, RwLock},
collections::HashMap, collections::HashMap,
}; };
@@ -7,26 +7,22 @@ use std::{
use rand_core::{RngCore, CryptoRng, SeedableRng}; use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng; use rand_chacha::ChaCha20Rng;
use curve25519_dalek::{ use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
traits::Identity,
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use frost::{ use frost::{
curve::Ed25519, curve::Ed25519,
FrostError, FrostKeys, FrostError, FrostKeys,
sign::{ sign::{
PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, AlgorithmSignMachine, Writable, Preprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine,
AlgorithmSignatureMachine, AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
}, },
}; };
use crate::{ use crate::{
random_scalar, random_scalar,
ringct::{ ringct::{
clsag::{ClsagInput, ClsagDetails, ClsagMultisig}, clsag::{ClsagInput, ClsagDetails, ClsagAddendum, ClsagMultisig},
RctPrunable, RctPrunable,
}, },
transaction::{Input, Transaction}, transaction::{Input, Transaction},
@@ -58,7 +54,7 @@ pub struct TransactionSignMachine {
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>, inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>, clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>,
our_preprocess: Vec<u8>, our_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
} }
pub struct TransactionSignatureMachine { pub struct TransactionSignatureMachine {
@@ -166,28 +162,26 @@ impl SignableTransaction {
} }
impl PreprocessMachine for TransactionMachine { impl PreprocessMachine for TransactionMachine {
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
type Signature = Transaction; type Signature = Transaction;
type SignMachine = TransactionSignMachine; type SignMachine = TransactionSignMachine;
fn preprocess<R: RngCore + CryptoRng>( fn preprocess<R: RngCore + CryptoRng>(
mut self, mut self,
rng: &mut R, rng: &mut R,
) -> (TransactionSignMachine, Vec<u8>) { ) -> (TransactionSignMachine, Self::Preprocess) {
// Iterate over each CLSAG calling preprocess // Iterate over each CLSAG calling preprocess
let mut serialized = Vec::with_capacity( let mut preprocesses = Vec::with_capacity(self.clsags.len());
// D_{G, H}, E_{G, H}, DLEqs, key image addendum
self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len()),
);
let clsags = self let clsags = self
.clsags .clsags
.drain(..) .drain(..)
.map(|clsag| { .map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng); let (clsag, preprocess) = clsag.preprocess(rng);
serialized.extend(&preprocess); preprocesses.push(preprocess);
clsag clsag
}) })
.collect(); .collect();
let our_preprocess = serialized.clone(); let our_preprocess = preprocesses.clone();
// We could add further entropy here, and previous versions of this library did so // We could add further entropy here, and previous versions of this library did so
// As of right now, the multisig's key, the inputs being spent, and the FROST data itself // As of right now, the multisig's key, the inputs being spent, and the FROST data itself
@@ -212,33 +206,35 @@ impl PreprocessMachine for TransactionMachine {
our_preprocess, our_preprocess,
}, },
serialized, preprocesses,
) )
} }
} }
impl SignMachine<Transaction> for TransactionSignMachine { impl SignMachine<Transaction> for TransactionSignMachine {
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
type SignatureShare = Vec<SignatureShare<Ed25519>>;
type SignatureMachine = TransactionSignatureMachine; type SignatureMachine = TransactionSignatureMachine;
fn sign<Re: Read>( fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
self.clsags.iter().map(|clsag| clsag.read_preprocess(reader)).collect()
}
fn sign(
mut self, mut self,
mut commitments: HashMap<u16, Re>, mut commitments: HashMap<u16, Self::Preprocess>,
msg: &[u8], msg: &[u8],
) -> Result<(TransactionSignatureMachine, Vec<u8>), FrostError> { ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() { if !msg.is_empty() {
Err(FrostError::InternalError( Err(FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own", "message was passed to the TransactionMachine when it generates its own",
))?; ))?;
} }
// FROST commitments and their DLEqs, and the image and its DLEq
const CLSAG_LEN: usize = (2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len();
// Convert the unified commitments to a Vec of the individual commitments // Convert the unified commitments to a Vec of the individual commitments
let mut images = vec![EdwardsPoint::identity(); self.clsags.len()]; let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
let mut commitments = (0 .. self.clsags.len()) let mut commitments = (0 .. self.clsags.len())
.map(|c| { .map(|c| {
let mut buf = [0; CLSAG_LEN];
self self
.included .included
.iter() .iter()
@@ -248,31 +244,27 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// transcripts cloned from this TX's initial premise's transcript. For our TX // transcripts cloned from this TX's initial premise's transcript. For our TX
// transcript to have the CLSAG data for entropy, it'll have to be added ourselves here // transcript to have the CLSAG data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes()); self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice()); let preprocess = if *l == self.i {
self.our_preprocess[c].clone()
} else { } else {
commitments commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone()
.get_mut(l) };
.ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf) {
.map_err(|_| FrostError::InvalidCommitment(*l))?; let mut buf = vec![];
preprocess.write(&mut buf).unwrap();
self.transcript.append_message(b"preprocess", &buf);
} }
self.transcript.append_message(b"preprocess", &buf);
// While here, calculate the key image // While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well // Clsag will parse/calculate/validate this as needed, yet doing so here as well
// provides the easiest API overall, as this is where the TX is (which needs the key // provides the easiest API overall, as this is where the TX is (which needs the key
// images in its message), along with where the outputs are determined (where our // images in its message), along with where the outputs are determined (where our
// outputs may need these in order to guarantee uniqueness) // outputs may need these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY( images[c] += preprocess.addendum.key_image.0;
buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)]
.try_into()
.map_err(|_| FrostError::InvalidCommitment(*l))?,
)
.decompress()
.ok_or(FrostError::InvalidCommitment(*l))?;
Ok((*l, Cursor::new(buf))) Ok((*l, preprocess))
}) })
.collect::<Result<HashMap<_, _>, _>>() .collect::<Result<HashMap<_, _>, _>>()
}) })
@@ -346,37 +338,39 @@ impl SignMachine<Transaction> for TransactionSignMachine {
let msg = tx.signature_hash(); let msg = tx.signature_hash();
// Iterate over each CLSAG calling sign // Iterate over each CLSAG calling sign
let mut serialized = Vec::with_capacity(self.clsags.len() * 32); let mut shares = Vec::with_capacity(self.clsags.len());
let clsags = self let clsags = self
.clsags .clsags
.drain(..) .drain(..)
.map(|clsag| { .map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?; let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
serialized.extend(&share); shares.push(share);
Ok(clsag) Ok(clsag)
}) })
.collect::<Result<_, _>>()?; .collect::<Result<_, _>>()?;
Ok((TransactionSignatureMachine { tx, clsags }, serialized)) Ok((TransactionSignatureMachine { tx, clsags }, shares))
} }
} }
impl SignatureMachine<Transaction> for TransactionSignatureMachine { impl SignatureMachine<Transaction> for TransactionSignatureMachine {
fn complete<Re: Read>(self, mut shares: HashMap<u16, Re>) -> Result<Transaction, FrostError> { type SignatureShare = Vec<SignatureShare<Ed25519>>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect()
}
fn complete(
mut self,
shares: HashMap<u16, Self::SignatureShare>,
) -> Result<Transaction, FrostError> {
let mut tx = self.tx; let mut tx = self.tx;
match tx.rct_signatures.prunable { match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"), RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => { RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for clsag in self.clsags { for (c, clsag) in self.clsags.drain(..).enumerate() {
let (clsag, pseudo_out) = clsag.complete( let (clsag, pseudo_out) = clsag.complete(
shares shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::<HashMap<_, _>>(),
.iter_mut()
.map(|(l, shares)| {
let mut buf = [0; 32];
shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
})
.collect::<Result<HashMap<_, _>, _>>()?,
)?; )?;
clsags.push(clsag); clsags.push(clsag);
pseudo_outs.push(pseudo_out); pseudo_outs.push(pseudo_out);

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dleq" name = "dleq"
version = "0.1.1" version = "0.1.2"
description = "Implementation of single and cross-curve Discrete Log Equality proofs" description = "Implementation of single and cross-curve Discrete Log Equality proofs"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq"

View File

@@ -61,7 +61,7 @@ pub enum DLEqError {
InvalidProof, InvalidProof,
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct DLEqProof<G: PrimeGroup> { pub struct DLEqProof<G: PrimeGroup> {
c: G::Scalar, c: G::Scalar,
s: G::Scalar, s: G::Scalar,

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "modular-frost" name = "modular-frost"
version = "0.2.4" version = "0.3.0"
description = "Modular implementation of FROST over ff/group" description = "Modular implementation of FROST over ff/group"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost"
@@ -40,7 +40,7 @@ transcript = { package = "flexible-transcript", path = "../transcript", features
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
dleq = { path = "../dleq", version = "0.1", features = ["serialize"] } dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] }
[dev-dependencies] [dev-dependencies]
sha2 = "0.10" sha2 = "0.10"

View File

@@ -1,26 +1,45 @@
use core::{marker::PhantomData, fmt::Debug}; use core::{marker::PhantomData, fmt::Debug};
use std::io::Read; use std::io::{self, Read, Write};
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use transcript::Transcript; use transcript::Transcript;
use crate::{Curve, FrostError, FrostView, schnorr}; use crate::{Curve, FrostError, FrostView, schnorr};
pub use schnorr::SchnorrSignature; pub use schnorr::SchnorrSignature;
/// Serialize an addendum to a writer.
pub trait AddendumSerialize {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
}
// Algorithms without an addendum (e.g. Schnorr, which sets Addendum = ())
// write nothing.
impl AddendumSerialize for () {
fn write<W: Write>(&self, _: &mut W) -> io::Result<()> {
Ok(())
}
}
/// Trait alias for the requirements to be used as an addendum.
pub trait Addendum: Clone + PartialEq + Debug + Zeroize + AddendumSerialize {}
// Blanket impl: any type meeting the bounds is automatically an Addendum.
impl<A: Clone + PartialEq + Debug + Zeroize + AddendumSerialize> Addendum for A {}
/// Algorithm trait usable by the FROST signing machine to produce signatures. /// Algorithm trait usable by the FROST signing machine to produce signatures.
pub trait Algorithm<C: Curve>: Clone { pub trait Algorithm<C: Curve>: Clone {
/// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible /// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible
/// transcript included in this crate. /// transcript included in this crate.
type Transcript: Transcript + Clone + Debug; type Transcript: Clone + Debug + Transcript;
/// Serializable addendum, used in algorithms requiring more data than just the nonces.
type Addendum: Addendum;
/// The resulting type of the signatures this algorithm will produce. /// The resulting type of the signatures this algorithm will produce.
type Signature: Clone + PartialEq + Debug; type Signature: Clone + PartialEq + Debug;
/// Obtain a mutable borrow of the underlying transcript. /// Obtain a mutable borrow of the underlying transcript.
fn transcript(&mut self) -> &mut Self::Transcript; fn transcript(&mut self) -> &mut Self::Transcript;
/// Obtain the list of nonces to generate, as specified by the basepoints to create commitments. /// Obtain the list of nonces to generate, as specified by the generators to create commitments
/// against per-nonce. These are not committed to by FROST on the underlying transcript. /// against per-nonce
fn nonces(&self) -> Vec<Vec<C::G>>; fn nonces(&self) -> Vec<Vec<C::G>>;
/// Generate an addendum to FROST's preprocessing stage. /// Generate an addendum to FROST's preprocessing stage.
@@ -28,14 +47,17 @@ pub trait Algorithm<C: Curve>: Clone {
&mut self, &mut self,
rng: &mut R, rng: &mut R,
params: &FrostView<C>, params: &FrostView<C>,
) -> Vec<u8>; ) -> Self::Addendum;
/// Process the addendum for the specified participant. Guaranteed to be ordered. /// Read an addendum from a reader.
fn process_addendum<Re: Read>( fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<Self::Addendum>;
/// Process the addendum for the specified participant. Guaranteed to be called in order.
fn process_addendum(
&mut self, &mut self,
params: &FrostView<C>, params: &FrostView<C>,
l: u16, l: u16,
reader: &mut Re, reader: Self::Addendum,
) -> Result<(), FrostError>; ) -> Result<(), FrostError>;
/// Sign a share with the given secret/nonce. /// Sign a share with the given secret/nonce.
@@ -116,6 +138,7 @@ impl<C: Curve, H: Hram<C>> Schnorr<C, H> {
impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> { impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
type Transcript = IetfTranscript; type Transcript = IetfTranscript;
type Addendum = ();
type Signature = SchnorrSignature<C>; type Signature = SchnorrSignature<C>;
fn transcript(&mut self) -> &mut Self::Transcript { fn transcript(&mut self) -> &mut Self::Transcript {
@@ -126,20 +149,13 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
vec![vec![C::generator()]] vec![vec![C::generator()]]
} }
fn preprocess_addendum<R: RngCore + CryptoRng>( fn preprocess_addendum<R: RngCore + CryptoRng>(&mut self, _: &mut R, _: &FrostView<C>) {}
&mut self,
_: &mut R, fn read_addendum<R: Read>(&self, _: &mut R) -> io::Result<Self::Addendum> {
_: &FrostView<C>, Ok(())
) -> Vec<u8> {
vec![]
} }
fn process_addendum<Re: Read>( fn process_addendum(&mut self, _: &FrostView<C>, _: u16, _: ()) -> Result<(), FrostError> {
&mut self,
_: &FrostView<C>,
_: u16,
_: &mut Re,
) -> Result<(), FrostError> {
Ok(()) Ok(())
} }

View File

@@ -1,7 +1,5 @@
use core::fmt::Debug; use core::fmt::Debug;
use std::io::Read; use std::io::{self, Read};
use thiserror::Error;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@@ -30,15 +28,6 @@ mod ed448;
#[cfg(feature = "ed448")] #[cfg(feature = "ed448")]
pub use ed448::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram}; pub use ed448::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram};
/// Set of errors for curve-related operations, namely encoding and decoding.
#[derive(Clone, Error, Debug)]
pub enum CurveError {
#[error("invalid scalar")]
InvalidScalar,
#[error("invalid point")]
InvalidPoint,
}
/// Unified trait to manage an elliptic curve. /// Unified trait to manage an elliptic curve.
// This should be moved into its own crate if the need for generic cryptography over ff/group // This should be moved into its own crate if the need for generic cryptography over ff/group
// continues, which is the exact reason ff/group exists (to provide a generic interface) // continues, which is the exact reason ff/group exists (to provide a generic interface)
@@ -127,13 +116,13 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
fn read_F<R: Read>(r: &mut R) -> Result<Self::F, CurveError> { fn read_F<R: Read>(r: &mut R) -> io::Result<Self::F> {
let mut encoding = <Self::F as PrimeField>::Repr::default(); let mut encoding = <Self::F as PrimeField>::Repr::default();
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidScalar)?; r.read_exact(encoding.as_mut())?;
// ff mandates this is canonical // ff mandates this is canonical
let res = let res = Option::<Self::F>::from(Self::F::from_repr(encoding))
Option::<Self::F>::from(Self::F::from_repr(encoding)).ok_or(CurveError::InvalidScalar); .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar"));
for b in encoding.as_mut() { for b in encoding.as_mut() {
b.zeroize(); b.zeroize();
} }
@@ -141,15 +130,15 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
fn read_G<R: Read>(r: &mut R) -> Result<Self::G, CurveError> { fn read_G<R: Read>(r: &mut R) -> io::Result<Self::G> {
let mut encoding = <Self::G as GroupEncoding>::Repr::default(); let mut encoding = <Self::G as GroupEncoding>::Repr::default();
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidPoint)?; r.read_exact(encoding.as_mut())?;
let point = let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
Option::<Self::G>::from(Self::G::from_bytes(&encoding)).ok_or(CurveError::InvalidPoint)?; .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
// Ban the identity, per the FROST spec, and non-canonical points // Ban the identity, per the FROST spec, and non-canonical points
if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) { if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) {
Err(CurveError::InvalidPoint)?; Err(io::Error::new(io::ErrorKind::Other, "non-canonical or identity point"))?;
} }
Ok(point) Ok(point)
} }

View File

@@ -1,6 +1,6 @@
use std::{ use std::{
marker::PhantomData, marker::PhantomData,
io::{Read, Cursor}, io::{self, Read, Write},
collections::HashMap, collections::HashMap,
}; };
@@ -34,101 +34,97 @@ fn challenge<C: Curve>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
C::hash_to_F(DST, &transcript) C::hash_to_F(DST, &transcript)
} }
/// Commitments message to be broadcast to all other parties.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Commitments<C: Curve>(Vec<C::G>, Vec<u8>, SchnorrSignature<C>);
impl<C: Curve> Commitments<C> {
  /// Read a Commitments message: `t` point encodings (per the provided parameters), followed by
  /// the Schnorr proof of knowledge for the first coefficient.
  pub fn read<R: Read>(reader: &mut R, params: FrostParams) -> io::Result<Self> {
    let t = usize::from(params.t());
    let mut points = Vec::with_capacity(t);
    let mut raw = Vec::with_capacity(t * C::G_len());
    for _ in 0 .. t {
      // Keep the canonical encoding around so write doesn't have to re-serialize the points
      let mut buf = <C::G as GroupEncoding>::Repr::default();
      reader.read_exact(buf.as_mut())?;
      points.push(C::read_G(&mut buf.as_ref())?);
      raw.extend(buf.as_ref());
    }
    let sig = SchnorrSignature::read(reader)?;
    Ok(Commitments(points, raw, sig))
  }

  /// Write this Commitments message: the serialized commitments, then the proof of knowledge.
  pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(&self.1)?;
    self.2.write(writer)
  }
}
// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and // Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
// the serialized commitments to be broadcasted over an authenticated channel to all parties // the commitments to be broadcasted over an authenticated channel to all parties
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>( fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: &FrostParams, params: &FrostParams,
context: &str, context: &str,
) -> (Vec<C::F>, Vec<C::G>, Vec<u8>) { ) -> (Vec<C::F>, Vec<C::G>, Commitments<C>) {
let t = usize::from(params.t); let t = usize::from(params.t);
let mut coefficients = Vec::with_capacity(t); let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t); let mut commitments = Vec::with_capacity(t);
let mut serialized = Vec::with_capacity((C::G_len() * t) + C::G_len() + C::F_len()); let mut serialized = Vec::with_capacity(t * C::G_len());
for i in 0 .. t { for i in 0 .. t {
// Step 1: Generate t random values to form a polynomial with // Step 1: Generate t random values to form a polynomial with
coefficients.push(C::random_F(&mut *rng)); coefficients.push(C::random_F(&mut *rng));
// Step 3: Generate public commitments // Step 3: Generate public commitments
commitments.push(C::generator() * coefficients[i]); commitments.push(C::generator() * coefficients[i]);
// Serialize them for publication
serialized.extend(commitments[i].to_bytes().as_ref()); serialized.extend(commitments[i].to_bytes().as_ref());
} }
// Step 2: Provide a proof of knowledge // Step 2: Provide a proof of knowledge
let mut r = C::random_F(rng); let mut r = C::random_F(rng);
serialized.extend( let sig = schnorr::sign::<C>(
schnorr::sign::<C>( coefficients[0],
coefficients[0], // This could be deterministic as the PoK is a singleton never opened up to cooperative
// This could be deterministic as the PoK is a singleton never opened up to cooperative // discussion
// discussion // There's no reason to spend the time and effort to make this deterministic besides a
// There's no reason to spend the time and effort to make this deterministic besides a // general obsession with canonicity and determinism though
// general obsession with canonicity and determinism though r,
r, challenge::<C>(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized),
challenge::<C>(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized),
)
.serialize(),
); );
r.zeroize(); r.zeroize();
// Step 4: Broadcast // Step 4: Broadcast
(coefficients, commitments, serialized) (coefficients, commitments.clone(), Commitments(commitments, serialized, sig))
} }
// Verify the received data from the first round of key generation // Verify the received data from the first round of key generation
fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>( fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: &FrostParams, params: &FrostParams,
context: &str, context: &str,
our_commitments: Vec<C::G>, our_commitments: Vec<C::G>,
mut serialized: HashMap<u16, Re>, mut msgs: HashMap<u16, Commitments<C>>,
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> { ) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
validate_map(&serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?; validate_map(&msgs, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
let mut commitments = HashMap::new();
commitments.insert(params.i, our_commitments);
let mut signatures = Vec::with_capacity(usize::from(params.n() - 1)); let mut signatures = Vec::with_capacity(usize::from(params.n() - 1));
for l in 1 ..= params.n() { let mut commitments = msgs
if l == params.i { .drain()
continue; .map(|(l, msg)| {
}
let invalid = FrostError::InvalidCommitment(l);
// Read the entire list of commitments as the key we're providing a PoK for (A) and the message
#[allow(non_snake_case)]
let mut Am = vec![0; usize::from(params.t()) * C::G_len()];
serialized.get_mut(&l).unwrap().read_exact(&mut Am).map_err(|_| invalid)?;
let mut these_commitments = vec![];
let mut cursor = Cursor::new(&Am);
for _ in 0 .. usize::from(params.t()) {
these_commitments.push(C::read_G(&mut cursor).map_err(|_| invalid)?);
}
// Don't bother validating our own proof of knowledge
if l != params.i() {
let cursor = serialized.get_mut(&l).unwrap();
#[allow(non_snake_case)]
let R = C::read_G(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
let s = C::read_F(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
// Step 5: Validate each proof of knowledge // Step 5: Validate each proof of knowledge
// This is solely the prep step for the latter batch verification // This is solely the prep step for the latter batch verification
signatures.push(( signatures.push((
l, l,
these_commitments[0], msg.0[0],
challenge::<C>(context, l, R.to_bytes().as_ref(), &Am), challenge::<C>(context, l, msg.2.R.to_bytes().as_ref(), &msg.1),
SchnorrSignature::<C> { R, s }, msg.2,
)); ));
}
commitments.insert(l, these_commitments); (l, msg.0)
} })
.collect::<HashMap<_, _>>();
schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?; schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?;
commitments.insert(params.i, our_commitments);
Ok(commitments) Ok(commitments)
} }
@@ -144,18 +140,39 @@ fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
share share
} }
// Implements round 1, step 5 and round 2, step 1 of FROST key generation /// Secret share, to be sent only to the party it's intended for, over an encrypted and
/// authenticated channel.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct SecretShare<C: Curve>(C::F);
impl<C: Curve> SecretShare<C> {
  /// Deserialize a secret share (a single scalar) from a reader.
  pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
    C::read_F(reader).map(Self)
  }

  /// Serialize this secret share's canonical scalar representation to a writer.
  pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    let repr = self.0.to_repr();
    writer.write_all(repr.as_ref())
  }
}

// Wipe the share from memory as soon as it's dropped
impl<C: Curve> Drop for SecretShare<C> {
  fn drop(&mut self) {
    self.zeroize();
  }
}
impl<C: Curve> ZeroizeOnDrop for SecretShare<C> {}
// Calls round 1, step 5 and implements round 2, step 1 of FROST key generation
// Returns our secret share part, commitments for the next step, and a vector for each // Returns our secret share part, commitments for the next step, and a vector for each
// counterparty to receive // counterparty to receive
fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>( fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: &FrostParams, params: &FrostParams,
context: &str, context: &str,
coefficients: &mut Vec<C::F>, coefficients: &mut Vec<C::F>,
our_commitments: Vec<C::G>, our_commitments: Vec<C::G>,
commitments: HashMap<u16, Re>, msgs: HashMap<u16, Commitments<C>>,
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> { ) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, SecretShare<C>>), FrostError> {
let commitments = verify_r1::<_, _, C>(rng, params, context, our_commitments, commitments)?; let commitments = verify_r1::<_, C>(rng, params, context, our_commitments, msgs)?;
// Step 1: Generate secret shares for all other parties // Step 1: Generate secret shares for all other parties
let mut res = HashMap::new(); let mut res = HashMap::new();
@@ -166,7 +183,7 @@ fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
continue; continue;
} }
res.insert(l, polynomial(coefficients, l).to_repr().as_ref().to_vec()); res.insert(l, SecretShare(polynomial(coefficients, l)));
} }
// Calculate our own share // Calculate our own share
@@ -177,24 +194,17 @@ fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
Ok((share, commitments, res)) Ok((share, commitments, res))
} }
/// Finishes round 2 and returns both the secret share and the serialized public key. // Finishes round 2 and returns the keys.
/// This key MUST NOT be considered usable until all parties confirm they have completed the // This key MUST NOT be considered usable until all parties confirm they have completed the
/// protocol without issue. // protocol without issue.
fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>( fn complete_r2<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: FrostParams, params: FrostParams,
mut secret_share: C::F, mut secret_share: C::F,
commitments: &mut HashMap<u16, Vec<C::G>>, commitments: &mut HashMap<u16, Vec<C::G>>,
mut serialized: HashMap<u16, Re>, mut shares: HashMap<u16, SecretShare<C>>,
) -> Result<FrostCore<C>, FrostError> { ) -> Result<FrostCore<C>, FrostError> {
validate_map(&serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?; validate_map(&shares, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
// Step 2. Verify each share
let mut shares = HashMap::new();
// TODO: Clear serialized
for (l, share) in serialized.iter_mut() {
shares.insert(*l, C::read_F(share).map_err(|_| FrostError::InvalidShare(*l))?);
}
// Calculate the exponent for a given participant and apply it to a series of commitments // Calculate the exponent for a given participant and apply it to a series of commitments
// Initially used with the actual commitments to verify the secret share, later used with stripes // Initially used with the actual commitments to verify the secret share, later used with stripes
@@ -210,22 +220,18 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
}; };
let mut batch = BatchVerifier::new(shares.len()); let mut batch = BatchVerifier::new(shares.len());
for (l, share) in shares.iter_mut() { for (l, mut share) in shares.drain() {
if *l == params.i() { secret_share += share.0;
continue;
}
secret_share += *share;
// This can be insecurely linearized from n * t to just n using the below sums for a given // This can be insecurely linearized from n * t to just n using the below sums for a given
// stripe. Doing so uses naive addition which is subject to malleability. The only way to // stripe. Doing so uses naive addition which is subject to malleability. The only way to
// ensure that malleability isn't present is to use this n * t algorithm, which runs // ensure that malleability isn't present is to use this n * t algorithm, which runs
// per sender and not as an aggregate of all senders, which also enables blame // per sender and not as an aggregate of all senders, which also enables blame
let mut values = exponential(params.i, &commitments[l]); let mut values = exponential(params.i, &commitments[&l]);
values.push((-*share, C::generator())); values.push((-share.0, C::generator()));
share.zeroize(); share.zeroize();
batch.queue(rng, *l, values); batch.queue(rng, l, values);
} }
batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?; batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?;
@@ -299,14 +305,14 @@ impl<C: Curve> KeyGenMachine<C> {
} }
/// Start generating a key according to the FROST DKG spec. /// Start generating a key according to the FROST DKG spec.
/// Returns a serialized list of commitments to be sent to all parties over an authenticated /// Returns a commitments message to be sent to all parties over an authenticated
/// channel. If any party submits multiple sets of commitments, they MUST be treated as /// channel. If any party submits multiple sets of commitments, they MUST be treated as
/// malicious. /// malicious.
pub fn generate_coefficients<R: RngCore + CryptoRng>( pub fn generate_coefficients<R: RngCore + CryptoRng>(
self, self,
rng: &mut R, rng: &mut R,
) -> (SecretShareMachine<C>, Vec<u8>) { ) -> (SecretShareMachine<C>, Commitments<C>) {
let (coefficients, our_commitments, serialized) = let (coefficients, our_commitments, commitments) =
generate_key_r1::<_, C>(rng, &self.params, &self.context); generate_key_r1::<_, C>(rng, &self.params, &self.context);
( (
@@ -316,21 +322,21 @@ impl<C: Curve> KeyGenMachine<C> {
coefficients, coefficients,
our_commitments, our_commitments,
}, },
serialized, commitments,
) )
} }
} }
impl<C: Curve> SecretShareMachine<C> { impl<C: Curve> SecretShareMachine<C> {
/// Continue generating a key. /// Continue generating a key.
/// Takes in everyone else's commitments. Returns a HashMap of byte vectors representing secret /// Takes in everyone else's commitments. Returns a HashMap of secret shares.
/// shares. These MUST be encrypted and only then sent to their respective participants. /// These MUST be encrypted and only then sent to their respective participants.
pub fn generate_secret_shares<Re: Read, R: RngCore + CryptoRng>( pub fn generate_secret_shares<R: RngCore + CryptoRng>(
mut self, mut self,
rng: &mut R, rng: &mut R,
commitments: HashMap<u16, Re>, commitments: HashMap<u16, Commitments<C>>,
) -> Result<(KeyMachine<C>, HashMap<u16, Vec<u8>>), FrostError> { ) -> Result<(KeyMachine<C>, HashMap<u16, SecretShare<C>>), FrostError> {
let (secret, commitments, shares) = generate_key_r2::<_, _, C>( let (secret, commitments, shares) = generate_key_r2::<_, C>(
rng, rng,
&self.params, &self.params,
&self.context, &self.context,
@@ -347,10 +353,10 @@ impl<C: Curve> KeyMachine<C> {
/// Takes in everyone elses' shares submitted to us. Returns a FrostCore object representing the /// Takes in everyone elses' shares submitted to us. Returns a FrostCore object representing the
/// generated keys. Successful protocol completion MUST be confirmed by all parties before these /// generated keys. Successful protocol completion MUST be confirmed by all parties before these
/// keys may be safely used. /// keys may be safely used.
pub fn complete<Re: Read, R: RngCore + CryptoRng>( pub fn complete<R: RngCore + CryptoRng>(
mut self, mut self,
rng: &mut R, rng: &mut R,
shares: HashMap<u16, Re>, shares: HashMap<u16, SecretShare<C>>,
) -> Result<FrostCore<C>, FrostError> { ) -> Result<FrostCore<C>, FrostError> {
complete_r2(rng, self.params, self.secret, &mut self.commitments, shares) complete_r2(rng, self.params, self.secret, &mut self.commitments, shares)
} }

View File

@@ -38,6 +38,7 @@ pub mod promote;
/// Algorithm for the signing process. /// Algorithm for the signing process.
pub mod algorithm; pub mod algorithm;
mod nonce;
/// Threshold signing protocol. /// Threshold signing protocol.
pub mod sign; pub mod sign;
@@ -45,7 +46,7 @@ pub mod sign;
#[cfg(any(test, feature = "tests"))] #[cfg(any(test, feature = "tests"))]
pub mod tests; pub mod tests;
// Validate a map of serialized values to have the expected included participants // Validate a map of values to have the expected included participants
pub(crate) fn validate_map<T>( pub(crate) fn validate_map<T>(
map: &HashMap<u16, T>, map: &HashMap<u16, T>,
included: &[u16], included: &[u16],
@@ -136,6 +137,8 @@ pub enum FrostError {
InvalidCommitment(u16), InvalidCommitment(u16),
#[error("invalid proof of knowledge (participant {0})")] #[error("invalid proof of knowledge (participant {0})")]
InvalidProofOfKnowledge(u16), InvalidProofOfKnowledge(u16),
#[error("invalid preprocess (participant {0})")]
InvalidPreprocess(u16),
#[error("invalid share (participant {0})")] #[error("invalid share (participant {0})")]
InvalidShare(u16), InvalidShare(u16),

271
crypto/frost/src/nonce.rs Normal file
View File

@@ -0,0 +1,271 @@
// FROST defines its nonce as sum(Di, Ei * bi)
// Monero needs not just the nonce over G however, yet also over H
// Then there is a signature (a modified Chaum Pedersen proof) using multiple nonces at once
//
// Accordingly, in order for this library to be robust, it supports generating an arbitrary amount
// of nonces, each against an arbitrary list of basepoints
//
// Each nonce remains of the form (d, e) and made into a proper nonce with d + (e * b)
// When multiple D, E pairs are provided, a DLEq proof is also provided to confirm their integrity
use std::{
io::{self, Read, Write},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use transcript::Transcript;
use group::{ff::PrimeField, Group, GroupEncoding};
use multiexp::multiexp_vartime;
use dleq::DLEqProof;
use crate::curve::Curve;
// Transcript for the nonce DLEq proofs
// Each proof gets a fresh, independent transcript keyed solely by this domain-separation tag;
// the comments in NonceCommitments::new explain why an independent transcript is used
fn dleq_transcript<T: Transcript>() -> T {
  T::new(b"FROST_nonce_dleq")
}
// Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper
// This is considered a single nonce as r = d + be, where b is the per-participant binding
// factor (see BindingFactor)
#[derive(Clone, Zeroize)]
pub(crate) struct Nonce<C: Curve>(pub(crate) [C::F; 2]);
// Commitments to a specific generator for this nonce (the D, E pair)
#[derive(Copy, Clone, PartialEq, Eq, Zeroize)]
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
impl<C: Curve> GeneratorCommitments<C> {
  // Read the (D, E) pair, in that order
  fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
    let d = C::read_G(reader)?;
    let e = C::read_G(reader)?;
    Ok(GeneratorCommitments([d, e]))
  }

  // Write D, then E, as their canonical encodings
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    for point in &self.0 {
      writer.write_all(point.to_bytes().as_ref())?;
    }
    Ok(())
  }
}
// A single nonce's commitments and relevant proofs
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub(crate) struct NonceCommitments<C: Curve> {
  // Called generators as these commitments are indexed by generator
  // generators[i] is the (D, E) commitment pair over the i-th planned generator
  pub(crate) generators: Vec<GeneratorCommitments<C>>,
  // DLEq Proofs proving that these commitments are generated using the same scalar pair
  // This could be further optimized with a multi-nonce proof, offering just one proof for all
  // nonces. See https://github.com/serai-dex/serai/issues/38
  // TODO
  // None when there's under two generators, as there's nothing to prove equivalent
  pub(crate) dleqs: Option<[DLEqProof<C::G>; 2]>,
}

impl<C: Curve> NonceCommitments<C> {
  // Sample a new nonce (a d, e scalar pair) and commit to it over every provided generator
  // With two or more generators, also proves (via DLEq) that each scalar's commitments share
  // the same discrete logarithm across all of them
  pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    mut secret_share: C::F,
    generators: &[C::G],
  ) -> (Nonce<C>, NonceCommitments<C>) {
    // random_nonce takes the secret share as an input; presumably this hedges against a weak
    // RNG — TODO confirm the intended semantics against the Curve implementation
    let nonce =
      Nonce([C::random_nonce(secret_share, &mut *rng), C::random_nonce(secret_share, &mut *rng)]);
    // This local copy of the share is no longer needed past nonce generation
    secret_share.zeroize();

    // One (D, E) pair per generator, all committing to the same underlying scalars
    let mut commitments = Vec::with_capacity(generators.len());
    for generator in generators {
      commitments.push(GeneratorCommitments([*generator * nonce.0[0], *generator * nonce.0[1]]));
    }

    let mut dleqs = None;
    if generators.len() >= 2 {
      let mut dleq = |nonce| {
        // Uses an independent transcript as each signer must prove this with their commitments,
        // yet they're validated while processing everyone's data sequentially, by the global order
        // This avoids needing to clone and fork the transcript around
        // TODO: At least include a challenge from the existing transcript
        DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(), generators, nonce)
      };
      dleqs = Some([dleq(nonce.0[0]), dleq(nonce.0[1])]);
    }

    (nonce, NonceCommitments { generators: commitments, dleqs })
  }

  // Read a NonceCommitments, verifying its DLEq proofs (when expected) in the process
  // Accordingly, a successfully read value has commitments consistent across its generators
  fn read<R: Read, T: Transcript>(
    reader: &mut R,
    generators: &[C::G],
  ) -> io::Result<NonceCommitments<C>> {
    let commitments: Vec<GeneratorCommitments<C>> = (0 .. generators.len())
      .map(|_| GeneratorCommitments::read(reader))
      .collect::<Result<_, _>>()?;

    let mut dleqs = None;
    if generators.len() >= 2 {
      // Verify the proof for the i-th scalar (0 = D, 1 = E) against every generator's commitment
      let mut verify = |i| -> io::Result<_> {
        let dleq = DLEqProof::deserialize(reader)?;
        dleq
          .verify(
            &mut dleq_transcript::<T>(),
            generators,
            &commitments.iter().map(|commitments| commitments.0[i]).collect::<Vec<_>>(),
          )
          .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
        Ok(dleq)
      };
      dleqs = Some([verify(0)?, verify(1)?]);
    }

    Ok(NonceCommitments { generators: commitments, dleqs })
  }

  // Write each generator's commitments in order, then the DLEq proofs if present
  // Must remain the exact mirror of read
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    for generator in &self.generators {
      generator.write(writer)?;
    }
    if let Some(dleqs) = &self.dleqs {
      dleqs[0].serialize(writer)?;
      dleqs[1].serialize(writer)?;
    }
    Ok(())
  }
}
// All of a single participant's nonce commitments for one signing session
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub(crate) struct Commitments<C: Curve> {
  // Called nonces as these commitments are indexed by nonce
  pub(crate) nonces: Vec<NonceCommitments<C>>,
}

impl<C: Curve> Commitments<C> {
  // Generate one nonce per planned set of generators, returning both the secret nonces and the
  // commitments to publish
  pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    secret_share: C::F,
    planned_nonces: &[Vec<C::G>],
  ) -> (Vec<Nonce<C>>, Commitments<C>) {
    let mut nonces = vec![];
    let mut commitments = vec![];
    for generators in planned_nonces {
      let (nonce, these_commitments) =
        NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators);
      nonces.push(nonce);
      commitments.push(these_commitments);
    }
    (nonces, Commitments { nonces: commitments })
  }

  // Append every commitment (and DLEq proof) to the transcript, binding the signature to them
  // The append order is fixed; all signers must transcript identically
  pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
    for nonce in &self.nonces {
      for commitments in &nonce.generators {
        t.append_message(b"commitment_D", commitments.0[0].to_bytes().as_ref());
        t.append_message(b"commitment_E", commitments.0[1].to_bytes().as_ref());
      }

      // Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce
      // This means it shouldn't be possible for variadic generators to cause conflicts as they're
      // committed to as their entire series per-nonce, not as isolates
      if let Some(dleqs) = &nonce.dleqs {
        let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
          let mut buf = Vec::with_capacity(C::G_len() + C::F_len());
          // Serialization into a Vec is infallible, hence the unwrap
          dleq.serialize(&mut buf).unwrap();
          t.append_message(label, &buf);
        };
        transcript_dleq(b"dleq_D", &dleqs[0]);
        transcript_dleq(b"dleq_E", &dleqs[1]);
      }
    }
  }

  // Read a Commitments message, expecting one NonceCommitments per planned nonce
  // DLEq verification happens inside NonceCommitments::read
  pub(crate) fn read<R: Read, T: Transcript>(
    reader: &mut R,
    nonces: &[Vec<C::G>],
  ) -> io::Result<Self> {
    Ok(Commitments {
      nonces: (0 .. nonces.len())
        .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i]))
        .collect::<Result<_, _>>()?,
    })
  }

  // Write each nonce's commitments, in order; the mirror of read
  pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    for nonce in &self.nonces {
      nonce.write(writer)?;
    }
    Ok(())
  }
}
// A single participant's commitments, together with their binding factors once calculated
#[derive(Zeroize)]
pub(crate) struct IndividualBinding<C: Curve> {
  commitments: Commitments<C>,
  // None until calculate_binding_factors is called; then one factor per nonce
  binding_factors: Option<Vec<C::F>>,
}
// Binding factor state for every participant in a signing session, keyed by participant index
pub(crate) struct BindingFactor<C: Curve>(pub(crate) HashMap<u16, IndividualBinding<C>>);

impl<C: Curve> Zeroize for BindingFactor<C> {
  fn zeroize(&mut self) {
    // Drain the map so the entries are wiped and then dropped
    // The u16 key is also zeroized, though it's not obviously secret
    for (mut validator, mut binding) in self.0.drain() {
      validator.zeroize();
      binding.zeroize();
    }
  }
}

impl<C: Curve> BindingFactor<C> {
  // Track a participant's commitments; their binding factors stay uncalculated until
  // calculate_binding_factors is called
  pub(crate) fn insert(&mut self, i: u16, commitments: Commitments<C>) {
    self.0.insert(i, IndividualBinding { commitments, binding_factors: None });
  }

  // Derive each participant's binding factors from a per-participant fork of the transcript
  pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &mut T) {
    for (l, binding) in self.0.iter_mut() {
      // Clone the transcript per participant so each factor is bound to who it's for
      let mut transcript = transcript.clone();
      transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
      // It *should* be perfectly fine to reuse a binding factor for multiple nonces
      // This generates a binding factor per nonce just to ensure it never comes up as a question
      binding.binding_factors = Some(
        (0 .. binding.commitments.nonces.len())
          .map(|_| C::hash_binding_factor(transcript.challenge(b"rho").as_ref()))
          .collect(),
      );
    }
  }

  // The binding factors for participant i
  // Panics if calculate_binding_factors hasn't been called yet (an internal invariant)
  pub(crate) fn binding_factors(&self, i: u16) -> &[C::F] {
    self.0[&i].binding_factors.as_ref().unwrap()
  }

  // Get the bound nonces for a specific party, i.e. D + (E * rho) per generator, per nonce
  pub(crate) fn bound(&self, l: u16) -> Vec<Vec<C::G>> {
    let mut res = vec![];
    for (i, (nonce, rho)) in
      self.0[&l].commitments.nonces.iter().zip(self.binding_factors(l).iter()).enumerate()
    {
      res.push(vec![]);
      for generator in &nonce.generators {
        res[i].push(generator.0[0] + (generator.0[1] * rho));
      }
    }
    res
  }

  // Get the aggregate nonces for this signing session
  // For each (nonce, generator) pair, sums every participant's D plus their rho-weighted E,
  // batching the weighted terms through a variable-time multiexp
  pub(crate) fn nonces(&self, planned_nonces: &[Vec<C::G>]) -> Vec<Vec<C::G>> {
    let mut nonces = Vec::with_capacity(planned_nonces.len());
    for n in 0 .. planned_nonces.len() {
      nonces.push(Vec::with_capacity(planned_nonces[n].len()));
      for g in 0 .. planned_nonces[n].len() {
        #[allow(non_snake_case)]
        let mut D = C::G::identity();
        let mut statements = Vec::with_capacity(self.0.len());
        #[allow(non_snake_case)]
        for IndividualBinding { commitments, binding_factors } in self.0.values() {
          D += commitments.nonces[n].generators[g].0[0];
          statements
            .push((binding_factors.as_ref().unwrap()[n], commitments.nonces[n].generators[g].0[1]));
        }
        nonces[n].push(D + multiexp_vartime(&statements));
      }
    }
    nonces
  }
}

View File

@@ -12,10 +12,7 @@ use group::GroupEncoding;
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use dleq::DLEqProof; use dleq::DLEqProof;
use crate::{ use crate::{curve::Curve, FrostError, FrostCore, FrostKeys, validate_map};
curve::{CurveError, Curve},
FrostError, FrostCore, FrostKeys, validate_map,
};
/// Promote a set of keys to another Curve definition. /// Promote a set of keys to another Curve definition.
pub trait CurvePromote<C2: Curve> { pub trait CurvePromote<C2: Curve> {
@@ -73,11 +70,8 @@ impl<C: Curve> GeneratorProof<C> {
self.proof.serialize(writer) self.proof.serialize(writer)
} }
pub fn deserialize<R: Read>(reader: &mut R) -> Result<GeneratorProof<C>, CurveError> { pub fn deserialize<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
Ok(GeneratorProof { Ok(GeneratorProof { share: C::read_G(reader)?, proof: DLEqProof::deserialize(reader)? })
share: C::read_G(reader)?,
proof: DLEqProof::deserialize(reader).map_err(|_| CurveError::InvalidScalar)?,
})
} }
} }

View File

@@ -1,3 +1,5 @@
use std::io::{self, Read, Write};
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize; use zeroize::Zeroize;
@@ -9,7 +11,7 @@ use group::{
use multiexp::BatchVerifier; use multiexp::BatchVerifier;
use crate::Curve; use crate::curve::Curve;
/// A Schnorr signature of the form (R, s) where s = r + cx. /// A Schnorr signature of the form (R, s) where s = r + cx.
#[allow(non_snake_case)] #[allow(non_snake_case)]
@@ -20,11 +22,13 @@ pub struct SchnorrSignature<C: Curve> {
} }
impl<C: Curve> SchnorrSignature<C> { impl<C: Curve> SchnorrSignature<C> {
pub fn serialize(&self) -> Vec<u8> { pub(crate) fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
let mut res = Vec::with_capacity(C::G_len() + C::F_len()); Ok(SchnorrSignature { R: C::read_G(reader)?, s: C::read_F(reader)? })
res.extend(self.R.to_bytes().as_ref()); }
res.extend(self.s.to_repr().as_ref());
res pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.R.to_bytes().as_ref())?;
writer.write_all(self.s.to_repr().as_ref())
} }
} }

View File

@@ -1,28 +1,40 @@
use core::fmt; use core::fmt;
use std::{ use std::{
io::{Read, Cursor}, io::{self, Read, Write},
collections::HashMap, collections::HashMap,
}; };
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop}; use zeroize::{Zeroize, ZeroizeOnDrop};
use subtle::ConstantTimeEq;
use transcript::Transcript; use transcript::Transcript;
use group::{ use group::{ff::PrimeField, GroupEncoding};
ff::{Field, PrimeField},
Group, GroupEncoding,
};
use multiexp::multiexp_vartime;
use dleq::DLEqProof;
use crate::{ use crate::{
curve::Curve, FrostError, FrostParams, FrostKeys, FrostView, algorithm::Algorithm, validate_map, curve::Curve,
FrostError, FrostParams, FrostKeys, FrostView,
algorithm::{AddendumSerialize, Addendum, Algorithm},
validate_map,
}; };
pub(crate) use crate::nonce::*;
/// Trait enabling writing preprocesses and signature shares.
pub trait Writable {
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
}

impl<T: Writable> Writable for Vec<T> {
  // Writing a Vec writes each element in order with no length prefix; the expected element
  // count must come from context
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    self.iter().try_for_each(|item| item.write(writer))
  }
}
/// Pairing of an Algorithm with a FrostKeys instance and this specific signing set. /// Pairing of an Algorithm with a FrostKeys instance and this specific signing set.
#[derive(Clone)] #[derive(Clone)]
pub struct Params<C: Curve, A: Algorithm<C>> { pub struct Params<C: Curve, A: Algorithm<C>> {
@@ -31,7 +43,6 @@ pub struct Params<C: Curve, A: Algorithm<C>> {
view: FrostView<C>, view: FrostView<C>,
} }
// Currently public to enable more complex operations as desired, yet solely used in testing
impl<C: Curve, A: Algorithm<C>> Params<C, A> { impl<C: Curve, A: Algorithm<C>> Params<C, A> {
pub fn new( pub fn new(
algorithm: A, algorithm: A,
@@ -79,104 +90,75 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
} }
} }
fn nonce_transcript<T: Transcript>() -> T { /// Preprocess for an instance of the FROST signing protocol.
T::new(b"FROST_nonce_dleq") #[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct Preprocess<C: Curve, A: Addendum> {
pub(crate) commitments: Commitments<C>,
pub addendum: A,
}
impl<C: Curve, A: Addendum> Writable for Preprocess<C, A> {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
self.commitments.write(writer)?;
self.addendum.write(writer)
}
} }
#[derive(Zeroize)] #[derive(Zeroize)]
pub(crate) struct PreprocessPackage<C: Curve> { pub(crate) struct PreprocessData<C: Curve, A: Addendum> {
pub(crate) nonces: Vec<[C::F; 2]>, pub(crate) nonces: Vec<Nonce<C>>,
#[zeroize(skip)] pub(crate) preprocess: Preprocess<C, A>,
pub(crate) commitments: Vec<Vec<[C::G; 2]>>,
pub(crate) addendum: Vec<u8>,
} }
impl<C: Curve> Drop for PreprocessPackage<C> { impl<C: Curve, A: Addendum> Drop for PreprocessData<C, A> {
fn drop(&mut self) { fn drop(&mut self) {
self.zeroize() self.zeroize()
} }
} }
impl<C: Curve> ZeroizeOnDrop for PreprocessPackage<C> {} impl<C: Curve, A: Addendum> ZeroizeOnDrop for PreprocessData<C, A> {}
fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>( fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
rng: &mut R, rng: &mut R,
params: &mut Params<C, A>, params: &mut Params<C, A>,
) -> (PreprocessPackage<C>, Vec<u8>) { ) -> (PreprocessData<C, A::Addendum>, Preprocess<C, A::Addendum>) {
let mut serialized = Vec::with_capacity(2 * C::G_len()); let (nonces, commitments) = Commitments::new::<_, A::Transcript>(
let (nonces, commitments) = params &mut *rng,
.algorithm params.view().secret_share(),
.nonces() &params.algorithm.nonces(),
.iter() );
.map(|generators| {
let nonces = [
C::random_nonce(params.view().secret_share(), &mut *rng),
C::random_nonce(params.view().secret_share(), &mut *rng),
];
let commit = |generator: C::G, buf: &mut Vec<u8>| {
let commitments = [generator * nonces[0], generator * nonces[1]];
buf.extend(commitments[0].to_bytes().as_ref());
buf.extend(commitments[1].to_bytes().as_ref());
commitments
};
let mut commitments = Vec::with_capacity(generators.len());
for generator in generators.iter() {
commitments.push(commit(*generator, &mut serialized));
}
// Provide a DLEq proof to verify these commitments are for the same nonce
if generators.len() >= 2 {
// Uses an independent transcript as each signer must do this now, yet we validate them
// sequentially by the global order. Avoids needing to clone and fork the transcript around
let mut transcript = nonce_transcript::<A::Transcript>();
// This could be further optimized with a multi-nonce proof.
// See https://github.com/serai-dex/serai/issues/38
for mut nonce in nonces {
DLEqProof::prove(&mut *rng, &mut transcript, generators, nonce)
.serialize(&mut serialized)
.unwrap();
nonce.zeroize();
}
}
(nonces, commitments)
})
.unzip();
let addendum = params.algorithm.preprocess_addendum(rng, &params.view); let addendum = params.algorithm.preprocess_addendum(rng, &params.view);
serialized.extend(&addendum);
(PreprocessPackage { nonces, commitments, addendum }, serialized) let preprocess = Preprocess { commitments, addendum };
(PreprocessData { nonces, preprocess: preprocess.clone() }, preprocess)
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
fn read_D_E<Re: Read, C: Curve>(cursor: &mut Re, l: u16) -> Result<[C::G; 2], FrostError> { struct SignData<C: Curve> {
Ok([ B: BindingFactor<C>,
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
])
}
#[allow(non_snake_case)]
struct Package<C: Curve> {
B: HashMap<u16, (Vec<Vec<[C::G; 2]>>, C::F)>,
Rs: Vec<Vec<C::G>>, Rs: Vec<Vec<C::G>>,
share: C::F, share: C::F,
} }
/// Share of a signature produced via FROST.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct SignatureShare<C: Curve>(C::F);
impl<C: Curve> Writable for SignatureShare<C> {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.0.to_repr().as_ref())
}
}
// Has every signer perform the role of the signature aggregator // Has every signer perform the role of the signature aggregator
// Step 1 was already deprecated by performing nonce generation as needed // Step 1 was already deprecated by performing nonce generation as needed
// Step 2 is simply the broadcast round from step 1 // Step 2 is simply the broadcast round from step 1
fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>( fn sign_with_share<C: Curve, A: Algorithm<C>>(
params: &mut Params<C, A>, params: &mut Params<C, A>,
our_preprocess: PreprocessPackage<C>, mut our_preprocess: PreprocessData<C, A::Addendum>,
mut commitments: HashMap<u16, Re>, mut preprocesses: HashMap<u16, Preprocess<C, A::Addendum>>,
msg: &[u8], msg: &[u8],
) -> Result<(Package<C>, Vec<u8>), FrostError> { ) -> Result<(SignData<C>, SignatureShare<C>), FrostError> {
let multisig_params = params.multisig_params(); let multisig_params = params.multisig_params();
validate_map(&commitments, &params.view.included, multisig_params.i)?; validate_map(&preprocesses, &params.view.included, multisig_params.i)?;
{ {
// Domain separate FROST // Domain separate FROST
@@ -185,9 +167,9 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
let nonces = params.algorithm.nonces(); let nonces = params.algorithm.nonces();
#[allow(non_snake_case)] #[allow(non_snake_case)]
let mut B = HashMap::<u16, _>::with_capacity(params.view.included.len()); let mut B = BindingFactor(HashMap::<u16, _>::with_capacity(params.view.included.len()));
{ {
// Parse the commitments // Parse the preprocesses
for l in &params.view.included { for l in &params.view.included {
{ {
params params
@@ -196,73 +178,39 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref()); .append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
} }
// While this doesn't note which nonce/basepoint this is for, those are expected to be
// static. Beyond that, they're committed to in the DLEq proof transcripts, ensuring
// consistency. While this is suboptimal, it maintains IETF compliance, and Algorithm is
// documented accordingly
let transcript = |t: &mut A::Transcript, commitments: [C::G; 2]| {
if commitments[0].ct_eq(&C::G::identity()).into() ||
commitments[1].ct_eq(&C::G::identity()).into()
{
Err(FrostError::InvalidCommitment(*l))?;
}
t.append_message(b"commitment_D", commitments[0].to_bytes().as_ref());
t.append_message(b"commitment_E", commitments[1].to_bytes().as_ref());
Ok(())
};
if *l == params.keys.params().i { if *l == params.keys.params().i {
for nonce_commitments in &our_preprocess.commitments { let commitments = our_preprocess.preprocess.commitments.clone();
for commitments in nonce_commitments { commitments.transcript(params.algorithm.transcript());
transcript(params.algorithm.transcript(), *commitments).unwrap();
} let addendum = our_preprocess.preprocess.addendum.clone();
{
let mut buf = vec![];
addendum.write(&mut buf).unwrap();
params.algorithm.transcript().append_message(b"addendum", &buf);
} }
B.insert(*l, (our_preprocess.commitments.clone(), C::F::zero())); B.insert(*l, commitments);
params.algorithm.process_addendum( params.algorithm.process_addendum(&params.view, *l, addendum)?;
&params.view,
*l,
&mut Cursor::new(our_preprocess.addendum.clone()),
)?;
} else { } else {
let mut cursor = commitments.remove(l).unwrap(); let preprocess = preprocesses.remove(l).unwrap();
preprocess.commitments.transcript(params.algorithm.transcript());
let mut commitments = Vec::with_capacity(nonces.len()); {
for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() { let mut buf = vec![];
commitments.push(Vec::with_capacity(nonce_generators.len())); preprocess.addendum.write(&mut buf).unwrap();
for _ in 0 .. nonce_generators.len() { params.algorithm.transcript().append_message(b"addendum", &buf);
commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?);
transcript(params.algorithm.transcript(), commitments[n][commitments[n].len() - 1])?;
}
if nonce_generators.len() >= 2 {
let mut transcript = nonce_transcript::<A::Transcript>();
for de in 0 .. 2 {
DLEqProof::deserialize(&mut cursor)
.map_err(|_| FrostError::InvalidCommitment(*l))?
.verify(
&mut transcript,
nonce_generators,
&commitments[n].iter().map(|commitments| commitments[de]).collect::<Vec<_>>(),
)
.map_err(|_| FrostError::InvalidCommitment(*l))?;
}
}
} }
B.insert(*l, (commitments, C::F::zero())); B.insert(*l, preprocess.commitments);
params.algorithm.process_addendum(&params.view, *l, &mut cursor)?; params.algorithm.process_addendum(&params.view, *l, preprocess.addendum)?;
} }
} }
// Re-format into the FROST-expected rho transcript // Re-format into the FROST-expected rho transcript
let mut rho_transcript = A::Transcript::new(b"FROST_rho"); let mut rho_transcript = A::Transcript::new(b"FROST_rho");
rho_transcript.append_message(b"message", &C::hash_msg(msg)); rho_transcript.append_message(b"message", &C::hash_msg(msg));
// This won't just be the commitments, yet the full existing transcript if used in an extended
// protocol
rho_transcript.append_message( rho_transcript.append_message(
b"commitments", b"preprocesses",
&C::hash_commitments(params.algorithm.transcript().challenge(b"commitments").as_ref()), &C::hash_commitments(params.algorithm.transcript().challenge(b"preprocesses").as_ref()),
); );
// Include the offset, if one exists // Include the offset, if one exists
@@ -280,14 +228,10 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
} }
// Generate the per-signer binding factors // Generate the per-signer binding factors
for (l, commitments) in B.iter_mut() { B.calculate_binding_factors(&mut rho_transcript);
let mut rho_transcript = rho_transcript.clone();
rho_transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
commitments.1 = C::hash_binding_factor(rho_transcript.challenge(b"rho").as_ref());
}
// Merge the rho transcript back into the global one to ensure its advanced while committing to // Merge the rho transcript back into the global one to ensure its advanced, while
// everything // simultaneously committing to everything
params params
.algorithm .algorithm
.transcript() .transcript()
@@ -295,60 +239,44 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
let mut Rs = Vec::with_capacity(nonces.len()); let Rs = B.nonces(&nonces);
for n in 0 .. nonces.len() {
Rs.push(vec![C::G::identity(); nonces[n].len()]);
for g in 0 .. nonces[n].len() {
#[allow(non_snake_case)]
let mut D = C::G::identity();
let mut statements = Vec::with_capacity(B.len());
#[allow(non_snake_case)]
for (B, binding) in B.values() {
D += B[n][g][0];
statements.push((*binding, B[n][g][1]));
}
Rs[n][g] = D + multiexp_vartime(&statements);
}
}
let our_binding_factors = B.binding_factors(multisig_params.i());
let mut nonces = our_preprocess let mut nonces = our_preprocess
.nonces .nonces
.iter() .iter()
.map(|nonces| nonces[0] + (nonces[1] * B[&params.keys.params().i()].1)) .enumerate()
.map(|(n, nonces)| nonces.0[0] + (nonces.0[1] * our_binding_factors[n]))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
our_preprocess.nonces.zeroize();
let share = params.algorithm.sign_share(&params.view, &Rs, &nonces, msg); let share = params.algorithm.sign_share(&params.view, &Rs, &nonces, msg);
nonces.zeroize(); nonces.zeroize();
Ok((Package { B, Rs, share }, share.to_repr().as_ref().to_vec())) Ok((SignData { B, Rs, share }, SignatureShare(share)))
} }
fn complete<Re: Read, C: Curve, A: Algorithm<C>>( fn complete<C: Curve, A: Algorithm<C>>(
sign_params: &Params<C, A>, sign_params: &Params<C, A>,
sign: Package<C>, sign: SignData<C>,
mut shares: HashMap<u16, Re>, mut shares: HashMap<u16, SignatureShare<C>>,
) -> Result<A::Signature, FrostError> { ) -> Result<A::Signature, FrostError> {
let params = sign_params.multisig_params(); let params = sign_params.multisig_params();
validate_map(&shares, &sign_params.view.included, params.i)?; validate_map(&shares, &sign_params.view.included, params.i)?;
let mut responses = HashMap::new(); let mut responses = HashMap::new();
let mut sum = C::F::zero(); responses.insert(params.i(), sign.share);
for l in &sign_params.view.included { let mut sum = sign.share;
let part = if *l == params.i { for (l, share) in shares.drain() {
sign.share responses.insert(l, share.0);
} else { sum += share.0;
C::read_F(shares.get_mut(l).unwrap()).map_err(|_| FrostError::InvalidShare(*l))?
};
sum += part;
responses.insert(*l, part);
} }
// Perform signature validation instead of individual share validation // Perform signature validation instead of individual share validation
// For the success route, which should be much more frequent, this should be faster // For the success route, which should be much more frequent, this should be faster
// It also acts as an integrity check of this library's signing function // It also acts as an integrity check of this library's signing function
let res = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum); if let Some(sig) = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum) {
if let Some(res) = res { return Ok(sig);
return Ok(res);
} }
// Find out who misbehaved. It may be beneficial to randomly sort this to have detection be // Find out who misbehaved. It may be beneficial to randomly sort this to have detection be
@@ -356,13 +284,7 @@ fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
for l in &sign_params.view.included { for l in &sign_params.view.included {
if !sign_params.algorithm.verify_share( if !sign_params.algorithm.verify_share(
sign_params.view.verification_share(*l), sign_params.view.verification_share(*l),
&sign.B[l] &sign.B.bound(*l),
.0
.iter()
.map(|nonces| {
nonces.iter().map(|commitments| commitments[0] + (commitments[1] * sign.B[l].1)).collect()
})
.collect::<Vec<_>>(),
responses[l], responses[l],
) { ) {
Err(FrostError::InvalidShare(*l))?; Err(FrostError::InvalidShare(*l))?;
@@ -375,33 +297,53 @@ fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
/// Trait for the initial state machine of a two-round signing protocol. /// Trait for the initial state machine of a two-round signing protocol.
pub trait PreprocessMachine { pub trait PreprocessMachine {
/// Preprocess message for this machine.
type Preprocess: Clone + PartialEq + Writable;
/// Signature produced by this machine.
type Signature: Clone + PartialEq + fmt::Debug; type Signature: Clone + PartialEq + fmt::Debug;
type SignMachine: SignMachine<Self::Signature>; /// SignMachine this PreprocessMachine turns into.
type SignMachine: SignMachine<Self::Signature, Preprocess = Self::Preprocess>;
/// Perform the preprocessing round required in order to sign. /// Perform the preprocessing round required in order to sign.
/// Returns a byte vector to be broadcast to all participants, over an authenticated channel. /// Returns a preprocess message to be broadcast to all participants, over an authenticated
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R) -> (Self::SignMachine, Vec<u8>); /// channel.
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R)
-> (Self::SignMachine, Self::Preprocess);
} }
/// Trait for the second machine of a two-round signing protocol. /// Trait for the second machine of a two-round signing protocol.
pub trait SignMachine<S> { pub trait SignMachine<S> {
type SignatureMachine: SignatureMachine<S>; /// Preprocess message for this machine.
type Preprocess: Clone + PartialEq + Writable;
/// SignatureShare message for this machine.
type SignatureShare: Clone + PartialEq + Writable;
/// SignatureMachine this SignMachine turns into.
type SignatureMachine: SignatureMachine<S, SignatureShare = Self::SignatureShare>;
/// Read a Preprocess message.
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess>;
/// Sign a message. /// Sign a message.
/// Takes in the participants' preprocesses. Returns a byte vector representing a signature share /// Takes in the participants' preprocess messages. Returns the signature share to be broadcast
/// to be broadcast to all participants, over an authenticated channel. /// to all participants, over an authenticated channel.
fn sign<Re: Read>( fn sign(
self, self,
commitments: HashMap<u16, Re>, commitments: HashMap<u16, Self::Preprocess>,
msg: &[u8], msg: &[u8],
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError>; ) -> Result<(Self::SignatureMachine, Self::SignatureShare), FrostError>;
} }
/// Trait for the final machine of a two-round signing protocol. /// Trait for the final machine of a two-round signing protocol.
pub trait SignatureMachine<S> { pub trait SignatureMachine<S> {
/// SignatureShare message for this machine.
type SignatureShare: Clone + PartialEq + Writable;
/// Read a Signature Share message.
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare>;
/// Complete signing. /// Complete signing.
/// Takes in everyone elses' shares. Returns the signature. /// Takes in everyone elses' shares. Returns the signature.
fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<S, FrostError>; fn complete(self, shares: HashMap<u16, Self::SignatureShare>) -> Result<S, FrostError>;
} }
/// State machine which manages signing for an arbitrary signature algorithm. /// State machine which manages signing for an arbitrary signature algorithm.
@@ -412,13 +354,13 @@ pub struct AlgorithmMachine<C: Curve, A: Algorithm<C>> {
/// Next step of the state machine for the signing process. /// Next step of the state machine for the signing process.
pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> { pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>, params: Params<C, A>,
preprocess: PreprocessPackage<C>, preprocess: PreprocessData<C, A::Addendum>,
} }
/// Final step of the state machine for the signing process. /// Final step of the state machine for the signing process.
pub struct AlgorithmSignatureMachine<C: Curve, A: Algorithm<C>> { pub struct AlgorithmSignatureMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>, params: Params<C, A>,
sign: Package<C>, sign: SignData<C>,
} }
impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> { impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
@@ -434,39 +376,58 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
#[cfg(any(test, feature = "tests"))] #[cfg(any(test, feature = "tests"))]
pub(crate) fn unsafe_override_preprocess( pub(crate) fn unsafe_override_preprocess(
self, self,
preprocess: PreprocessPackage<C>, preprocess: PreprocessData<C, A::Addendum>,
) -> AlgorithmSignMachine<C, A> { ) -> AlgorithmSignMachine<C, A> {
AlgorithmSignMachine { params: self.params, preprocess } AlgorithmSignMachine { params: self.params, preprocess }
} }
} }
impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> { impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> {
type Preprocess = Preprocess<C, A::Addendum>;
type Signature = A::Signature; type Signature = A::Signature;
type SignMachine = AlgorithmSignMachine<C, A>; type SignMachine = AlgorithmSignMachine<C, A>;
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R) -> (Self::SignMachine, Vec<u8>) { fn preprocess<R: RngCore + CryptoRng>(
self,
rng: &mut R,
) -> (Self::SignMachine, Preprocess<C, A::Addendum>) {
let mut params = self.params; let mut params = self.params;
let (preprocess, serialized) = preprocess::<R, C, A>(rng, &mut params); let (preprocess, public) = preprocess::<R, C, A>(rng, &mut params);
(AlgorithmSignMachine { params, preprocess }, serialized) (AlgorithmSignMachine { params, preprocess }, public)
} }
} }
impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachine<C, A> { impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachine<C, A> {
type Preprocess = Preprocess<C, A::Addendum>;
type SignatureShare = SignatureShare<C>;
type SignatureMachine = AlgorithmSignatureMachine<C, A>; type SignatureMachine = AlgorithmSignatureMachine<C, A>;
fn sign<Re: Read>( fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
Ok(Preprocess {
commitments: Commitments::read::<_, A::Transcript>(reader, &self.params.algorithm.nonces())?,
addendum: self.params.algorithm.read_addendum(reader)?,
})
}
fn sign(
self, self,
commitments: HashMap<u16, Re>, commitments: HashMap<u16, Preprocess<C, A::Addendum>>,
msg: &[u8], msg: &[u8],
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError> { ) -> Result<(Self::SignatureMachine, SignatureShare<C>), FrostError> {
let mut params = self.params; let mut params = self.params;
let (sign, serialized) = sign_with_share(&mut params, self.preprocess, commitments, msg)?; let (sign, public) = sign_with_share(&mut params, self.preprocess, commitments, msg)?;
Ok((AlgorithmSignatureMachine { params, sign }, serialized)) Ok((AlgorithmSignatureMachine { params, sign }, public))
} }
} }
impl<C: Curve, A: Algorithm<C>> SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> { impl<C: Curve, A: Algorithm<C>> SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> {
fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<A::Signature, FrostError> { type SignatureShare = SignatureShare<C>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<SignatureShare<C>> {
Ok(SignatureShare(C::read_F(reader)?))
}
fn complete(self, shares: HashMap<u16, SignatureShare<C>>) -> Result<A::Signature, FrostError> {
complete(&self.params, self.sign, shares) complete(&self.params, self.sign, shares)
} }
} }

View File

@@ -1,5 +1,3 @@
use std::io::Cursor;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use group::Group; use group::Group;
@@ -15,7 +13,10 @@ fn key_generation<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// Test serialization of generated keys // Test serialization of generated keys
fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) { fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
for (_, keys) in core_gen::<_, C>(rng) { for (_, keys) in core_gen::<_, C>(rng) {
assert_eq!(&FrostCore::<C>::deserialize(&mut Cursor::new(keys.serialize())).unwrap(), &keys); assert_eq!(
&FrostCore::<C>::deserialize::<&[u8]>(&mut keys.serialize().as_ref()).unwrap(),
&keys
);
} }
} }

View File

@@ -1,5 +1,3 @@
use std::io::Cursor;
use rand_core::OsRng; use rand_core::OsRng;
use crate::{ use crate::{
@@ -13,32 +11,31 @@ fn ed448_8032_vector() {
let context = hex::decode("666f6f").unwrap(); let context = hex::decode("666f6f").unwrap();
#[allow(non_snake_case)] #[allow(non_snake_case)]
let A = Ed448::read_G(&mut Cursor::new( let A = Ed448::read_G::<&[u8]>(
hex::decode( &mut hex::decode(
"43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c".to_owned() + "43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c".to_owned() +
"6798c0866aea01eb00742802b8438ea4cb82169c235160627b4c3a94" + "6798c0866aea01eb00742802b8438ea4cb82169c235160627b4c3a94" +
"80", "80",
) )
.unwrap(), .unwrap()
)) .as_ref(),
)
.unwrap(); .unwrap();
let msg = hex::decode("03").unwrap(); let msg = hex::decode("03").unwrap();
let mut sig = Cursor::new( let sig = hex::decode(
hex::decode( "d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b3".to_owned() +
"d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b3".to_owned() + "2a89f7d2151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea" +
"2a89f7d2151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea" + "00" +
"00" + "0c85741de5c8da1144a6a1aba7f96de42505d7a7298524fda538fccb" +
"0c85741de5c8da1144a6a1aba7f96de42505d7a7298524fda538fccb" + "bb754f578c1cad10d54d0d5428407e85dcbc98a49155c13764e66c3c" +
"bb754f578c1cad10d54d0d5428407e85dcbc98a49155c13764e66c3c" + "00",
"00", )
) .unwrap();
.unwrap(),
);
#[allow(non_snake_case)] #[allow(non_snake_case)]
let R = Ed448::read_G(&mut sig).unwrap(); let R = Ed448::read_G::<&[u8]>(&mut sig.as_ref()).unwrap();
let s = Ed448::read_F(&mut sig).unwrap(); let s = Ed448::read_F::<&[u8]>(&mut &sig[57 ..]).unwrap();
assert!(verify( assert!(verify(
A, A,

View File

@@ -1,4 +1,4 @@
use std::{io::Cursor, collections::HashMap}; use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@@ -6,9 +6,9 @@ use group::ff::Field;
use crate::{ use crate::{
Curve, FrostParams, FrostCore, FrostKeys, lagrange, Curve, FrostParams, FrostCore, FrostKeys, lagrange,
key_gen::KeyGenMachine, key_gen::{SecretShare, Commitments as KGCommitments, KeyGenMachine},
algorithm::Algorithm, algorithm::Algorithm,
sign::{PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
}; };
/// Curve tests. /// Curve tests.
@@ -50,15 +50,32 @@ pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, F
); );
let (machine, these_commitments) = machine.generate_coefficients(rng); let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine); machines.insert(i, machine);
commitments.insert(i, Cursor::new(these_commitments));
commitments.insert(i, {
let mut buf = vec![];
these_commitments.write(&mut buf).unwrap();
KGCommitments::read::<&[u8]>(
&mut buf.as_ref(),
FrostParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 },
)
.unwrap()
});
} }
let mut secret_shares = HashMap::new(); let mut secret_shares = HashMap::new();
let mut machines = machines let mut machines = machines
.drain() .drain()
.map(|(l, machine)| { .map(|(l, machine)| {
let (machine, shares) = let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
let mut buf = vec![];
share.write(&mut buf).unwrap();
(l, SecretShare::<C>::read::<&[u8]>(&mut buf.as_ref()).unwrap())
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares); secret_shares.insert(l, shares);
(l, machine) (l, machine)
}) })
@@ -74,7 +91,7 @@ pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, F
if i == *l { if i == *l {
continue; continue;
} }
our_secret_shares.insert(*l, Cursor::new(shares[&i].clone())); our_secret_shares.insert(*l, shares[&i].clone());
} }
let these_keys = machine.complete(rng, our_secret_shares).unwrap(); let these_keys = machine.complete(rng, our_secret_shares).unwrap();
@@ -154,7 +171,11 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
.drain() .drain()
.map(|(i, machine)| { .map(|(i, machine)| {
let (machine, preprocess) = machine.preprocess(rng); let (machine, preprocess) = machine.preprocess(rng);
commitments.insert(i, Cursor::new(preprocess)); commitments.insert(i, {
let mut buf = vec![];
preprocess.write(&mut buf).unwrap();
machine.read_preprocess::<&[u8]>(&mut buf.as_ref()).unwrap()
});
(i, machine) (i, machine)
}) })
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
@@ -164,7 +185,11 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
.drain() .drain()
.map(|(i, machine)| { .map(|(i, machine)| {
let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap(); let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap();
shares.insert(i, Cursor::new(share)); shares.insert(i, {
let mut buf = vec![];
share.write(&mut buf).unwrap();
machine.read_share::<&[u8]>(&mut buf.as_ref()).unwrap()
});
(i, machine) (i, machine)
}) })
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();

View File

@@ -1,4 +1,4 @@
use std::{io::Cursor, collections::HashMap}; use std::collections::HashMap;
#[cfg(test)] #[cfg(test)]
use std::str::FromStr; use std::str::FromStr;
@@ -10,7 +10,10 @@ use crate::{
curve::Curve, curve::Curve,
FrostCore, FrostKeys, FrostCore, FrostKeys,
algorithm::{Schnorr, Hram}, algorithm::{Schnorr, Hram},
sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine}, sign::{
Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess,
PreprocessData, SignMachine, SignatureMachine, AlgorithmMachine,
},
tests::{ tests::{
clone_without, curve::test_curve, schnorr::test_schnorr, promote::test_promotion, recover, clone_without, curve::test_curve, schnorr::test_schnorr, promote::test_promotion, recover,
}, },
@@ -78,12 +81,13 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
let shares = vectors let shares = vectors
.shares .shares
.iter() .iter()
.map(|secret| C::read_F(&mut Cursor::new(hex::decode(secret).unwrap())).unwrap()) .map(|secret| C::read_F::<&[u8]>(&mut hex::decode(secret).unwrap().as_ref()).unwrap())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let verification_shares = shares.iter().map(|secret| C::generator() * secret).collect::<Vec<_>>(); let verification_shares = shares.iter().map(|secret| C::generator() * secret).collect::<Vec<_>>();
let mut keys = HashMap::new(); let mut keys = HashMap::new();
for i in 1 ..= u16::try_from(shares.len()).unwrap() { for i in 1 ..= u16::try_from(shares.len()).unwrap() {
// Manually re-implement the serialization for FrostCore to import this data
let mut serialized = vec![]; let mut serialized = vec![];
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes()); serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID); serialized.extend(C::ID);
@@ -95,7 +99,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
serialized.extend(share.to_bytes().as_ref()); serialized.extend(share.to_bytes().as_ref());
} }
let these_keys = FrostCore::<C>::deserialize(&mut Cursor::new(serialized)).unwrap(); let these_keys = FrostCore::<C>::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap();
assert_eq!(these_keys.params().t(), vectors.threshold); assert_eq!(these_keys.params().t(), vectors.threshold);
assert_eq!(usize::from(these_keys.params().n()), shares.len()); assert_eq!(usize::from(these_keys.params().n()), shares.len());
assert_eq!(these_keys.params().i(), i); assert_eq!(these_keys.params().i(), i);
@@ -118,8 +122,10 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
// Test against the vectors // Test against the vectors
let keys = vectors_to_multisig_keys::<C>(&vectors); let keys = vectors_to_multisig_keys::<C>(&vectors);
let group_key = C::read_G(&mut Cursor::new(hex::decode(&vectors.group_key).unwrap())).unwrap(); let group_key =
let secret = C::read_F(&mut Cursor::new(hex::decode(&vectors.group_secret).unwrap())).unwrap(); C::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap();
let secret =
C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap();
assert_eq!(C::generator() * secret, group_key); assert_eq!(C::generator() * secret, group_key);
assert_eq!(recover(&keys), secret); assert_eq!(recover(&keys), secret);
@@ -142,27 +148,36 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
.drain(..) .drain(..)
.map(|(i, machine)| { .map(|(i, machine)| {
let nonces = [ let nonces = [
C::read_F(&mut Cursor::new(hex::decode(&vectors.nonces[c][0]).unwrap())).unwrap(), C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][0]).unwrap().as_ref()).unwrap(),
C::read_F(&mut Cursor::new(hex::decode(&vectors.nonces[c][1]).unwrap())).unwrap(), C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][1]).unwrap().as_ref()).unwrap(),
]; ];
c += 1; c += 1;
let these_commitments = vec![[C::generator() * nonces[0], C::generator() * nonces[1]]]; let these_commitments = [C::generator() * nonces[0], C::generator() * nonces[1]];
let machine = machine.unsafe_override_preprocess(PreprocessPackage { let machine = machine.unsafe_override_preprocess(PreprocessData {
nonces: vec![nonces], nonces: vec![Nonce(nonces)],
commitments: vec![these_commitments.clone()], preprocess: Preprocess {
addendum: vec![], commitments: Commitments {
nonces: vec![NonceCommitments {
generators: vec![GeneratorCommitments(these_commitments)],
dleqs: None,
}],
},
addendum: (),
},
}); });
commitments.insert( commitments.insert(
*i, *i,
Cursor::new( machine
[ .read_preprocess::<&[u8]>(
these_commitments[0][0].to_bytes().as_ref(), &mut [
these_commitments[0][1].to_bytes().as_ref(), these_commitments[0].to_bytes().as_ref(),
] these_commitments[1].to_bytes().as_ref(),
.concat() ]
.to_vec(), .concat()
), .as_ref(),
)
.unwrap(),
); );
(i, machine) (i, machine)
}) })
@@ -176,10 +191,15 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
let (machine, share) = let (machine, share) =
machine.sign(clone_without(&commitments, i), &hex::decode(&vectors.msg).unwrap()).unwrap(); machine.sign(clone_without(&commitments, i), &hex::decode(&vectors.msg).unwrap()).unwrap();
let share = {
let mut buf = vec![];
share.write(&mut buf).unwrap();
buf
};
assert_eq!(share, hex::decode(&vectors.sig_shares[c]).unwrap()); assert_eq!(share, hex::decode(&vectors.sig_shares[c]).unwrap());
c += 1; c += 1;
shares.insert(*i, Cursor::new(share)); shares.insert(*i, machine.read_share::<&[u8]>(&mut share.as_ref()).unwrap());
(i, machine) (i, machine)
}) })
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();

View File

@@ -1,4 +1,4 @@
use std::{marker::Send, io::Cursor, collections::HashMap}; use std::{marker::Send, collections::HashMap};
use async_trait::async_trait; use async_trait::async_trait;
use thiserror::Error; use thiserror::Error;
@@ -18,7 +18,7 @@ pub enum NetworkError {}
#[async_trait] #[async_trait]
pub trait Network: Send { pub trait Network: Send {
async fn round(&mut self, data: Vec<u8>) -> Result<HashMap<u16, Cursor<Vec<u8>>>, NetworkError>; async fn round(&mut self, data: Vec<u8>) -> Result<HashMap<u16, Vec<u8>>, NetworkError>;
} }
#[derive(Clone, Error, Debug)] #[derive(Clone, Error, Debug)]

View File

@@ -1,5 +1,4 @@
use std::{ use std::{
io::Cursor,
sync::{Arc, RwLock}, sync::{Arc, RwLock},
collections::HashMap, collections::HashMap,
}; };
@@ -19,7 +18,7 @@ struct LocalNetwork {
i: u16, i: u16,
size: u16, size: u16,
round: usize, round: usize,
rounds: Arc<RwLock<Vec<HashMap<u16, Cursor<Vec<u8>>>>>>, rounds: Arc<RwLock<Vec<HashMap<u16, Vec<u8>>>>>,
} }
impl LocalNetwork { impl LocalNetwork {
@@ -35,13 +34,13 @@ impl LocalNetwork {
#[async_trait] #[async_trait]
impl Network for LocalNetwork { impl Network for LocalNetwork {
async fn round(&mut self, data: Vec<u8>) -> Result<HashMap<u16, Cursor<Vec<u8>>>, NetworkError> { async fn round(&mut self, data: Vec<u8>) -> Result<HashMap<u16, Vec<u8>>, NetworkError> {
{ {
let mut rounds = self.rounds.write().unwrap(); let mut rounds = self.rounds.write().unwrap();
if rounds.len() == self.round { if rounds.len() == self.round {
rounds.push(HashMap::new()); rounds.push(HashMap::new());
} }
rounds[self.round].insert(self.i, Cursor::new(data)); rounds[self.round].insert(self.i, data);
} }
while { while {

View File

@@ -7,8 +7,8 @@ use group::GroupEncoding;
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use frost::{ use frost::{
curve::Curve, curve::Curve,
FrostKeys, FrostError, FrostKeys,
sign::{PreprocessMachine, SignMachine, SignatureMachine}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine},
}; };
use crate::{ use crate::{
@@ -343,10 +343,44 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
self.coin.attempt_send(prepared, &included).await.map_err(SignError::CoinError)?; self.coin.attempt_send(prepared, &included).await.map_err(SignError::CoinError)?;
let (attempt, commitments) = attempt.preprocess(&mut OsRng); let (attempt, commitments) = attempt.preprocess(&mut OsRng);
let commitments = network.round(commitments).await.map_err(SignError::NetworkError)?; let commitments = network
.round({
let mut buf = vec![];
commitments.write(&mut buf).unwrap();
buf
})
.await
.map_err(SignError::NetworkError)?
.drain()
.map(|(validator, preprocess)| {
Ok((
validator,
attempt
.read_preprocess::<&[u8]>(&mut preprocess.as_ref())
.map_err(|_| SignError::FrostError(FrostError::InvalidPreprocess(validator)))?,
))
})
.collect::<Result<HashMap<_, _>, _>>()?;
let (attempt, share) = attempt.sign(commitments, b"").map_err(SignError::FrostError)?; let (attempt, share) = attempt.sign(commitments, b"").map_err(SignError::FrostError)?;
let shares = network.round(share).await.map_err(SignError::NetworkError)?; let shares = network
.round({
let mut buf = vec![];
share.write(&mut buf).unwrap();
buf
})
.await
.map_err(SignError::NetworkError)?
.drain()
.map(|(validator, share)| {
Ok((
validator,
attempt
.read_share::<&[u8]>(&mut share.as_ref())
.map_err(|_| SignError::FrostError(FrostError::InvalidShare(validator)))?,
))
})
.collect::<Result<HashMap<_, _>, _>>()?;
let tx = attempt.complete(shares).map_err(SignError::FrostError)?; let tx = attempt.complete(shares).map_err(SignError::FrostError)?;