diff --git a/Cargo.lock b/Cargo.lock index edbeca40..69dbca9e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -43,7 +43,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" dependencies = [ "cfg-if", - "cipher", + "cipher 0.3.0", "cpufeatures", "opaque-debug 0.3.0", ] @@ -56,7 +56,7 @@ checksum = "df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6" dependencies = [ "aead", "aes", - "cipher", + "cipher 0.3.0", "ctr", "ghash", "subtle", @@ -818,11 +818,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" dependencies = [ "cfg-if", - "cipher", + "cipher 0.3.0", "cpufeatures", "zeroize", ] +[[package]] +name = "chacha20" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7fc89c7c5b9e7a02dfe45cd2367bae382f9ed31c61ca8debe5f827c420a2f08" +dependencies = [ + "cfg-if", + "cipher 0.4.3", + "cpufeatures", +] + [[package]] name = "chacha20poly1305" version = "0.9.1" @@ -830,8 +841,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18446b09be63d457bbec447509e85f662f32952b035ce892290396bc0b0cff5" dependencies = [ "aead", - "chacha20", - "cipher", + "chacha20 0.8.2", + "cipher 0.3.0", "poly1305", "zeroize", ] @@ -873,6 +884,36 @@ dependencies = [ "generic-array 0.14.6", ] +[[package]] +name = "cipher" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1873270f8f7942c191139cb8a40fd228da6c3fd2fc376d7e92d47aa14aeb59e" +dependencies = [ + "crypto-common", + "inout", + "zeroize", +] + +[[package]] +name = "ciphersuite" +version = "0.1.1" +dependencies = [ + "dalek-ff-group", + "digest 0.10.5", + "elliptic-curve", + "ff", + "group", + "k256", + "minimal-ed448", + "p256", + "rand_core 0.6.4", + "sha2 0.10.6", + "sha3", + "subtle", + "zeroize", +] + [[package]] 
name = "clang-sys" version = "1.4.0" @@ -1361,7 +1402,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" dependencies = [ - "cipher", + "cipher 0.3.0", ] [[package]] @@ -1612,9 +1653,29 @@ dependencies = [ "winapi", ] +[[package]] +name = "dkg" +version = "0.1.0" +dependencies = [ + "chacha20 0.9.0", + "ciphersuite", + "digest 0.10.5", + "dleq", + "flexible-transcript", + "group", + "hex", + "hkdf", + "multiexp", + "rand_core 0.6.4", + "schnorr-signatures", + "subtle", + "thiserror", + "zeroize", +] + [[package]] name = "dleq" -version = "0.1.1" +version = "0.1.2" dependencies = [ "blake2", "dalek-ff-group", @@ -2975,6 +3036,15 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0" +[[package]] +name = "hkdf" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +dependencies = [ + "hmac 0.12.1", +] + [[package]] name = "hmac" version = "0.8.1" @@ -3454,6 +3524,15 @@ dependencies = [ "synstructure", ] +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array 0.14.6", +] + [[package]] name = "instant" version = "0.1.12" @@ -4492,23 +4571,23 @@ dependencies = [ [[package]] name = "modular-frost" -version = "0.2.4" +version = "0.3.0" dependencies = [ + "chacha20 0.9.0", + "ciphersuite", "dalek-ff-group", + "digest 0.10.5", + "dkg", "dleq", - "elliptic-curve", - "ff", "flexible-transcript", "group", "hex", - "k256", + "hkdf", "minimal-ed448", "multiexp", - "p256", "rand_core 0.6.4", + "schnorr-signatures", "serde_json", - "sha2 0.10.6", - "sha3", "subtle", "thiserror", 
"zeroize", @@ -6389,7 +6468,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c0fbb5f676da676c260ba276a8f43a8dc67cf02d1438423aeb1c677a7212686" dependencies = [ - "cipher", + "cipher 0.3.0", ] [[package]] @@ -7260,6 +7339,18 @@ dependencies = [ "windows-sys 0.36.1", ] +[[package]] +name = "schnorr-signatures" +version = "0.1.0" +dependencies = [ + "ciphersuite", + "dalek-ff-group", + "group", + "multiexp", + "rand_core 0.6.4", + "zeroize", +] + [[package]] name = "schnorrkel" version = "0.9.1" diff --git a/Cargo.toml b/Cargo.toml index 85057ee7..952e20fd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,10 +4,13 @@ members = [ "crypto/dalek-ff-group", "crypto/ed448", + "crypto/ciphersuite", "crypto/multiexp", + "crypto/schnorr", "crypto/dleq", + "crypto/dkg", "crypto/frost", "coins/ethereum", diff --git a/coins/ethereum/Cargo.toml b/coins/ethereum/Cargo.toml index 25ca79b0..a1b9697b 100644 --- a/coins/ethereum/Cargo.toml +++ b/coins/ethereum/Cargo.toml @@ -24,7 +24,7 @@ sha3 = "0.10" group = "0.12" k256 = { version = "0.11", features = ["arithmetic", "keccak256", "ecdsa"] } -frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1"] } +frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] } eyre = "0.6" diff --git a/coins/monero/Cargo.toml b/coins/monero/Cargo.toml index ccc1d5de..00308153 100644 --- a/coins/monero/Cargo.toml +++ b/coins/monero/Cargo.toml @@ -34,7 +34,7 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" } multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] } transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.1", features = ["recommended"], optional = true } -frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.2", features = ["ed25519"], optional = true } +frost = { package = "modular-frost", path = 
"../../crypto/frost", version = "0.3", features = ["ed25519"], optional = true } dleq = { path = "../../crypto/dleq", version = "0.1", features = ["serialize"], optional = true } monero-generators = { path = "generators", version = "0.1" } @@ -55,7 +55,7 @@ monero-generators = { path = "generators", version = "0.1" } [dev-dependencies] tokio = { version = "1", features = ["full"] } -frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.2", features = ["ed25519", "tests"] } +frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.3", features = ["ed25519", "tests"] } [features] multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"] diff --git a/coins/monero/src/frost.rs b/coins/monero/src/frost.rs deleted file mode 100644 index bd27ffdc..00000000 --- a/coins/monero/src/frost.rs +++ /dev/null @@ -1,73 +0,0 @@ -use std::io::Read; - -use thiserror::Error; -use rand_core::{RngCore, CryptoRng}; - -use zeroize::Zeroize; - -use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint}; - -use group::{Group, GroupEncoding}; - -use transcript::{Transcript, RecommendedTranscript}; -use dalek_ff_group as dfg; -use dleq::DLEqProof; - -#[derive(Clone, Error, Debug)] -pub(crate) enum MultisigError { - #[error("invalid discrete log equality proof")] - InvalidDLEqProof(u16), -} - -fn transcript() -> RecommendedTranscript { - RecommendedTranscript::new(b"monero_key_image_dleq") -} - -#[allow(non_snake_case)] -pub(crate) fn write_dleq( - rng: &mut R, - H: EdwardsPoint, - mut x: Scalar, -) -> Vec { - let mut res = Vec::with_capacity(64); - DLEqProof::prove( - rng, - // Doesn't take in a larger transcript object due to the usage of this - // Every prover would immediately write their own DLEq proof, when they can only do so in - // the proper order if they want to reach consensus - // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to - // merge later in some form, when it should 
instead just merge xH (as it does) - &mut transcript(), - &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)], - dfg::Scalar(x), - ) - .serialize(&mut res) - .unwrap(); - x.zeroize(); - res -} - -#[allow(non_snake_case)] -pub(crate) fn read_dleq( - serialized: &mut Re, - H: EdwardsPoint, - l: u16, - xG: dfg::EdwardsPoint, -) -> Result { - let mut bytes = [0; 32]; - serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?; - // dfg ensures the point is torsion free - let xH = Option::::from(dfg::EdwardsPoint::from_bytes(&bytes)) - .ok_or(MultisigError::InvalidDLEqProof(l))?; - // Ensure this is a canonical point - if xH.to_bytes() != bytes { - Err(MultisigError::InvalidDLEqProof(l))?; - } - - DLEqProof::::deserialize(serialized) - .map_err(|_| MultisigError::InvalidDLEqProof(l))? - .verify(&mut transcript(), &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)], &[xG, xH]) - .map_err(|_| MultisigError::InvalidDLEqProof(l))?; - - Ok(xH) -} diff --git a/coins/monero/src/lib.rs b/coins/monero/src/lib.rs index 52e1a758..b8344bf9 100644 --- a/coins/monero/src/lib.rs +++ b/coins/monero/src/lib.rs @@ -33,9 +33,6 @@ use curve25519_dalek::{ pub use monero_generators::H; -#[cfg(feature = "multisig")] -pub(crate) mod frost; - mod serialize; /// RingCT structs and functionality. diff --git a/coins/monero/src/ringct/clsag/mod.rs b/coins/monero/src/ringct/clsag/mod.rs index 71fa2072..80a3e9b3 100644 --- a/coins/monero/src/ringct/clsag/mod.rs +++ b/coins/monero/src/ringct/clsag/mod.rs @@ -22,7 +22,7 @@ use crate::{ #[cfg(feature = "multisig")] mod multisig; #[cfg(feature = "multisig")] -pub use multisig::{ClsagDetails, ClsagMultisig}; +pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig}; lazy_static! 
{ static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert(); diff --git a/coins/monero/src/ringct/clsag/multisig.rs b/coins/monero/src/ringct/clsag/multisig.rs index f9816b88..459a7073 100644 --- a/coins/monero/src/ringct/clsag/multisig.rs +++ b/coins/monero/src/ringct/clsag/multisig.rs @@ -1,6 +1,6 @@ use core::fmt::Debug; use std::{ - io::Read, + io::{self, Read, Write}, sync::{Arc, RwLock}, }; @@ -16,20 +16,26 @@ use curve25519_dalek::{ edwards::EdwardsPoint, }; -use group::Group; +use group::{Group, GroupEncoding}; use transcript::{Transcript, RecommendedTranscript}; -use frost::{curve::Ed25519, FrostError, FrostView, algorithm::Algorithm}; use dalek_ff_group as dfg; - -use crate::{ - frost::{write_dleq, read_dleq}, - ringct::{ - hash_to_point, - clsag::{ClsagInput, Clsag}, - }, +use dleq::DLEqProof; +use frost::{ + curve::Ed25519, + FrostError, ThresholdView, + algorithm::{WriteAddendum, Algorithm}, }; +use crate::ringct::{ + hash_to_point, + clsag::{ClsagInput, Clsag}, +}; + +fn dleq_transcript() -> RecommendedTranscript { + RecommendedTranscript::new(b"monero_key_image_dleq") +} + impl ClsagInput { fn transcript(&self, transcript: &mut T) { // Doesn't domain separate as this is considered part of the larger CLSAG proof @@ -54,7 +60,7 @@ impl ClsagInput { } } -/// CLSAG Input and the mask to use for it. +/// CLSAG input and the mask to use for it. #[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)] pub struct ClsagDetails { input: ClsagInput, @@ -67,6 +73,20 @@ impl ClsagDetails { } } +/// Addendum produced during the FROST signing process with relevant data. 
+#[derive(Clone, PartialEq, Eq, Zeroize, Debug)] +pub struct ClsagAddendum { + pub(crate) key_image: dfg::EdwardsPoint, + dleq: DLEqProof, +} + +impl WriteAddendum for ClsagAddendum { + fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(self.key_image.compress().to_bytes().as_ref())?; + self.dleq.serialize(writer) + } +} + #[allow(non_snake_case)] #[derive(Clone, PartialEq, Eq, Debug)] struct Interim { @@ -113,10 +133,6 @@ impl ClsagMultisig { } } - pub(crate) const fn serialized_len() -> usize { - 32 + (2 * 32) - } - fn input(&self) -> ClsagInput { (*self.details.read().unwrap()).as_ref().unwrap().input.clone() } @@ -128,6 +144,7 @@ impl ClsagMultisig { impl Algorithm for ClsagMultisig { type Transcript = RecommendedTranscript; + type Addendum = ClsagAddendum; type Signature = (Clsag, EdwardsPoint); fn nonces(&self) -> Vec> { @@ -137,19 +154,43 @@ impl Algorithm for ClsagMultisig { fn preprocess_addendum( &mut self, rng: &mut R, - view: &FrostView, - ) -> Vec { - let mut serialized = Vec::with_capacity(Self::serialized_len()); - serialized.extend((view.secret_share().0 * self.H).compress().to_bytes()); - serialized.extend(write_dleq(rng, self.H, view.secret_share().0)); - serialized + view: &ThresholdView, + ) -> ClsagAddendum { + ClsagAddendum { + key_image: dfg::EdwardsPoint(self.H * view.secret_share().0), + dleq: DLEqProof::prove( + rng, + // Doesn't take in a larger transcript object due to the usage of this + // Every prover would immediately write their own DLEq proof, when they can only do so in + // the proper order if they want to reach consensus + // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to + // try to merge later in some form, when it should instead just merge xH (as it does) + &mut dleq_transcript(), + &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)], + dfg::Scalar(view.secret_share().0), + ), + } } - fn process_addendum( + fn read_addendum(&self, reader: &mut R) -> 
io::Result { + let mut bytes = [0; 32]; + reader.read_exact(&mut bytes)?; + // dfg ensures the point is torsion free + let xH = Option::::from(dfg::EdwardsPoint::from_bytes(&bytes)) + .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?; + // Ensure this is a canonical point + if xH.to_bytes() != bytes { + Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?; + } + + Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::::deserialize(reader)? }) + } + + fn process_addendum( &mut self, - view: &FrostView, + view: &ThresholdView, l: u16, - serialized: &mut Re, + addendum: ClsagAddendum, ) -> Result<(), FrostError> { if self.image.is_identity() { self.transcript.domain_separate(b"CLSAG"); @@ -158,11 +199,20 @@ impl Algorithm for ClsagMultisig { } self.transcript.append_message(b"participant", &l.to_be_bytes()); - let image = read_dleq(serialized, self.H, l, view.verification_share(l)) - .map_err(|_| FrostError::InvalidCommitment(l))? - .0; - self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref()); - self.image += image; + + addendum + .dleq + .verify( + &mut dleq_transcript(), + &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)], + &[view.verification_share(l), addendum.key_image], + ) + .map_err(|_| FrostError::InvalidPreprocess(l))?; + + self + .transcript + .append_message(b"key_image_share", addendum.key_image.compress().to_bytes().as_ref()); + self.image += addendum.key_image.0; Ok(()) } @@ -173,7 +223,7 @@ impl Algorithm for ClsagMultisig { fn sign_share( &mut self, - view: &FrostView, + view: &ThresholdView, nonce_sums: &[Vec], nonces: &[dfg::Scalar], msg: &[u8], diff --git a/coins/monero/src/tests/clsag.rs b/coins/monero/src/tests/clsag.rs index 0068073b..a89f6d3a 100644 --- a/coins/monero/src/tests/clsag.rs +++ b/coins/monero/src/tests/clsag.rs @@ -19,10 +19,7 @@ use crate::{ }, }; #[cfg(feature = "multisig")] -use crate::{ - frost::MultisigError, - ringct::clsag::{ClsagDetails, 
ClsagMultisig}, -}; +use crate::ringct::clsag::{ClsagDetails, ClsagMultisig}; #[cfg(feature = "multisig")] use frost::tests::{key_gen, algorithm_machines, sign}; @@ -79,7 +76,7 @@ fn clsag() { #[cfg(feature = "multisig")] #[test] -fn clsag_multisig() -> Result<(), MultisigError> { +fn clsag_multisig() { let keys = key_gen::<_, Ed25519>(&mut OsRng); let randomness = random_scalar(&mut OsRng); @@ -125,6 +122,4 @@ fn clsag_multisig() -> Result<(), MultisigError> { ), &[1; 32], ); - - Ok(()) } diff --git a/coins/monero/src/wallet/send/multisig.rs b/coins/monero/src/wallet/send/multisig.rs index e3205bed..12bd9d4d 100644 --- a/coins/monero/src/wallet/send/multisig.rs +++ b/coins/monero/src/wallet/send/multisig.rs @@ -1,5 +1,5 @@ use std::{ - io::{Read, Cursor}, + io::{self, Read}, sync::{Arc, RwLock}, collections::HashMap, }; @@ -7,26 +7,22 @@ use std::{ use rand_core::{RngCore, CryptoRng, SeedableRng}; use rand_chacha::ChaCha20Rng; -use curve25519_dalek::{ - traits::Identity, - scalar::Scalar, - edwards::{EdwardsPoint, CompressedEdwardsY}, -}; +use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint}; use transcript::{Transcript, RecommendedTranscript}; use frost::{ curve::Ed25519, - FrostError, FrostKeys, + FrostError, ThresholdKeys, sign::{ - PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, AlgorithmSignMachine, - AlgorithmSignatureMachine, + Writable, Preprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine, + AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine, }, }; use crate::{ random_scalar, ringct::{ - clsag::{ClsagInput, ClsagDetails, ClsagMultisig}, + clsag::{ClsagInput, ClsagDetails, ClsagAddendum, ClsagMultisig}, RctPrunable, }, transaction::{Input, Transaction}, @@ -58,7 +54,7 @@ pub struct TransactionSignMachine { inputs: Vec>>>, clsags: Vec>, - our_preprocess: Vec, + our_preprocess: Vec>, } pub struct TransactionSignatureMachine { @@ -72,7 +68,7 @@ impl SignableTransaction { pub 
async fn multisig( self, rpc: &Rpc, - keys: FrostKeys, + keys: ThresholdKeys, mut transcript: RecommendedTranscript, height: usize, mut included: Vec, @@ -166,28 +162,26 @@ impl SignableTransaction { } impl PreprocessMachine for TransactionMachine { + type Preprocess = Vec>; type Signature = Transaction; type SignMachine = TransactionSignMachine; fn preprocess( mut self, rng: &mut R, - ) -> (TransactionSignMachine, Vec) { + ) -> (TransactionSignMachine, Self::Preprocess) { // Iterate over each CLSAG calling preprocess - let mut serialized = Vec::with_capacity( - // D_{G, H}, E_{G, H}, DLEqs, key image addendum - self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len()), - ); + let mut preprocesses = Vec::with_capacity(self.clsags.len()); let clsags = self .clsags .drain(..) .map(|clsag| { let (clsag, preprocess) = clsag.preprocess(rng); - serialized.extend(&preprocess); + preprocesses.push(preprocess); clsag }) .collect(); - let our_preprocess = serialized.clone(); + let our_preprocess = preprocesses.clone(); // We could add further entropy here, and previous versions of this library did so // As of right now, the multisig's key, the inputs being spent, and the FROST data itself @@ -212,33 +206,35 @@ impl PreprocessMachine for TransactionMachine { our_preprocess, }, - serialized, + preprocesses, ) } } impl SignMachine for TransactionSignMachine { + type Preprocess = Vec>; + type SignatureShare = Vec>; type SignatureMachine = TransactionSignatureMachine; - fn sign( + fn read_preprocess(&self, reader: &mut R) -> io::Result { + self.clsags.iter().map(|clsag| clsag.read_preprocess(reader)).collect() + } + + fn sign( mut self, - mut commitments: HashMap, + mut commitments: HashMap, msg: &[u8], - ) -> Result<(TransactionSignatureMachine, Vec), FrostError> { + ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> { if !msg.is_empty() { Err(FrostError::InternalError( "message was passed to the TransactionMachine when 
it generates its own", ))?; } - // FROST commitments and their DLEqs, and the image and its DLEq - const CLSAG_LEN: usize = (2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len(); - // Convert the unified commitments to a Vec of the individual commitments let mut images = vec![EdwardsPoint::identity(); self.clsags.len()]; let mut commitments = (0 .. self.clsags.len()) .map(|c| { - let mut buf = [0; CLSAG_LEN]; self .included .iter() @@ -248,31 +244,27 @@ impl SignMachine for TransactionSignMachine { // transcripts cloned from this TX's initial premise's transcript. For our TX // transcript to have the CLSAG data for entropy, it'll have to be added ourselves here self.transcript.append_message(b"participant", &(*l).to_be_bytes()); - if *l == self.i { - buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice()); + + let preprocess = if *l == self.i { + self.our_preprocess[c].clone() } else { - commitments - .get_mut(l) - .ok_or(FrostError::MissingParticipant(*l))? - .read_exact(&mut buf) - .map_err(|_| FrostError::InvalidCommitment(*l))?; + commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone() + }; + + { + let mut buf = vec![]; + preprocess.write(&mut buf).unwrap(); + self.transcript.append_message(b"preprocess", &buf); } - self.transcript.append_message(b"preprocess", &buf); // While here, calculate the key image // Clsag will parse/calculate/validate this as needed, yet doing so here as well // provides the easiest API overall, as this is where the TX is (which needs the key // images in its message), along with where the outputs are determined (where our // outputs may need these in order to guarantee uniqueness) - images[c] += CompressedEdwardsY( - buf[(CLSAG_LEN - 96) .. 
(CLSAG_LEN - 64)] - .try_into() - .map_err(|_| FrostError::InvalidCommitment(*l))?, - ) - .decompress() - .ok_or(FrostError::InvalidCommitment(*l))?; + images[c] += preprocess.addendum.key_image.0; - Ok((*l, Cursor::new(buf))) + Ok((*l, preprocess)) }) .collect::, _>>() }) @@ -346,37 +338,39 @@ impl SignMachine for TransactionSignMachine { let msg = tx.signature_hash(); // Iterate over each CLSAG calling sign - let mut serialized = Vec::with_capacity(self.clsags.len() * 32); + let mut shares = Vec::with_capacity(self.clsags.len()); let clsags = self .clsags .drain(..) .map(|clsag| { let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?; - serialized.extend(&share); + shares.push(share); Ok(clsag) }) .collect::>()?; - Ok((TransactionSignatureMachine { tx, clsags }, serialized)) + Ok((TransactionSignatureMachine { tx, clsags }, shares)) } } impl SignatureMachine for TransactionSignatureMachine { - fn complete(self, mut shares: HashMap) -> Result { + type SignatureShare = Vec>; + + fn read_share(&self, reader: &mut R) -> io::Result { + self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect() + } + + fn complete( + mut self, + shares: HashMap, + ) -> Result { let mut tx = self.tx; match tx.rct_signatures.prunable { RctPrunable::Null => panic!("Signing for RctPrunable::Null"), RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. 
} => { - for clsag in self.clsags { + for (c, clsag) in self.clsags.drain(..).enumerate() { let (clsag, pseudo_out) = clsag.complete( - shares - .iter_mut() - .map(|(l, shares)| { - let mut buf = [0; 32]; - shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?; - Ok((*l, Cursor::new(buf))) - }) - .collect::, _>>()?, + shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::>(), )?; clsags.push(clsag); pseudo_outs.push(pseudo_out); diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml new file mode 100644 index 00000000..57c8f92d --- /dev/null +++ b/crypto/ciphersuite/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "ciphersuite" +version = "0.1.1" +description = "Ciphersuites built around ff/group" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite" +authors = ["Luke Parker "] +keywords = ["ciphersuite", "ff", "group"] +edition = "2021" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[dependencies] +rand_core = "0.6" + +zeroize = { version = "1.5", features = ["zeroize_derive"] } +subtle = "2" + +digest = "0.10" +sha2 = { version = "0.10", optional = true } +sha3 = { version = "0.10", optional = true } + +ff = { version = "0.12", features = ["bits"] } +group = "0.12" + +dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true } + +elliptic-curve = { version = "0.12", features = ["hash2curve"], optional = true } +p256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true } +k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true } + +minimal-ed448 = { path = "../ed448", version = "0.1", optional = true } + +[features] +std = [] + +dalek = ["sha2", "dalek-ff-group"] +ed25519 = ["dalek"] +ristretto = ["dalek"] + +kp256 = ["sha2", "elliptic-curve"] +p256 = ["kp256", "dep:p256"] +secp256k1 = ["kp256", "k256"] + +ed448 = ["sha3", 
"minimal-ed448"] + +default = ["std"] diff --git a/crypto/ciphersuite/LICENSE b/crypto/ciphersuite/LICENSE new file mode 100644 index 00000000..c0617e57 --- /dev/null +++ b/crypto/ciphersuite/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2022 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/ciphersuite/README.md b/crypto/ciphersuite/README.md new file mode 100644 index 00000000..db85fda2 --- /dev/null +++ b/crypto/ciphersuite/README.md @@ -0,0 +1,3 @@ +# Ciphersuite + +Ciphersuites for elliptic curves premised on ff/group. diff --git a/crypto/ciphersuite/src/dalek.rs b/crypto/ciphersuite/src/dalek.rs new file mode 100644 index 00000000..48b968eb --- /dev/null +++ b/crypto/ciphersuite/src/dalek.rs @@ -0,0 +1,44 @@ +use zeroize::Zeroize; + +use sha2::{Digest, Sha512}; + +use group::Group; +use dalek_ff_group::Scalar; + +use crate::Ciphersuite; + +macro_rules! 
dalek_curve { + ( + $feature: literal, + + $Ciphersuite: ident, + $Point: ident, + $ID: literal + ) => { + use dalek_ff_group::$Point; + + #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] + pub struct $Ciphersuite; + impl Ciphersuite for $Ciphersuite { + type F = Scalar; + type G = $Point; + type H = Sha512; + + const ID: &'static [u8] = $ID; + + fn generator() -> Self::G { + $Point::generator() + } + + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { + Scalar::from_hash(Sha512::new_with_prefix(&[dst, data].concat())) + } + } + }; +} + +#[cfg(any(test, feature = "ristretto"))] +dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto"); + +#[cfg(feature = "ed25519")] +dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519"); diff --git a/crypto/ciphersuite/src/ed448.rs b/crypto/ciphersuite/src/ed448.rs new file mode 100644 index 00000000..d5075b9e --- /dev/null +++ b/crypto/ciphersuite/src/ed448.rs @@ -0,0 +1,67 @@ +use zeroize::Zeroize; + +use digest::{ + typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput, + ExtendableOutput, XofReader, HashMarker, Digest, +}; +use sha3::Shake256; + +use group::Group; +use minimal_ed448::{scalar::Scalar, point::Point}; + +use crate::Ciphersuite; + +// Re-define Shake256 as a traditional Digest to meet API expectations +#[derive(Clone, Default)] +pub struct Shake256_114(Shake256); +impl BlockSizeUser for Shake256_114 { + type BlockSize = ::BlockSize; + fn block_size() -> usize { + Shake256::block_size() + } +} +impl OutputSizeUser for Shake256_114 { + type OutputSize = U114; + fn output_size() -> usize { + 114 + } +} +impl Update for Shake256_114 { + fn update(&mut self, data: &[u8]) { + self.0.update(data); + } + fn chain(mut self, data: impl AsRef<[u8]>) -> Self { + Update::update(&mut self, data.as_ref()); + self + } +} +impl FixedOutput for Shake256_114 { + fn finalize_fixed(self) -> Output { + let mut res = Default::default(); + FixedOutput::finalize_into(self, &mut res); 
+ res + } + fn finalize_into(self, out: &mut Output) { + let mut reader = self.0.finalize_xof(); + reader.read(out); + } +} +impl HashMarker for Shake256_114 {} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct Ed448; +impl Ciphersuite for Ed448 { + type F = Scalar; + type G = Point; + type H = Shake256_114; + + const ID: &'static [u8] = b"ed448"; + + fn generator() -> Self::G { + Point::generator() + } + + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { + Scalar::wide_reduce(Self::H::digest(&[dst, data].concat()).as_ref().try_into().unwrap()) + } +} diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs new file mode 100644 index 00000000..26b16bc9 --- /dev/null +++ b/crypto/ciphersuite/src/kp256.rs @@ -0,0 +1,72 @@ +use zeroize::Zeroize; + +use sha2::{Digest, Sha256}; + +use group::ff::{Field, PrimeField}; + +use elliptic_curve::{ + generic_array::GenericArray, + bigint::{Encoding, U384}, + hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}, +}; + +use crate::Ciphersuite; + +macro_rules! kp_curve { + ( + $feature: literal, + $lib: ident, + + $Ciphersuite: ident, + $ID: literal + ) => { + #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] + pub struct $Ciphersuite; + impl Ciphersuite for $Ciphersuite { + type F = $lib::Scalar; + type G = $lib::ProjectivePoint; + type H = Sha256; + + const ID: &'static [u8] = $ID; + + fn generator() -> Self::G { + $lib::ProjectivePoint::GENERATOR + } + + fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { + let mut dst = dst; + let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat()); + if dst.len() > 255 { + dst = oversize.as_ref(); + } + + // While one of these two libraries does support directly hashing to the Scalar field, the + // other doesn't. 
While that's probably an oversight, this is a universally working method + let mut modulus = [0; 48]; + modulus[16 ..].copy_from_slice(&(Self::F::zero() - Self::F::one()).to_bytes()); + let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE); + + let mut unreduced = U384::from_be_bytes({ + let mut bytes = [0; 48]; + ExpandMsgXmd::::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes); + bytes + }) + .reduce(&modulus) + .unwrap() + .to_be_bytes(); + + let mut array = *GenericArray::from_slice(&unreduced[16 ..]); + let res = $lib::Scalar::from_repr(array).unwrap(); + unreduced.zeroize(); + array.zeroize(); + res + } + } + }; +} + +#[cfg(feature = "p256")] +kp_curve!("p256", p256, P256, b"P-256"); + +#[cfg(feature = "secp256k1")] +kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1"); diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs new file mode 100644 index 00000000..68680d66 --- /dev/null +++ b/crypto/ciphersuite/src/lib.rs @@ -0,0 +1,106 @@ +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +use core::fmt::Debug; +#[cfg(feature = "std")] +use std::io::{self, Read}; + +use rand_core::{RngCore, CryptoRng}; + +use zeroize::Zeroize; +use subtle::ConstantTimeEq; + +use digest::{core_api::BlockSizeUser, Digest}; + +use group::{ + ff::{Field, PrimeField, PrimeFieldBits}, + Group, GroupOps, + prime::PrimeGroup, +}; +#[cfg(feature = "std")] +use group::GroupEncoding; + +#[cfg(feature = "dalek")] +mod dalek; +#[cfg(feature = "ristretto")] +pub use dalek::Ristretto; +#[cfg(feature = "ed25519")] +pub use dalek::Ed25519; + +#[cfg(feature = "kp256")] +mod kp256; +#[cfg(feature = "secp256k1")] +pub use kp256::Secp256k1; +#[cfg(feature = "p256")] +pub use kp256::P256; + +#[cfg(feature = "ed448")] +mod ed448; +#[cfg(feature = "ed448")] +pub use ed448::*; + +/// Unified trait defining a ciphersuite around an elliptic curve. 
+pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize { + /// Scalar field element type. + // This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses + type F: PrimeField + PrimeFieldBits + Zeroize; + /// Group element type. + type G: Group + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq; + /// Hash algorithm used with this curve. + // Requires BlockSizeUser so it can be used within Hkdf which requies that. + type H: Clone + BlockSizeUser + Digest; + + /// ID for this curve. + const ID: &'static [u8]; + + /// Generator for the group. + // While group does provide this in its API, privacy coins may want to use a custom basepoint + fn generator() -> Self::G; + + /// Hash the provided dst and message to a scalar. + #[allow(non_snake_case)] + fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F; + + /// Generate a random non-zero scalar. + #[allow(non_snake_case)] + fn random_nonzero_F(rng: &mut R) -> Self::F { + let mut res; + while { + res = Self::F::random(&mut *rng); + res.ct_eq(&Self::F::zero()).into() + } {} + res + } + + /// Read a canonical scalar from something implementing std::io::Read. + #[cfg(feature = "std")] + #[allow(non_snake_case)] + fn read_F(reader: &mut R) -> io::Result { + let mut encoding = ::Repr::default(); + reader.read_exact(encoding.as_mut())?; + + // ff mandates this is canonical + let res = Option::::from(Self::F::from_repr(encoding)) + .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar")); + for b in encoding.as_mut() { + b.zeroize(); + } + res + } + + /// Read a canonical point from something implementing std::io::Read. 
+ #[cfg(feature = "std")] + #[allow(non_snake_case)] + fn read_G(reader: &mut R) -> io::Result { + let mut encoding = ::Repr::default(); + reader.read_exact(encoding.as_mut())?; + + let point = Option::::from(Self::G::from_bytes(&encoding)) + .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?; + if point.to_bytes().as_ref() != encoding.as_ref() { + Err(io::Error::new(io::ErrorKind::Other, "non-canonical point"))?; + } + Ok(point) + } +} diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml new file mode 100644 index 00000000..5e66f467 --- /dev/null +++ b/crypto/dkg/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "dkg" +version = "0.1.0" +description = "Distributed key generation over ff/group" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[dependencies] +thiserror = "1" + +rand_core = "0.6" + +zeroize = { version = "1.5", features = ["zeroize_derive"] } +subtle = "2" + +hex = "0.4" + +digest = "0.10" + +hkdf = "0.12" +chacha20 = { version = "0.9", features = ["zeroize"] } + +group = "0.12" + +ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } + +transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"], version = "^0.1.3" } + +multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } + +schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.1.0" } +dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] } + +[features] +tests = [] diff --git a/crypto/dkg/LICENSE b/crypto/dkg/LICENSE new file mode 100644 index 00000000..c0617e57 --- /dev/null +++ b/crypto/dkg/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2022 Luke Parker + +Permission is hereby granted, free of charge, 
to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/README.md b/crypto/dkg/README.md new file mode 100644 index 00000000..73fb48c4 --- /dev/null +++ b/crypto/dkg/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation + +A collection of implementations of various distributed key generation protocols. + +All included protocols resolve into the provided `Threshold` types, intended to +enable their modularity. + +Additional utilities around them, such as promotion from one generator to +another, are also provided. + +Currently included is the two-round protocol from the +[FROST paper](https://eprint.iacr.org/2020/852). 
diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs new file mode 100644 index 00000000..9cd6ae2a --- /dev/null +++ b/crypto/dkg/src/frost.rs @@ -0,0 +1,458 @@ +use std::{ + marker::PhantomData, + io::{self, Read, Write}, + collections::HashMap, +}; + +use rand_core::{RngCore, CryptoRng}; + +use zeroize::{Zeroize, ZeroizeOnDrop}; + +use digest::Digest; +use hkdf::{Hkdf, hmac::SimpleHmac}; +use chacha20::{ + cipher::{crypto_common::KeyIvInit, StreamCipher}, + Key as Cc20Key, Nonce as Cc20Iv, ChaCha20, +}; + +use group::{ + ff::{Field, PrimeField}, + GroupEncoding, +}; + +use ciphersuite::Ciphersuite; + +use multiexp::{multiexp_vartime, BatchVerifier}; + +use schnorr::SchnorrSignature; + +use crate::{DkgError, ThresholdParams, ThresholdCore, validate_map}; + +#[allow(non_snake_case)] +fn challenge(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F { + const DST: &[u8] = b"FROST Schnorr Proof of Knowledge"; + + // Hashes the context to get a fixed size value out of it + let mut transcript = C::H::digest(context.as_bytes()).as_ref().to_vec(); + transcript.extend(l.to_be_bytes()); + transcript.extend(R); + transcript.extend(Am); + C::hash_to_F(DST, &transcript) +} + +/// Commitments message to be broadcast to all other parties. +#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] +pub struct Commitments { + commitments: Vec, + enc_key: C::G, + cached_msg: Vec, + sig: SchnorrSignature, +} +impl Drop for Commitments { + fn drop(&mut self) { + self.zeroize(); + } +} +impl ZeroizeOnDrop for Commitments {} + +impl Commitments { + pub fn read(reader: &mut R, params: ThresholdParams) -> io::Result { + let mut commitments = Vec::with_capacity(params.t().into()); + let mut cached_msg = vec![]; + + #[allow(non_snake_case)] + let mut read_G = || -> io::Result { + let mut buf = ::Repr::default(); + reader.read_exact(buf.as_mut())?; + let point = C::read_G(&mut buf.as_ref())?; + cached_msg.extend(buf.as_ref()); + Ok(point) + }; + + for _ in 0 .. 
params.t() { + commitments.push(read_G()?); + } + let enc_key = read_G()?; + + Ok(Commitments { commitments, enc_key, cached_msg, sig: SchnorrSignature::read(reader)? }) + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&self.cached_msg)?; + self.sig.write(writer) + } + + pub fn serialize(&self) -> Vec { + let mut buf = vec![]; + self.write(&mut buf).unwrap(); + buf + } +} + +/// State machine to begin the key generation protocol. +pub struct KeyGenMachine { + params: ThresholdParams, + context: String, + _curve: PhantomData, +} + +impl KeyGenMachine { + /// Creates a new machine to generate a key for the specified curve in the specified multisig. + // The context string should be unique among multisigs. + pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine { + KeyGenMachine { params, context, _curve: PhantomData } + } + + /// Start generating a key according to the FROST DKG spec. + /// Returns a commitments message to be sent to all parties over an authenticated channel. If any + /// party submits multiple sets of commitments, they MUST be treated as malicious. + pub fn generate_coefficients( + self, + rng: &mut R, + ) -> (SecretShareMachine, Commitments) { + let t = usize::from(self.params.t); + let mut coefficients = Vec::with_capacity(t); + let mut commitments = Vec::with_capacity(t); + let mut cached_msg = vec![]; + + for i in 0 .. t { + // Step 1: Generate t random values to form a polynomial with + coefficients.push(C::random_nonzero_F(&mut *rng)); + // Step 3: Generate public commitments + commitments.push(C::generator() * coefficients[i]); + cached_msg.extend(commitments[i].to_bytes().as_ref()); + } + + // Generate an encryption key for transmitting the secret shares + // It would probably be perfectly fine to use one of our polynomial elements, yet doing so + // puts the integrity of FROST at risk. 
While there's almost no way it could, as it's used in + // an ECDH with validated group elemnents, better to avoid any questions on it + let enc_key = C::random_nonzero_F(&mut *rng); + let pub_enc_key = C::generator() * enc_key; + cached_msg.extend(pub_enc_key.to_bytes().as_ref()); + + // Step 2: Provide a proof of knowledge + let mut r = C::random_nonzero_F(rng); + let sig = SchnorrSignature::::sign( + coefficients[0], + // This could be deterministic as the PoK is a singleton never opened up to cooperative + // discussion + // There's no reason to spend the time and effort to make this deterministic besides a + // general obsession with canonicity and determinism though + r, + challenge::( + &self.context, + self.params.i(), + (C::generator() * r).to_bytes().as_ref(), + &cached_msg, + ), + ); + r.zeroize(); + + // Step 4: Broadcast + ( + SecretShareMachine { + params: self.params, + context: self.context, + coefficients, + our_commitments: commitments.clone(), + enc_key, + }, + Commitments { commitments, enc_key: pub_enc_key, cached_msg, sig }, + ) + } +} + +fn polynomial(coefficients: &[F], l: u16) -> F { + let l = F::from(u64::from(l)); + let mut share = F::zero(); + for (idx, coefficient) in coefficients.iter().rev().enumerate() { + share += coefficient; + if idx != (coefficients.len() - 1) { + share *= l; + } + } + share +} + +/// Secret share to be sent to the party it's intended for over an authenticated channel. 
+#[derive(Clone, PartialEq, Eq, Debug)] +pub struct SecretShare(F::Repr); +impl Zeroize for SecretShare { + fn zeroize(&mut self) { + self.0.as_mut().zeroize() + } +} +impl Drop for SecretShare { + fn drop(&mut self) { + self.zeroize(); + } +} +impl ZeroizeOnDrop for SecretShare {} + +impl SecretShare { + pub fn read(reader: &mut R) -> io::Result { + let mut repr = F::Repr::default(); + reader.read_exact(repr.as_mut())?; + Ok(SecretShare(repr)) + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(self.0.as_ref()) + } + + pub fn serialize(&self) -> Vec { + let mut buf = vec![]; + self.write(&mut buf).unwrap(); + buf + } +} + +fn create_ciphers( + mut sender: ::Repr, + receiver: &mut ::Repr, + ecdh: &mut ::Repr, +) -> (ChaCha20, ChaCha20) { + let directional = |sender: &mut ::Repr| { + let mut key = Cc20Key::default(); + key.copy_from_slice( + &Hkdf::>::extract( + Some(b"key"), + &[sender.as_ref(), ecdh.as_ref()].concat(), + ) + .0 + .as_ref()[.. 32], + ); + let mut iv = Cc20Iv::default(); + iv.copy_from_slice( + &Hkdf::>::extract( + Some(b"iv"), + &[sender.as_ref(), ecdh.as_ref()].concat(), + ) + .0 + .as_ref()[.. 12], + ); + sender.as_mut().zeroize(); + + let res = ChaCha20::new(&key, &iv); + >::as_mut(&mut key).zeroize(); + >::as_mut(&mut iv).zeroize(); + res + }; + + let res = (directional(&mut sender), directional(receiver)); + ecdh.as_mut().zeroize(); + res +} + +/// Advancement of the key generation state machine. 
+#[derive(Zeroize)] +pub struct SecretShareMachine { + params: ThresholdParams, + context: String, + coefficients: Vec, + our_commitments: Vec, + enc_key: C::F, +} +impl Drop for SecretShareMachine { + fn drop(&mut self) { + self.zeroize() + } +} +impl ZeroizeOnDrop for SecretShareMachine {} + +impl SecretShareMachine { + /// Verify the data from the previous round (canonicity, PoKs, message authenticity) + fn verify_r1( + &mut self, + rng: &mut R, + mut commitments: HashMap>, + ) -> Result<(HashMap>, HashMap), DkgError> { + validate_map(&commitments, &(1 ..= self.params.n()).collect::>(), self.params.i())?; + + let mut enc_keys = HashMap::new(); + let mut batch = BatchVerifier::::new(commitments.len()); + let mut commitments = commitments + .drain() + .map(|(l, mut msg)| { + enc_keys.insert(l, msg.enc_key); + msg.enc_key.zeroize(); + + // Step 5: Validate each proof of knowledge + // This is solely the prep step for the latter batch verification + msg.sig.batch_verify( + rng, + &mut batch, + l, + msg.commitments[0], + challenge::(&self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg), + ); + + (l, msg.commitments.drain(..).collect::>()) + }) + .collect::>(); + + batch.verify_with_vartime_blame().map_err(DkgError::InvalidProofOfKnowledge)?; + + commitments.insert(self.params.i, self.our_commitments.drain(..).collect()); + Ok((commitments, enc_keys)) + } + + /// Continue generating a key. + /// Takes in everyone else's commitments. Returns a HashMap of secret shares to be sent over + /// authenticated channels to their relevant counterparties. 
+ pub fn generate_secret_shares( + mut self, + rng: &mut R, + commitments: HashMap>, + ) -> Result<(KeyMachine, HashMap>), DkgError> { + let (commitments, mut enc_keys) = self.verify_r1(&mut *rng, commitments)?; + + // Step 1: Generate secret shares for all other parties + let mut sender = (C::generator() * self.enc_key).to_bytes(); + let mut ciphers = HashMap::new(); + let mut res = HashMap::new(); + for l in 1 ..= self.params.n() { + // Don't insert our own shares to the byte buffer which is meant to be sent around + // An app developer could accidentally send it. Best to keep this black boxed + if l == self.params.i() { + continue; + } + + let (mut cipher_send, cipher_recv) = { + let receiver = enc_keys.get_mut(&l).unwrap(); + let mut ecdh = (*receiver * self.enc_key).to_bytes(); + + create_ciphers::(sender, &mut receiver.to_bytes(), &mut ecdh) + }; + + let mut share = polynomial(&self.coefficients, l); + let mut share_bytes = share.to_repr(); + share.zeroize(); + + cipher_send.apply_keystream(share_bytes.as_mut()); + drop(cipher_send); + + ciphers.insert(l, cipher_recv); + res.insert(l, SecretShare::(share_bytes)); + share_bytes.as_mut().zeroize(); + } + self.enc_key.zeroize(); + sender.as_mut().zeroize(); + + // Calculate our own share + let share = polynomial(&self.coefficients, self.params.i()); + + self.coefficients.zeroize(); + + Ok((KeyMachine { params: self.params, secret: share, commitments, ciphers }, res)) + } +} + +/// Final step of the key generation protocol. 
+pub struct KeyMachine { + params: ThresholdParams, + secret: C::F, + ciphers: HashMap, + commitments: HashMap>, +} +impl Zeroize for KeyMachine { + fn zeroize(&mut self) { + self.params.zeroize(); + self.secret.zeroize(); + + // cipher implements ZeroizeOnDrop and zeroizes on drop, yet doesn't implement Zeroize + // The following is redundant, as Rust should automatically handle dropping it, yet it shows + // awareness of this quirk and at least attempts to be comprehensive + for (_, cipher) in self.ciphers.drain() { + drop(cipher); + } + + for (_, commitments) in self.commitments.iter_mut() { + commitments.zeroize(); + } + } +} +impl Drop for KeyMachine { + fn drop(&mut self) { + self.zeroize() + } +} +impl ZeroizeOnDrop for KeyMachine {} + +impl KeyMachine { + /// Complete key generation. + /// Takes in everyone elses' shares submitted to us. Returns a ThresholdCore object representing + /// the generated keys. Successful protocol completion MUST be confirmed by all parties before + /// these keys may be safely used. + pub fn complete( + mut self, + rng: &mut R, + mut shares: HashMap>, + ) -> Result, DkgError> { + let mut secret_share = self.secret; + self.secret.zeroize(); + + validate_map(&shares, &(1 ..= self.params.n()).collect::>(), self.params.i())?; + + // Calculate the exponent for a given participant and apply it to a series of commitments + // Initially used with the actual commitments to verify the secret share, later used with + // stripes to generate the verification shares + let exponential = |i: u16, values: &[_]| { + let i = C::F::from(i.into()); + let mut res = Vec::with_capacity(self.params.t().into()); + (0 .. 
usize::from(self.params.t())).into_iter().fold(C::F::one(), |exp, l| { + res.push((exp, values[l])); + exp * i + }); + res + }; + + let mut batch = BatchVerifier::new(shares.len()); + for (l, mut share_bytes) in shares.drain() { + let mut cipher = self.ciphers.remove(&l).unwrap(); + cipher.apply_keystream(share_bytes.0.as_mut()); + drop(cipher); + + let mut share: C::F = + Option::from(C::F::from_repr(share_bytes.0)).ok_or(DkgError::InvalidShare(l))?; + share_bytes.zeroize(); + secret_share += share; + + // This can be insecurely linearized from n * t to just n using the below sums for a given + // stripe. Doing so uses naive addition which is subject to malleability. The only way to + // ensure that malleability isn't present is to use this n * t algorithm, which runs + // per sender and not as an aggregate of all senders, which also enables blame + let mut values = exponential(self.params.i, &self.commitments[&l]); + values.push((-share, C::generator())); + share.zeroize(); + + batch.queue(rng, l, values); + } + batch.verify_with_vartime_blame().map_err(DkgError::InvalidShare)?; + + // Stripe commitments per t and sum them in advance. Calculating verification shares relies on + // these sums so preprocessing them is a massive speedup + // If these weren't just sums, yet the tables used in multiexp, this would be further optimized + // As of right now, each multiexp will regenerate them + let mut stripes = Vec::with_capacity(usize::from(self.params.t())); + for t in 0 .. 
usize::from(self.params.t()) { + stripes.push(self.commitments.values().map(|commitments| commitments[t]).sum()); + } + + // Calculate each user's verification share + let mut verification_shares = HashMap::new(); + for i in 1 ..= self.params.n() { + verification_shares.insert(i, multiexp_vartime(&exponential(i, &stripes))); + } + // Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t) + debug_assert_eq!(C::generator() * secret_share, verification_shares[&self.params.i()]); + + Ok(ThresholdCore { + params: self.params, + secret_share, + group_key: stripes[0], + verification_shares, + }) + } +} diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs new file mode 100644 index 00000000..487513f4 --- /dev/null +++ b/crypto/dkg/src/lib.rs @@ -0,0 +1,399 @@ +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +//! A collection of implementations of various distributed key generation protocols. +//! They all resolve into the provided Threshold types intended to enable their modularity. +//! Additional utilities around them, such as promotion from one generator to another, are also +//! provided. + +use core::fmt::Debug; +use std::{io::Read, sync::Arc, collections::HashMap}; + +use thiserror::Error; + +use zeroize::{Zeroize, ZeroizeOnDrop}; + +use group::{ + ff::{Field, PrimeField}, + GroupEncoding, +}; + +use ciphersuite::Ciphersuite; + +/// The distributed key generation protocol described in the +/// [FROST paper](https://eprint.iacr.org/2020/852). +pub mod frost; + +/// Promote keys between ciphersuites. +pub mod promote; + +/// Tests for application-provided curves and algorithms. 
+#[cfg(any(test, feature = "tests"))] +pub mod tests; + +// Validate a map of values to have the expected included participants +pub(crate) fn validate_map( + map: &HashMap, + included: &[u16], + ours: u16, +) -> Result<(), DkgError> { + if (map.len() + 1) != included.len() { + Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?; + } + + for included in included { + if *included == ours { + if map.contains_key(included) { + Err(DkgError::DuplicatedIndex(*included))?; + } + continue; + } + + if !map.contains_key(included) { + Err(DkgError::MissingParticipant(*included))?; + } + } + + Ok(()) +} + +/// Parameters for a multisig. +// These fields should not be made public as they should be static +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct ThresholdParams { + /// Participants needed to sign on behalf of the group. + t: u16, + /// Amount of participants. + n: u16, + /// Index of the participant being acted for. + i: u16, +} + +impl ThresholdParams { + pub fn new(t: u16, n: u16, i: u16) -> Result { + if (t == 0) || (n == 0) { + Err(DkgError::ZeroParameter(t, n))?; + } + + // When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason), + // but it's not invalid to do so + if t > n { + Err(DkgError::InvalidRequiredQuantity(t, n))?; + } + if (i == 0) || (i > n) { + Err(DkgError::InvalidParticipantIndex(n, i))?; + } + + Ok(ThresholdParams { t, n, i }) + } + + pub fn t(&self) -> u16 { + self.t + } + pub fn n(&self) -> u16 { + self.n + } + pub fn i(&self) -> u16 { + self.i + } +} + +/// Various errors possible during key generation/signing. 
+#[derive(Copy, Clone, Error, Debug)] +pub enum DkgError { + #[error("a parameter was 0 (required {0}, participants {1})")] + ZeroParameter(u16, u16), + #[error("invalid amount of required participants (max {1}, got {0})")] + InvalidRequiredQuantity(u16, u16), + #[error("invalid participant index (0 < index <= {0}, yet index is {1})")] + InvalidParticipantIndex(u16, u16), + + #[error("invalid signing set")] + InvalidSigningSet, + #[error("invalid participant quantity (expected {0}, got {1})")] + InvalidParticipantQuantity(usize, usize), + #[error("duplicated participant index ({0})")] + DuplicatedIndex(u16), + #[error("missing participant {0}")] + MissingParticipant(u16), + + #[error("invalid proof of knowledge (participant {0})")] + InvalidProofOfKnowledge(u16), + #[error("invalid share (participant {0})")] + InvalidShare(u16), + + #[error("internal error ({0})")] + InternalError(&'static str), +} + +/// Calculate the lagrange coefficient for a signing set. +pub fn lagrange(i: u16, included: &[u16]) -> F { + let mut num = F::one(); + let mut denom = F::one(); + for l in included { + if i == *l { + continue; + } + + let share = F::from(u64::try_from(*l).unwrap()); + num *= share; + denom *= share - F::from(u64::try_from(i).unwrap()); + } + + // Safe as this will only be 0 if we're part of the above loop + // (which we have an if case to avoid) + num * denom.invert().unwrap() +} + +/// Keys and verification shares generated by a DKG. +/// Called core as they're expected to be wrapped into an Arc before usage in various operations. +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct ThresholdCore { + /// Threshold Parameters. + params: ThresholdParams, + + /// Secret share key. + secret_share: C::F, + /// Group key. + group_key: C::G, + /// Verification shares. 
+ verification_shares: HashMap, +} + +impl Zeroize for ThresholdCore { + fn zeroize(&mut self) { + self.params.zeroize(); + self.secret_share.zeroize(); + self.group_key.zeroize(); + for (_, share) in self.verification_shares.iter_mut() { + share.zeroize(); + } + } +} +impl Drop for ThresholdCore { + fn drop(&mut self) { + self.zeroize() + } +} +impl ZeroizeOnDrop for ThresholdCore {} + +impl ThresholdCore { + pub(crate) fn new( + params: ThresholdParams, + secret_share: C::F, + verification_shares: HashMap, + ) -> ThresholdCore { + #[cfg(debug_assertions)] + validate_map(&verification_shares, &(0 ..= params.n).collect::>(), 0).unwrap(); + + let t = (1 ..= params.t).collect::>(); + ThresholdCore { + params, + secret_share, + group_key: t.iter().map(|i| verification_shares[i] * lagrange::(*i, &t)).sum(), + verification_shares, + } + } + pub fn params(&self) -> ThresholdParams { + self.params + } + + pub fn secret_share(&self) -> C::F { + self.secret_share + } + + pub fn group_key(&self) -> C::G { + self.group_key + } + + pub(crate) fn verification_shares(&self) -> HashMap { + self.verification_shares.clone() + } + + pub fn serialize(&self) -> Vec { + let mut serialized = vec![]; + serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes()); + serialized.extend(C::ID); + serialized.extend(self.params.t.to_be_bytes()); + serialized.extend(self.params.n.to_be_bytes()); + serialized.extend(self.params.i.to_be_bytes()); + serialized.extend(self.secret_share.to_repr().as_ref()); + for l in 1 ..= self.params.n { + serialized.extend(self.verification_shares[&l].to_bytes().as_ref()); + } + serialized + } + + pub fn deserialize(reader: &mut R) -> Result, DkgError> { + { + let missing = DkgError::InternalError("ThresholdCore serialization is missing its curve"); + let different = DkgError::InternalError("deserializing ThresholdCore for another curve"); + + let mut id_len = [0; 4]; + reader.read_exact(&mut id_len).map_err(|_| missing)?; + if 
u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len { + Err(different)?; + } + + let mut id = vec![0; C::ID.len()]; + reader.read_exact(&mut id).map_err(|_| missing)?; + if id != C::ID { + Err(different)?; + } + } + + let (t, n, i) = { + let mut read_u16 = || { + let mut value = [0; 2]; + reader + .read_exact(&mut value) + .map_err(|_| DkgError::InternalError("missing participant quantities"))?; + Ok(u16::from_be_bytes(value)) + }; + (read_u16()?, read_u16()?, read_u16()?) + }; + + let secret_share = + C::read_F(reader).map_err(|_| DkgError::InternalError("invalid secret share"))?; + + let mut verification_shares = HashMap::new(); + for l in 1 ..= n { + verification_shares.insert( + l, + ::read_G(reader) + .map_err(|_| DkgError::InternalError("invalid verification share"))?, + ); + } + + Ok(ThresholdCore::new( + ThresholdParams::new(t, n, i).map_err(|_| DkgError::InternalError("invalid parameters"))?, + secret_share, + verification_shares, + )) + } +} + +/// Threshold keys usable for signing. +#[derive(Clone, Debug, Zeroize)] +pub struct ThresholdKeys { + /// Core keys. + #[zeroize(skip)] + core: Arc>, + + /// Offset applied to these keys. + pub(crate) offset: Option, +} + +// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786 +impl Drop for ThresholdKeys { + fn drop(&mut self) { + self.zeroize() + } +} +impl ZeroizeOnDrop for ThresholdKeys {} + +/// View of keys passed to algorithm implementations. +#[derive(Clone, Zeroize)] +pub struct ThresholdView { + group_key: C::G, + #[zeroize(skip)] + included: Vec, + secret_share: C::F, + #[zeroize(skip)] + verification_shares: HashMap, +} + +impl Drop for ThresholdView { + fn drop(&mut self) { + self.zeroize() + } +} +impl ZeroizeOnDrop for ThresholdView {} + +impl ThresholdKeys { + pub fn new(core: ThresholdCore) -> ThresholdKeys { + ThresholdKeys { core: Arc::new(core), offset: None } + } + + /// Offset the keys by a given scalar to allow for account and privacy schemes. 
+ /// This offset is ephemeral and will not be included when these keys are serialized. + /// Keys offset multiple times will form a new offset of their sum. + pub fn offset(&self, offset: C::F) -> ThresholdKeys { + let mut res = self.clone(); + // Carry any existing offset + // Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a + // one-time-key offset + res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero)); + res + } + + /// Returns the current offset in-use for these keys. + pub fn current_offset(&self) -> Option { + self.offset + } + + pub fn params(&self) -> ThresholdParams { + self.core.params + } + + pub fn secret_share(&self) -> C::F { + self.core.secret_share + } + + /// Returns the group key with any offset applied. + pub fn group_key(&self) -> C::G { + self.core.group_key + (C::generator() * self.offset.unwrap_or_else(C::F::zero)) + } + + /// Returns all participants' verification shares without any offsetting. + pub(crate) fn verification_shares(&self) -> HashMap { + self.core.verification_shares() + } + + pub fn serialize(&self) -> Vec { + self.core.serialize() + } + + pub fn view(&self, included: &[u16]) -> Result, DkgError> { + if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len()) + { + Err(DkgError::InvalidSigningSet)?; + } + + let offset_share = self.offset.unwrap_or_else(C::F::zero) * + C::F::from(included.len().try_into().unwrap()).invert().unwrap(); + let offset_verification_share = C::generator() * offset_share; + + Ok(ThresholdView { + group_key: self.group_key(), + secret_share: (self.secret_share() * lagrange::(self.params().i, included)) + + offset_share, + verification_shares: self + .verification_shares() + .iter() + .map(|(l, share)| { + (*l, (*share * lagrange::(*l, included)) + offset_verification_share) + }) + .collect(), + included: included.to_vec(), + }) + } +} + +impl ThresholdView { + pub fn group_key(&self) -> C::G { + self.group_key + } 
+ + pub fn included(&self) -> Vec { + self.included.clone() + } + + pub fn secret_share(&self) -> C::F { + self.secret_share + } + + pub fn verification_share(&self, l: u16) -> C::G { + self.verification_shares[&l] + } +} diff --git a/crypto/frost/src/promote.rs b/crypto/dkg/src/promote.rs similarity index 55% rename from crypto/frost/src/promote.rs rename to crypto/dkg/src/promote.rs index c879f056..458c2212 100644 --- a/crypto/frost/src/promote.rs +++ b/crypto/dkg/src/promote.rs @@ -9,50 +9,24 @@ use rand_core::{RngCore, CryptoRng}; use group::GroupEncoding; +use ciphersuite::Ciphersuite; + use transcript::{Transcript, RecommendedTranscript}; use dleq::DLEqProof; -use crate::{ - curve::{CurveError, Curve}, - FrostError, FrostCore, FrostKeys, validate_map, -}; +use crate::{DkgError, ThresholdCore, ThresholdKeys, validate_map}; -/// Promote a set of keys to another Curve definition. -pub trait CurvePromote { +/// Promote a set of keys to another Ciphersuite definition. +pub trait CiphersuitePromote { #[doc(hidden)] #[allow(non_snake_case)] fn _bound_C2(_c2: C2) { panic!() } - fn promote(self) -> FrostKeys; + fn promote(self) -> ThresholdKeys; } -// Implement promotion to different ciphersuites, panicking if the generators are different -// Commented due to lack of practical benefit. While it'd have interoperability benefits, those -// would have their own DKG process which isn't compatible anyways. 
This becomes unsafe code -// that'll never be used but we're bound to support -/* -impl CurvePromote for FrostKeys -where - C2: Curve, -{ - fn promote(self) -> FrostKeys { - assert_eq!(C::GENERATOR, C2::GENERATOR); - - FrostKeys { - core: Arc::new(FrostCore { - params: self.core.params, - secret_share: self.core.secret_share, - group_key: self.core.group_key, - verification_shares: self.core.verification_shares(), - }), - offset: None, - } - } -} -*/ - fn transcript(key: G, i: u16) -> RecommendedTranscript { let mut transcript = RecommendedTranscript::new(b"FROST Generator Update"); transcript.append_message(b"group_key", key.to_bytes().as_ref()); @@ -62,43 +36,49 @@ fn transcript(key: G, i: u16) -> RecommendedTranscript { /// Proof of valid promotion to another generator. #[derive(Clone, Copy)] -pub struct GeneratorProof { +pub struct GeneratorProof { share: C::G, proof: DLEqProof, } -impl GeneratorProof { - pub fn serialize(&self, writer: &mut W) -> io::Result<()> { +impl GeneratorProof { + pub fn write(&self, writer: &mut W) -> io::Result<()> { writer.write_all(self.share.to_bytes().as_ref())?; self.proof.serialize(writer) } - pub fn deserialize(reader: &mut R) -> Result, CurveError> { + pub fn read(reader: &mut R) -> io::Result> { Ok(GeneratorProof { - share: C::read_G(reader)?, - proof: DLEqProof::deserialize(reader).map_err(|_| CurveError::InvalidScalar)?, + share: ::read_G(reader)?, + proof: DLEqProof::deserialize(reader)?, }) } + + pub fn serialize(&self) -> Vec { + let mut buf = vec![]; + self.write(&mut buf).unwrap(); + buf + } } /// Promote a set of keys from one curve to another, where the elliptic curve is the same. -/// Since the Curve trait additionally specifies a generator, this provides an O(n) way to update -/// the generator used with keys. The key generation protocol itself is exponential. 
-pub struct GeneratorPromotion { - base: FrostKeys, +/// Since the Ciphersuite trait additionally specifies a generator, this provides an O(n) way to +/// update the generator used with keys. The key generation protocol itself is exponential. +pub struct GeneratorPromotion { + base: ThresholdKeys, proof: GeneratorProof, _c2: PhantomData, } -impl GeneratorPromotion +impl GeneratorPromotion where - C2: Curve, + C2: Ciphersuite, { /// Begin promoting keys from one curve to another. Returns a proof this share was properly /// promoted. pub fn promote( rng: &mut R, - base: FrostKeys, + base: ThresholdKeys, ) -> (GeneratorPromotion, GeneratorProof) { // Do a DLEqProof for the new generator let proof = GeneratorProof { @@ -118,7 +98,7 @@ where pub fn complete( self, proofs: &HashMap>, - ) -> Result, FrostError> { + ) -> Result, DkgError> { let params = self.base.params(); validate_map(proofs, &(1 ..= params.n).collect::>(), params.i)?; @@ -135,12 +115,12 @@ where &[C1::generator(), C2::generator()], &[original_shares[&i], proof.share], ) - .map_err(|_| FrostError::InvalidProofOfKnowledge(i))?; + .map_err(|_| DkgError::InvalidProofOfKnowledge(i))?; verification_shares.insert(i, proof.share); } - Ok(FrostKeys { - core: Arc::new(FrostCore::new(params, self.base.secret_share(), verification_shares)), + Ok(ThresholdKeys { + core: Arc::new(ThresholdCore::new(params, self.base.secret_share(), verification_shares)), offset: None, }) } diff --git a/crypto/dkg/src/tests/frost.rs b/crypto/dkg/src/tests/frost.rs new file mode 100644 index 00000000..3abba285 --- /dev/null +++ b/crypto/dkg/src/tests/frost.rs @@ -0,0 +1,81 @@ +use std::collections::HashMap; + +use rand_core::{RngCore, CryptoRng}; + +use crate::{ + Ciphersuite, ThresholdParams, ThresholdCore, + frost::{SecretShare, Commitments, KeyGenMachine}, + tests::{THRESHOLD, PARTICIPANTS, clone_without}, +}; + +/// Fully perform the FROST key generation algorithm. 
+pub fn frost_gen( + rng: &mut R, +) -> HashMap> { + let mut machines = HashMap::new(); + let mut commitments = HashMap::new(); + for i in 1 ..= PARTICIPANTS { + let machine = KeyGenMachine::::new( + ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(), + "DKG Test Key Generation".to_string(), + ); + let (machine, these_commitments) = machine.generate_coefficients(rng); + machines.insert(i, machine); + + commitments.insert( + i, + Commitments::read::<&[u8]>( + &mut these_commitments.serialize().as_ref(), + ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 }, + ) + .unwrap(), + ); + } + + let mut secret_shares = HashMap::new(); + let mut machines = machines + .drain() + .map(|(l, machine)| { + let (machine, mut shares) = + machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); + let shares = shares + .drain() + .map(|(l, share)| { + (l, SecretShare::::read::<&[u8]>(&mut share.serialize().as_ref()).unwrap()) + }) + .collect::>(); + secret_shares.insert(l, shares); + (l, machine) + }) + .collect::>(); + + let mut verification_shares = None; + let mut group_key = None; + machines + .drain() + .map(|(i, machine)| { + let mut our_secret_shares = HashMap::new(); + for (l, shares) in &secret_shares { + if i == *l { + continue; + } + our_secret_shares.insert(*l, shares[&i].clone()); + } + let these_keys = machine.complete(rng, our_secret_shares).unwrap(); + + // Verify the verification_shares are agreed upon + if verification_shares.is_none() { + verification_shares = Some(these_keys.verification_shares()); + } + assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares()); + + // Verify the group keys are agreed upon + if group_key.is_none() { + group_key = Some(these_keys.group_key()); + } + assert_eq!(group_key.unwrap(), these_keys.group_key()); + + (i, these_keys) + }) + .collect::>() +} diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs new file mode 100644 index 00000000..2011cd33 --- 
/dev/null +++ b/crypto/dkg/src/tests/mod.rs @@ -0,0 +1,69 @@ +use std::collections::HashMap; + +use rand_core::{RngCore, CryptoRng}; + +use group::ff::Field; + +use ciphersuite::Ciphersuite; + +use crate::{ThresholdCore, ThresholdKeys, lagrange}; + +/// FROST generation test. +pub mod frost; +use frost::frost_gen; + +// Promotion test. +mod promote; +use promote::test_generator_promotion; + +/// Constant amount of participants to use when testing. +pub const PARTICIPANTS: u16 = 5; +/// Constant threshold of participants to use when signing. +pub const THRESHOLD: u16 = ((PARTICIPANTS / 3) * 2) + 1; + +/// Clone a map without a specific value. +pub fn clone_without( + map: &HashMap, + without: &K, +) -> HashMap { + let mut res = map.clone(); + res.remove(without).unwrap(); + res +} + +/// Recover the secret from a collection of keys. +pub fn recover_key(keys: &HashMap>) -> C::F { + let first = keys.values().next().expect("no keys provided"); + assert!(keys.len() >= first.params().t().into(), "not enough keys provided"); + let included = keys.keys().cloned().collect::>(); + + let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| { + accum + (keys.secret_share() * lagrange::(*i, &included)) + }); + assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys"); + group_private +} + +/// Generate threshold keys for tests. +pub fn key_gen( + rng: &mut R, +) -> HashMap> { + let res = frost_gen(rng) + .drain() + .map(|(i, core)| { + assert_eq!( + &ThresholdCore::::deserialize::<&[u8]>(&mut core.serialize().as_ref()).unwrap(), + &core + ); + (i, ThresholdKeys::new(core)) + }) + .collect(); + assert_eq!(C::generator() * recover_key(&res), res[&1].group_key()); + res +} + +/// Run the test suite on a ciphersuite. 
+pub fn test_ciphersuite(rng: &mut R) { + key_gen::<_, C>(rng); + test_generator_promotion::<_, C>(rng); +} diff --git a/crypto/dkg/src/tests/promote.rs b/crypto/dkg/src/tests/promote.rs new file mode 100644 index 00000000..adc1bd9d --- /dev/null +++ b/crypto/dkg/src/tests/promote.rs @@ -0,0 +1,60 @@ +use std::{marker::PhantomData, collections::HashMap}; + +use rand_core::{RngCore, CryptoRng}; + +use zeroize::Zeroize; + +use group::Group; + +use ciphersuite::Ciphersuite; + +use crate::{ + promote::{GeneratorPromotion, GeneratorProof}, + tests::{clone_without, key_gen, recover_key}, +}; + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +struct AltGenerator { + _curve: PhantomData, +} + +impl Ciphersuite for AltGenerator { + type F = C::F; + type G = C::G; + type H = C::H; + + const ID: &'static [u8] = b"Alternate Ciphersuite"; + + fn generator() -> Self::G { + C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") + } + + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { + ::hash_to_F(dst, data) + } +} + +// Test promotion of threshold keys to another generator +pub(crate) fn test_generator_promotion(rng: &mut R) { + let keys = key_gen::<_, C>(&mut *rng); + + let mut promotions = HashMap::new(); + let mut proofs = HashMap::new(); + for (i, keys) in &keys { + let (promotion, proof) = + GeneratorPromotion::<_, AltGenerator>::promote(&mut *rng, keys.clone()); + promotions.insert(*i, promotion); + proofs.insert(*i, GeneratorProof::::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap()); + } + + let new_group_key = AltGenerator::::generator() * recover_key(&keys); + for (i, promoting) in promotions.drain() { + let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); + assert_eq!(keys[&i].params(), promoted.params()); + assert_eq!(keys[&i].secret_share(), promoted.secret_share()); + assert_eq!(new_group_key, promoted.group_key()); + for (l, verification_share) in promoted.verification_shares() { + 
assert_eq!(AltGenerator::::generator() * keys[&l].secret_share(), verification_share); + } + } +} diff --git a/crypto/dleq/Cargo.toml b/crypto/dleq/Cargo.toml index 99a0fa47..485c97e4 100644 --- a/crypto/dleq/Cargo.toml +++ b/crypto/dleq/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dleq" -version = "0.1.1" +version = "0.1.2" description = "Implementation of single and cross-curve Discrete Log Equality proofs" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq" diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index 642cfd2d..f170a370 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -61,7 +61,7 @@ pub enum DLEqError { InvalidProof, } -#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct DLEqProof { c: G::Scalar, s: G::Scalar, diff --git a/crypto/ed448/src/lib.rs b/crypto/ed448/src/lib.rs index d0f6d14d..4afeebde 100644 --- a/crypto/ed448/src/lib.rs +++ b/crypto/ed448/src/lib.rs @@ -1,6 +1,12 @@ #![no_std] mod backend; + pub mod scalar; +pub use scalar::Scalar; + pub mod field; +pub use field::FieldElement; + pub mod point; +pub use point::Point; diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 452ea429..81afb57a 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "modular-frost" -version = "0.2.4" +version = "0.3.0" description = "Modular implementation of FROST over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost" @@ -22,40 +22,37 @@ subtle = "2" hex = "0.4" -sha2 = { version = "0.10", optional = true } -sha3 = { version = "0.10", optional = true } +digest = "0.10" + +hkdf = "0.12" +chacha20 = { version = "0.9", features = ["zeroize"] } -ff = "0.12" group = "0.12" dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true } - -elliptic-curve = { version = "0.12", features = ["hash2curve"], optional 
= true } -p256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true } -k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true } - minimal-ed448 = { path = "../ed448", version = "0.1", optional = true } +ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } + transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"], version = "^0.1.3" } multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } -dleq = { path = "../dleq", version = "0.1", features = ["serialize"] } +schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.1.0" } +dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] } + +dkg = { path = "../dkg", version = "0.1.0" } [dev-dependencies] -sha2 = "0.10" -dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" } serde_json = "1" [features] -dalek = ["sha2", "dalek-ff-group"] -ed25519 = ["dalek"] -ristretto = ["dalek"] +ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] +ristretto = ["dalek-ff-group", "ciphersuite/ristretto"] -kp256 = ["sha2", "elliptic-curve"] -p256 = ["kp256", "dep:p256"] -secp256k1 = ["kp256", "k256"] +secp256k1 = ["ciphersuite/secp256k1"] +p256 = ["ciphersuite/p256"] -ed448 = ["sha3", "minimal-ed448"] +ed448 = ["minimal-ed448", "ciphersuite/ed448"] -tests = [] +tests = ["dkg/tests"] diff --git a/crypto/frost/README.md b/crypto/frost/README.md index 3032ea8c..1a242d6b 100644 --- a/crypto/frost/README.md +++ b/crypto/frost/README.md @@ -10,4 +10,4 @@ integrating with existing systems. This library offers ciphersuites compatible with the [IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version -10 is supported. +11 is supported. 
diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index 28adaa1d..c4f3ceb8 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -1,41 +1,61 @@ use core::{marker::PhantomData, fmt::Debug}; -use std::io::Read; +use std::io::{self, Read, Write}; use rand_core::{RngCore, CryptoRng}; use transcript::Transcript; -use crate::{Curve, FrostError, FrostView, schnorr}; +use crate::{Curve, FrostError, ThresholdView}; pub use schnorr::SchnorrSignature; +/// Write an addendum to a writer. +pub trait WriteAddendum { + fn write(&self, writer: &mut W) -> io::Result<()>; +} + +impl WriteAddendum for () { + fn write(&self, _: &mut W) -> io::Result<()> { + Ok(()) + } +} + +/// Trait alias for the requirements to be used as an addendum. +pub trait Addendum: Clone + PartialEq + Debug + WriteAddendum {} +impl Addendum for A {} + /// Algorithm trait usable by the FROST signing machine to produce signatures.. pub trait Algorithm: Clone { /// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible /// transcript included in this crate. - type Transcript: Transcript + Clone + Debug; + type Transcript: Clone + Debug + Transcript; + /// Serializable addendum, used in algorithms requiring more data than just the nonces. + type Addendum: Addendum; /// The resulting type of the signatures this algorithm will produce. type Signature: Clone + PartialEq + Debug; /// Obtain a mutable borrow of the underlying transcript. fn transcript(&mut self) -> &mut Self::Transcript; - /// Obtain the list of nonces to generate, as specified by the basepoints to create commitments. - /// against per-nonce. These are not committed to by FROST on the underlying transcript. + /// Obtain the list of nonces to generate, as specified by the generators to create commitments + /// against per-nonce fn nonces(&self) -> Vec>; /// Generate an addendum to FROST"s preprocessing stage. 
fn preprocess_addendum( &mut self, rng: &mut R, - params: &FrostView, - ) -> Vec; + params: &ThresholdView, + ) -> Self::Addendum; - /// Proccess the addendum for the specified participant. Guaranteed to be ordered. - fn process_addendum( + /// Read an addendum from a reader. + fn read_addendum(&self, reader: &mut R) -> io::Result; + + /// Process the addendum for the specified participant. Guaranteed to be called in order. + fn process_addendum( &mut self, - params: &FrostView, + params: &ThresholdView, l: u16, - reader: &mut Re, + reader: Self::Addendum, ) -> Result<(), FrostError>; /// Sign a share with the given secret/nonce. @@ -44,7 +64,7 @@ pub trait Algorithm: Clone { /// The nonce will already have been processed into the combined form d + (e * p). fn sign_share( &mut self, - params: &FrostView, + params: &ThresholdView, nonce_sums: &[Vec], nonces: &[C::F], msg: &[u8], @@ -116,6 +136,7 @@ impl> Schnorr { impl> Algorithm for Schnorr { type Transcript = IetfTranscript; + type Addendum = (); type Signature = SchnorrSignature; fn transcript(&mut self) -> &mut Self::Transcript { @@ -126,51 +147,36 @@ impl> Algorithm for Schnorr { vec![vec![C::generator()]] } - fn preprocess_addendum( - &mut self, - _: &mut R, - _: &FrostView, - ) -> Vec { - vec![] + fn preprocess_addendum(&mut self, _: &mut R, _: &ThresholdView) {} + + fn read_addendum(&self, _: &mut R) -> io::Result { + Ok(()) }
C::G, nonces: &[Vec], sum: C::F) -> Option { let sig = SchnorrSignature { R: nonces[0][0], s: sum }; - if schnorr::verify::(group_key, self.c.unwrap(), &sig) { - Some(sig) - } else { - None - } + Some(sig).filter(|sig| sig.verify(group_key, self.c.unwrap())) } #[must_use] fn verify_share(&self, verification_share: C::G, nonces: &[Vec], share: C::F) -> bool { - schnorr::verify::( - verification_share, - self.c.unwrap(), - &SchnorrSignature { R: nonces[0][0], s: share }, - ) + SchnorrSignature:: { R: nonces[0][0], s: share }.verify(verification_share, self.c.unwrap()) } } diff --git a/crypto/frost/src/curve/dalek.rs b/crypto/frost/src/curve/dalek.rs index 8db03bd0..25e97c60 100644 --- a/crypto/frost/src/curve/dalek.rs +++ b/crypto/frost/src/curve/dalek.rs @@ -1,10 +1,9 @@ -use zeroize::Zeroize; +use digest::Digest; -use sha2::{Digest, Sha512}; - -use group::Group; use dalek_ff_group::Scalar; +use ciphersuite::Ciphersuite; + use crate::{curve::Curve, algorithm::Hram}; macro_rules! dalek_curve { @@ -13,49 +12,22 @@ macro_rules! 
dalek_curve { $Curve: ident, $Hram: ident, - $Point: ident, - $ID: literal, $CONTEXT: literal, - $chal: literal, + $chal: literal ) => { - use dalek_ff_group::$Point; - - #[cfg_attr(docsrs, doc(cfg(feature = $feature)))] - #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] - pub struct $Curve; - impl $Curve { - fn hash(dst: &[u8], data: &[u8]) -> Sha512 { - Sha512::new().chain_update(&[$CONTEXT.as_ref(), dst, data].concat()) - } - } + pub use ciphersuite::$Curve; impl Curve for $Curve { - type F = Scalar; - type G = $Point; - - const ID: &'static [u8] = $ID; - - fn generator() -> Self::G { - $Point::generator() - } - - fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec { - Self::hash(dst, data).finalize().to_vec() - } - - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { - Scalar::from_hash(Self::hash(dst, data)) - } + const CONTEXT: &'static [u8] = $CONTEXT; } - #[cfg_attr(docsrs, doc(cfg(feature = $feature)))] #[derive(Copy, Clone)] pub struct $Hram; impl Hram<$Curve> for $Hram { #[allow(non_snake_case)] - fn hram(R: &$Point, A: &$Point, m: &[u8]) -> Scalar { - let mut hash = Sha512::new(); + fn hram(R: &<$Curve as Ciphersuite>::G, A: &<$Curve as Ciphersuite>::G, m: &[u8]) -> Scalar { + let mut hash = <$Curve as Ciphersuite>::H::new(); if $chal.len() != 0 { hash.update(&[$CONTEXT.as_ref(), $chal].concat()); } @@ -67,24 +39,8 @@ macro_rules! 
dalek_curve { }; } -#[cfg(any(test, feature = "ristretto"))] -dalek_curve!( - "ristretto", - Ristretto, - IetfRistrettoHram, - RistrettoPoint, - b"ristretto", - b"FROST-RISTRETTO255-SHA512-v11", - b"chal", -); +#[cfg(feature = "ristretto")] +dalek_curve!("ristretto", Ristretto, IetfRistrettoHram, b"FROST-RISTRETTO255-SHA512-v11", b"chal"); #[cfg(feature = "ed25519")] -dalek_curve!( - "ed25519", - Ed25519, - IetfEd25519Hram, - EdwardsPoint, - b"edwards25519", - b"FROST-ED25519-SHA512-v11", - b"", -); +dalek_curve!("ed25519", Ed25519, IetfEd25519Hram, b"FROST-ED25519-SHA512-v11", b""); diff --git a/crypto/frost/src/curve/ed448.rs b/crypto/frost/src/curve/ed448.rs index b3ae8550..e6035048 100644 --- a/crypto/frost/src/curve/ed448.rs +++ b/crypto/frost/src/curve/ed448.rs @@ -1,41 +1,17 @@ -use zeroize::Zeroize; +use digest::Digest; -use sha3::{digest::ExtendableOutput, Shake256}; +use group::GroupEncoding; -use group::{Group, GroupEncoding}; -use minimal_ed448::{scalar::Scalar, point::Point}; +use minimal_ed448::{Scalar, Point}; + +pub use ciphersuite::{Shake256_114, Ed448}; use crate::{curve::Curve, algorithm::Hram}; const CONTEXT: &[u8] = b"FROST-ED448-SHAKE256-v11"; -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub struct Ed448; -impl Ed448 { - fn hash(prefix: &[u8], context: &[u8], dst: &[u8], data: &[u8]) -> [u8; 114] { - let mut res = [0; 114]; - Shake256::digest_xof(&[prefix, context, dst, data].concat(), &mut res); - res - } -} - impl Curve for Ed448 { - type F = Scalar; - type G = Point; - - const ID: &'static [u8] = b"ed448"; - - fn generator() -> Self::G { - Point::generator() - } - - fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec { - Self::hash(b"", CONTEXT, dst, data).as_ref().to_vec() - } - - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { - Scalar::wide_reduce(Self::hash(b"", CONTEXT, dst, data)) - } + const CONTEXT: &'static [u8] = CONTEXT; } #[derive(Copy, Clone)] @@ -43,12 +19,19 @@ pub struct Ietf8032Ed448Hram; impl Ietf8032Ed448Hram { 
#[allow(non_snake_case)] pub fn hram(context: &[u8], R: &Point, A: &Point, m: &[u8]) -> Scalar { - Scalar::wide_reduce(Ed448::hash( - &[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(), - context, - b"", - &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(), - )) + Scalar::wide_reduce( + Shake256_114::digest( + &[ + &[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(), + context, + &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(), + ] + .concat(), + ) + .as_ref() + .try_into() + .unwrap(), + ) } } diff --git a/crypto/frost/src/curve/kp256.rs b/crypto/frost/src/curve/kp256.rs index a7d65beb..653d5c18 100644 --- a/crypto/frost/src/curve/kp256.rs +++ b/crypto/frost/src/curve/kp256.rs @@ -1,17 +1,6 @@ -use zeroize::Zeroize; +use group::GroupEncoding; -use sha2::{Digest, Sha256}; - -use group::{ - ff::{Field, PrimeField}, - GroupEncoding, -}; - -use elliptic_curve::{ - generic_array::GenericArray, - bigint::{Encoding, U384}, - hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}, -}; +use ciphersuite::Ciphersuite; use crate::{curve::Curve, algorithm::Hram}; @@ -19,87 +8,37 @@ macro_rules! 
kp_curve { ( $feature: literal, - $lib: ident, $Curve: ident, $Hram: ident, - $ID: literal, $CONTEXT: literal ) => { - #[cfg_attr(docsrs, doc(cfg(feature = $feature)))] - #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] - pub struct $Curve; - impl $Curve { - fn hash(dst: &[u8], data: &[u8]) -> Sha256 { - Sha256::new().chain_update(&[$CONTEXT.as_ref(), dst, data].concat()) - } - } + pub use ciphersuite::$Curve; impl Curve for $Curve { - type F = $lib::Scalar; - type G = $lib::ProjectivePoint; - - const ID: &'static [u8] = $ID; - - fn generator() -> Self::G { - $lib::ProjectivePoint::GENERATOR - } - - fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec { - Self::hash(dst, data).finalize().to_vec() - } - - fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { - let mut dst = &[$CONTEXT, dst].concat(); - let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat()).to_vec(); - if dst.len() > 255 { - dst = &oversize; - } - - // While one of these two libraries does support directly hashing to the Scalar field, the - // other doesn't. 
While that's probably an oversight, this is a universally working method - let mut modulus = vec![0; 16]; - modulus.extend((Self::F::zero() - Self::F::one()).to_bytes()); - let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE); - - let mut unreduced = U384::from_be_bytes({ - let mut bytes = [0; 48]; - ExpandMsgXmd::::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes); - bytes - }) - .reduce(&modulus) - .unwrap() - .to_be_bytes(); - - let mut array = *GenericArray::from_slice(&unreduced[16 ..]); - let res = $lib::Scalar::from_repr(array).unwrap(); - unreduced.zeroize(); - array.zeroize(); - res - } + const CONTEXT: &'static [u8] = $CONTEXT; } - #[cfg_attr(docsrs, doc(cfg(feature = $feature)))] #[derive(Clone)] pub struct $Hram; impl Hram<$Curve> for $Hram { #[allow(non_snake_case)] - fn hram(R: &$lib::ProjectivePoint, A: &$lib::ProjectivePoint, m: &[u8]) -> $lib::Scalar { - $Curve::hash_to_F(b"chal", &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat()) + fn hram( + R: &<$Curve as Ciphersuite>::G, + A: &<$Curve as Ciphersuite>::G, + m: &[u8], + ) -> <$Curve as Ciphersuite>::F { + <$Curve as Curve>::hash_to_F( + b"chal", + &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(), + ) } } }; } #[cfg(feature = "p256")] -kp_curve!("p256", p256, P256, IetfP256Hram, b"P-256", b"FROST-P256-SHA256-v11"); +kp_curve!("p256", P256, IetfP256Hram, b"FROST-P256-SHA256-v11"); #[cfg(feature = "secp256k1")] -kp_curve!( - "secp256k1", - k256, - Secp256k1, - IetfSecp256k1Hram, - b"secp256k1", - b"FROST-secp256k1-SHA256-v11" -); +kp_curve!("secp256k1", Secp256k1, IetfSecp256k1Hram, b"FROST-secp256k1-SHA256-v11"); diff --git a/crypto/frost/src/curve/mod.rs b/crypto/frost/src/curve/mod.rs index b8365d30..a8bf4ad1 100644 --- a/crypto/frost/src/curve/mod.rs +++ b/crypto/frost/src/curve/mod.rs @@ -1,24 +1,27 @@ -use core::fmt::Debug; -use std::io::Read; - -use thiserror::Error; +use std::io::{self, Read}; use rand_core::{RngCore, CryptoRng}; use 
zeroize::Zeroize; use subtle::ConstantTimeEq; -use ff::{Field, PrimeField, PrimeFieldBits}; -use group::{Group, GroupOps, GroupEncoding, prime::PrimeGroup}; +use digest::Digest; -#[cfg(any(test, feature = "dalek"))] +use group::{ + ff::{Field, PrimeField}, + Group, +}; + +pub use ciphersuite::Ciphersuite; + +#[cfg(any(feature = "ristretto", feature = "ed25519"))] mod dalek; -#[cfg(any(test, feature = "ristretto"))] +#[cfg(feature = "ristretto")] pub use dalek::{Ristretto, IetfRistrettoHram}; #[cfg(feature = "ed25519")] pub use dalek::{Ed25519, IetfEd25519Hram}; -#[cfg(feature = "kp256")] +#[cfg(any(feature = "secp256k1", feature = "p256"))] mod kp256; #[cfg(feature = "secp256k1")] pub use kp256::{Secp256k1, IetfSecp256k1Hram}; @@ -30,42 +33,23 @@ mod ed448; #[cfg(feature = "ed448")] pub use ed448::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram}; -/// Set of errors for curve-related operations, namely encoding and decoding. -#[derive(Clone, Error, Debug)] -pub enum CurveError { - #[error("invalid scalar")] - InvalidScalar, - #[error("invalid point")] - InvalidPoint, -} - -/// Unified trait to manage an elliptic curve. -// This should be moved into its own crate if the need for generic cryptography over ff/group -// continues, which is the exact reason ff/group exists (to provide a generic interface) -// elliptic-curve exists, yet it doesn't really serve the same role, nor does it use &[u8]/Vec -// It uses GenericArray which will hopefully be deprecated as Rust evolves and doesn't offer enough -// advantages in the modern day to be worth the hassle -- Kayaba -pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize { - /// Scalar field element type. - // This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses - type F: PrimeField + PrimeFieldBits + Zeroize; - /// Group element type. - type G: Group + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq; - - /// ID for this curve. 
- const ID: &'static [u8]; - - /// Generator for the group. - // While group does provide this in its API, privacy coins may want to use a custom basepoint - fn generator() -> Self::G; +/// FROST Ciphersuite, except for the signing algorithm specific H2, making this solely the curve, +/// its associated hash function, and the functions derived from it. +pub trait Curve: Ciphersuite { + /// Context string for this curve. + const CONTEXT: &'static [u8]; /// Hash the given dst and data to a byte vector. Used to instantiate H4 and H5. - fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec; + fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec { + Self::H::digest(&[Self::CONTEXT, dst, data].concat()).as_ref().to_vec() + } /// Field element from hash. Used during key gen and by other crates under Serai as a general /// utility. Used to instantiate H1 and H3. #[allow(non_snake_case)] - fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F; + fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { + ::hash_to_F(&[Self::CONTEXT, dst].concat(), msg) + } /// Hash the message for the binding factor. H4 from the IETF draft. fn hash_msg(msg: &[u8]) -> Vec { @@ -79,17 +63,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize { /// Hash the commitments and message to calculate the binding factor. H1 from the IETF draft. fn hash_binding_factor(binding: &[u8]) -> Self::F { - Self::hash_to_F(b"rho", binding) - } - - #[allow(non_snake_case)] - fn random_F(rng: &mut R) -> Self::F { - let mut res; - while { - res = Self::F::random(&mut *rng); - res.ct_eq(&Self::F::zero()).into() - } {} - res + ::hash_to_F(b"rho", binding) } /// Securely generate a random nonce. H3 from the IETF draft. 
@@ -103,7 +77,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize { let mut res; while { seed.extend(repr.as_ref()); - res = Self::hash_to_F(b"nonce", &seed); + res = ::hash_to_F(b"nonce", &seed); res.ct_eq(&Self::F::zero()).into() } { rng.fill_bytes(&mut seed); @@ -117,40 +91,11 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize { } #[allow(non_snake_case)] - fn F_len() -> usize { - ::Repr::default().as_ref().len() - } - - #[allow(non_snake_case)] - fn G_len() -> usize { - ::Repr::default().as_ref().len() - } - - #[allow(non_snake_case)] - fn read_F(r: &mut R) -> Result { - let mut encoding = ::Repr::default(); - r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidScalar)?; - - // ff mandates this is canonical - let res = - Option::::from(Self::F::from_repr(encoding)).ok_or(CurveError::InvalidScalar); - for b in encoding.as_mut() { - b.zeroize(); + fn read_G(reader: &mut R) -> io::Result { + let res = ::read_G(reader)?; + if res.is_identity().into() { + Err(io::Error::new(io::ErrorKind::Other, "identity point"))?; } - res - } - - #[allow(non_snake_case)] - fn read_G(r: &mut R) -> Result { - let mut encoding = ::Repr::default(); - r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidPoint)?; - - let point = - Option::::from(Self::G::from_bytes(&encoding)).ok_or(CurveError::InvalidPoint)?; - // Ban the identity, per the FROST spec, and non-canonical points - if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) { - Err(CurveError::InvalidPoint)?; - } - Ok(point) + Ok(res) } } diff --git a/crypto/frost/src/key_gen.rs b/crypto/frost/src/key_gen.rs deleted file mode 100644 index 4a9571d7..00000000 --- a/crypto/frost/src/key_gen.rs +++ /dev/null @@ -1,357 +0,0 @@ -use std::{ - marker::PhantomData, - io::{Read, Cursor}, - collections::HashMap, -}; - -use rand_core::{RngCore, CryptoRng}; - -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use group::{ - ff::{Field, PrimeField}, - GroupEncoding, 
-}; - -use multiexp::{multiexp_vartime, BatchVerifier}; - -use crate::{ - curve::Curve, - FrostError, FrostParams, FrostCore, - schnorr::{self, SchnorrSignature}, - validate_map, -}; - -#[allow(non_snake_case)] -fn challenge(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F { - const DST: &[u8] = b"FROST Schnorr Proof of Knowledge"; - - // Uses hash_msg to get a fixed size value out of the context string - let mut transcript = C::hash_msg(context.as_bytes()); - transcript.extend(l.to_be_bytes()); - transcript.extend(R); - transcript.extend(Am); - C::hash_to_F(DST, &transcript) -} - -// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and -// the serialized commitments to be broadcasted over an authenticated channel to all parties -fn generate_key_r1( - rng: &mut R, - params: &FrostParams, - context: &str, -) -> (Vec, Vec, Vec) { - let t = usize::from(params.t); - let mut coefficients = Vec::with_capacity(t); - let mut commitments = Vec::with_capacity(t); - let mut serialized = Vec::with_capacity((C::G_len() * t) + C::G_len() + C::F_len()); - - for i in 0 .. 
t { - // Step 1: Generate t random values to form a polynomial with - coefficients.push(C::random_F(&mut *rng)); - // Step 3: Generate public commitments - commitments.push(C::generator() * coefficients[i]); - // Serialize them for publication - serialized.extend(commitments[i].to_bytes().as_ref()); - } - - // Step 2: Provide a proof of knowledge - let mut r = C::random_F(rng); - serialized.extend( - schnorr::sign::( - coefficients[0], - // This could be deterministic as the PoK is a singleton never opened up to cooperative - // discussion - // There's no reason to spend the time and effort to make this deterministic besides a - // general obsession with canonicity and determinism though - r, - challenge::(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized), - ) - .serialize(), - ); - r.zeroize(); - - // Step 4: Broadcast - (coefficients, commitments, serialized) -} - -// Verify the received data from the first round of key generation -fn verify_r1( - rng: &mut R, - params: &FrostParams, - context: &str, - our_commitments: Vec, - mut serialized: HashMap, -) -> Result>, FrostError> { - validate_map(&serialized, &(1 ..= params.n()).collect::>(), params.i())?; - - let mut commitments = HashMap::new(); - commitments.insert(params.i, our_commitments); - - let mut signatures = Vec::with_capacity(usize::from(params.n() - 1)); - for l in 1 ..= params.n() { - if l == params.i { - continue; - } - - let invalid = FrostError::InvalidCommitment(l); - - // Read the entire list of commitments as the key we're providing a PoK for (A) and the message - #[allow(non_snake_case)] - let mut Am = vec![0; usize::from(params.t()) * C::G_len()]; - serialized.get_mut(&l).unwrap().read_exact(&mut Am).map_err(|_| invalid)?; - - let mut these_commitments = vec![]; - let mut cursor = Cursor::new(&Am); - for _ in 0 .. 
usize::from(params.t()) { - these_commitments.push(C::read_G(&mut cursor).map_err(|_| invalid)?); - } - - // Don't bother validating our own proof of knowledge - if l != params.i() { - let cursor = serialized.get_mut(&l).unwrap(); - #[allow(non_snake_case)] - let R = C::read_G(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?; - let s = C::read_F(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?; - - // Step 5: Validate each proof of knowledge - // This is solely the prep step for the latter batch verification - signatures.push(( - l, - these_commitments[0], - challenge::(context, l, R.to_bytes().as_ref(), &Am), - SchnorrSignature:: { R, s }, - )); - } - - commitments.insert(l, these_commitments); - } - - schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?; - - Ok(commitments) -} - -fn polynomial(coefficients: &[F], l: u16) -> F { - let l = F::from(u64::from(l)); - let mut share = F::zero(); - for (idx, coefficient) in coefficients.iter().rev().enumerate() { - share += coefficient; - if idx != (coefficients.len() - 1) { - share *= l; - } - } - share -} - -// Implements round 1, step 5 and round 2, step 1 of FROST key generation -// Returns our secret share part, commitments for the next step, and a vector for each -// counterparty to receive -fn generate_key_r2( - rng: &mut R, - params: &FrostParams, - context: &str, - coefficients: &mut Vec, - our_commitments: Vec, - commitments: HashMap, -) -> Result<(C::F, HashMap>, HashMap>), FrostError> { - let commitments = verify_r1::<_, _, C>(rng, params, context, our_commitments, commitments)?; - - // Step 1: Generate secret shares for all other parties - let mut res = HashMap::new(); - for l in 1 ..= params.n() { - // Don't insert our own shares to the byte buffer which is meant to be sent around - // An app developer could accidentally send it. 
Best to keep this black boxed - if l == params.i() { - continue; - } - - res.insert(l, polynomial(coefficients, l).to_repr().as_ref().to_vec()); - } - - // Calculate our own share - let share = polynomial(coefficients, params.i()); - - coefficients.zeroize(); - - Ok((share, commitments, res)) -} - -/// Finishes round 2 and returns both the secret share and the serialized public key. -/// This key MUST NOT be considered usable until all parties confirm they have completed the -/// protocol without issue. -fn complete_r2( - rng: &mut R, - params: FrostParams, - mut secret_share: C::F, - commitments: &mut HashMap>, - mut serialized: HashMap, -) -> Result, FrostError> { - validate_map(&serialized, &(1 ..= params.n()).collect::>(), params.i())?; - - // Step 2. Verify each share - let mut shares = HashMap::new(); - // TODO: Clear serialized - for (l, share) in serialized.iter_mut() { - shares.insert(*l, C::read_F(share).map_err(|_| FrostError::InvalidShare(*l))?); - } - - // Calculate the exponent for a given participant and apply it to a series of commitments - // Initially used with the actual commitments to verify the secret share, later used with stripes - // to generate the verification shares - let exponential = |i: u16, values: &[_]| { - let i = C::F::from(i.into()); - let mut res = Vec::with_capacity(params.t().into()); - (0 .. usize::from(params.t())).into_iter().fold(C::F::one(), |exp, l| { - res.push((exp, values[l])); - exp * i - }); - res - }; - - let mut batch = BatchVerifier::new(shares.len()); - for (l, share) in shares.iter_mut() { - if *l == params.i() { - continue; - } - - secret_share += *share; - - // This can be insecurely linearized from n * t to just n using the below sums for a given - // stripe. Doing so uses naive addition which is subject to malleability. 
The only way to - // ensure that malleability isn't present is to use this n * t algorithm, which runs - // per sender and not as an aggregate of all senders, which also enables blame - let mut values = exponential(params.i, &commitments[l]); - values.push((-*share, C::generator())); - share.zeroize(); - - batch.queue(rng, *l, values); - } - batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?; - - // Stripe commitments per t and sum them in advance. Calculating verification shares relies on - // these sums so preprocessing them is a massive speedup - // If these weren't just sums, yet the tables used in multiexp, this would be further optimized - // As of right now, each multiexp will regenerate them - let mut stripes = Vec::with_capacity(usize::from(params.t())); - for t in 0 .. usize::from(params.t()) { - stripes.push(commitments.values().map(|commitments| commitments[t]).sum()); - } - - // Calculate each user's verification share - let mut verification_shares = HashMap::new(); - for i in 1 ..= params.n() { - verification_shares.insert(i, multiexp_vartime(&exponential(i, &stripes))); - } - // Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t) - debug_assert_eq!(C::generator() * secret_share, verification_shares[¶ms.i()]); - - Ok(FrostCore { params, secret_share, group_key: stripes[0], verification_shares }) -} - -/// State machine to begin the key generation protocol. -pub struct KeyGenMachine { - params: FrostParams, - context: String, - _curve: PhantomData, -} - -/// Advancement of the key generation state machine. -#[derive(Zeroize)] -pub struct SecretShareMachine { - #[zeroize(skip)] - params: FrostParams, - context: String, - coefficients: Vec, - #[zeroize(skip)] - our_commitments: Vec, -} - -impl Drop for SecretShareMachine { - fn drop(&mut self) { - self.zeroize() - } -} -impl ZeroizeOnDrop for SecretShareMachine {} - -/// Final step of the key generation protocol. 
-#[derive(Zeroize)] -pub struct KeyMachine { - #[zeroize(skip)] - params: FrostParams, - secret: C::F, - #[zeroize(skip)] - commitments: HashMap>, -} - -impl Drop for KeyMachine { - fn drop(&mut self) { - self.zeroize() - } -} -impl ZeroizeOnDrop for KeyMachine {} - -impl KeyGenMachine { - /// Creates a new machine to generate a key for the specified curve in the specified multisig. - // The context string should be unique among multisigs. - pub fn new(params: FrostParams, context: String) -> KeyGenMachine { - KeyGenMachine { params, context, _curve: PhantomData } - } - - /// Start generating a key according to the FROST DKG spec. - /// Returns a serialized list of commitments to be sent to all parties over an authenticated - /// channel. If any party submits multiple sets of commitments, they MUST be treated as - /// malicious. - pub fn generate_coefficients( - self, - rng: &mut R, - ) -> (SecretShareMachine, Vec) { - let (coefficients, our_commitments, serialized) = - generate_key_r1::<_, C>(rng, &self.params, &self.context); - - ( - SecretShareMachine { - params: self.params, - context: self.context, - coefficients, - our_commitments, - }, - serialized, - ) - } -} - -impl SecretShareMachine { - /// Continue generating a key. - /// Takes in everyone else's commitments. Returns a HashMap of byte vectors representing secret - /// shares. These MUST be encrypted and only then sent to their respective participants. - pub fn generate_secret_shares( - mut self, - rng: &mut R, - commitments: HashMap, - ) -> Result<(KeyMachine, HashMap>), FrostError> { - let (secret, commitments, shares) = generate_key_r2::<_, _, C>( - rng, - &self.params, - &self.context, - &mut self.coefficients, - self.our_commitments.clone(), - commitments, - )?; - Ok((KeyMachine { params: self.params, secret, commitments }, shares)) - } -} - -impl KeyMachine { - /// Complete key generation. - /// Takes in everyone elses' shares submitted to us. 
Returns a FrostCore object representing the - /// generated keys. Successful protocol completion MUST be confirmed by all parties before these - /// keys may be safely used. - pub fn complete( - mut self, - rng: &mut R, - shares: HashMap, - ) -> Result, FrostError> { - complete_r2(rng, self.params, self.secret, &mut self.commitments, shares) - } -} diff --git a/crypto/frost/src/lib.rs b/crypto/frost/src/lib.rs index 7c6e3753..460fb8cc 100644 --- a/crypto/frost/src/lib.rs +++ b/crypto/frost/src/lib.rs @@ -11,33 +11,23 @@ //! //! This library offers ciphersuites compatible with the //! [IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version -//! 10 is supported. +//! 11 is supported. -use core::fmt::{self, Debug}; -use std::{io::Read, sync::Arc, collections::HashMap}; +use core::fmt::Debug; +use std::collections::HashMap; use thiserror::Error; -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use group::{ - ff::{Field, PrimeField}, - GroupEncoding, -}; - -mod schnorr; +/// Distributed key generation protocol. +pub use dkg::{self, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView}; /// Curve trait and provided curves/HRAMs, forming various ciphersuites. pub mod curve; use curve::Curve; -/// Distributed key generation protocol. -pub mod key_gen; -/// Promote keys between curves. -pub mod promote; - /// Algorithm for the signing process. pub mod algorithm; +mod nonce; /// Threshold signing protocol. pub mod sign; @@ -45,7 +35,7 @@ pub mod sign; #[cfg(any(test, feature = "tests"))] pub mod tests; -// Validate a map of serialized values to have the expected included participants +// Validate a map of values to have the expected included participants pub(crate) fn validate_map( map: &HashMap, included: &[u16], @@ -71,59 +61,11 @@ pub(crate) fn validate_map( Ok(()) } -/// Parameters for a multisig. 
-// These fields can not be made public as they should be static -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct FrostParams { - /// Participants needed to sign on behalf of the group. - t: u16, - /// Amount of participants. - n: u16, - /// Index of the participant being acted for. - i: u16, -} - -impl FrostParams { - pub fn new(t: u16, n: u16, i: u16) -> Result { - if (t == 0) || (n == 0) { - Err(FrostError::ZeroParameter(t, n))?; - } - - // When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason), - // but it's not invalid to do so - if t > n { - Err(FrostError::InvalidRequiredQuantity(t, n))?; - } - if (i == 0) || (i > n) { - Err(FrostError::InvalidParticipantIndex(n, i))?; - } - - Ok(FrostParams { t, n, i }) - } - - pub fn t(&self) -> u16 { - self.t - } - pub fn n(&self) -> u16 { - self.n - } - pub fn i(&self) -> u16 { - self.i - } -} - -/// Various errors possible during key generation/signing. +/// Various errors possible during signing. 
#[derive(Copy, Clone, Error, Debug)] pub enum FrostError { - #[error("a parameter was 0 (required {0}, participants {1})")] - ZeroParameter(u16, u16), - #[error("too many participants (max {1}, got {0})")] - TooManyParticipants(usize, u16), - #[error("invalid amount of required participants (max {1}, got {0})")] - InvalidRequiredQuantity(u16, u16), #[error("invalid participant index (0 < index <= {0}, yet index is {1})")] InvalidParticipantIndex(u16, u16), - #[error("invalid signing set ({0})")] InvalidSigningSet(&'static str), #[error("invalid participant quantity (expected {0}, got {1})")] @@ -132,290 +74,12 @@ pub enum FrostError { DuplicatedIndex(u16), #[error("missing participant {0}")] MissingParticipant(u16), - #[error("invalid commitment (participant {0})")] - InvalidCommitment(u16), - #[error("invalid proof of knowledge (participant {0})")] - InvalidProofOfKnowledge(u16), + + #[error("invalid preprocess (participant {0})")] + InvalidPreprocess(u16), #[error("invalid share (participant {0})")] InvalidShare(u16), #[error("internal error ({0})")] InternalError(&'static str), } - -/// Calculate the lagrange coefficient for a signing set. -pub fn lagrange(i: u16, included: &[u16]) -> F { - let mut num = F::one(); - let mut denom = F::one(); - for l in included { - if i == *l { - continue; - } - - let share = F::from(u64::try_from(*l).unwrap()); - num *= share; - denom *= share - F::from(u64::try_from(i).unwrap()); - } - - // Safe as this will only be 0 if we're part of the above loop - // (which we have an if case to avoid) - num * denom.invert().unwrap() -} - -/// Core keys generated by performing a FROST keygen protocol. -#[derive(Clone, PartialEq, Eq, Zeroize)] -pub struct FrostCore { - /// FROST Parameters. - #[zeroize(skip)] - params: FrostParams, - - /// Secret share key. - secret_share: C::F, - /// Group key. - group_key: C::G, - /// Verification shares. 
- #[zeroize(skip)] - verification_shares: HashMap, -} - -impl Drop for FrostCore { - fn drop(&mut self) { - self.zeroize() - } -} -impl ZeroizeOnDrop for FrostCore {} - -impl Debug for FrostCore { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("FrostCore") - .field("params", &self.params) - .field("group_key", &self.group_key) - .field("verification_shares", &self.verification_shares) - .finish() - } -} - -impl FrostCore { - pub(crate) fn new( - params: FrostParams, - secret_share: C::F, - verification_shares: HashMap, - ) -> FrostCore { - #[cfg(debug_assertions)] - validate_map(&verification_shares, &(0 ..= params.n).collect::>(), 0).unwrap(); - - let t = (1 ..= params.t).collect::>(); - FrostCore { - params, - secret_share, - group_key: t.iter().map(|i| verification_shares[i] * lagrange::(*i, &t)).sum(), - verification_shares, - } - } - pub fn params(&self) -> FrostParams { - self.params - } - - #[cfg(any(test, feature = "tests"))] - pub(crate) fn secret_share(&self) -> C::F { - self.secret_share - } - - pub fn group_key(&self) -> C::G { - self.group_key - } - - pub(crate) fn verification_shares(&self) -> HashMap { - self.verification_shares.clone() - } - - pub fn serialized_len(n: u16) -> usize { - 8 + C::ID.len() + (3 * 2) + C::F_len() + C::G_len() + (usize::from(n) * C::G_len()) - } - - pub fn serialize(&self) -> Vec { - let mut serialized = Vec::with_capacity(FrostCore::::serialized_len(self.params.n)); - serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes()); - serialized.extend(C::ID); - serialized.extend(self.params.t.to_be_bytes()); - serialized.extend(self.params.n.to_be_bytes()); - serialized.extend(self.params.i.to_be_bytes()); - serialized.extend(self.secret_share.to_repr().as_ref()); - for l in 1 ..= self.params.n { - serialized.extend(self.verification_shares[&l].to_bytes().as_ref()); - } - serialized - } - - pub fn deserialize(cursor: &mut R) -> Result, FrostError> { - { - let missing = 
FrostError::InternalError("FrostCore serialization is missing its curve"); - let different = FrostError::InternalError("deserializing FrostCore for another curve"); - - let mut id_len = [0; 4]; - cursor.read_exact(&mut id_len).map_err(|_| missing)?; - if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len { - Err(different)?; - } - - let mut id = vec![0; C::ID.len()]; - cursor.read_exact(&mut id).map_err(|_| missing)?; - if id != C::ID { - Err(different)?; - } - } - - let (t, n, i) = { - let mut read_u16 = || { - let mut value = [0; 2]; - cursor - .read_exact(&mut value) - .map_err(|_| FrostError::InternalError("missing participant quantities"))?; - Ok(u16::from_be_bytes(value)) - }; - (read_u16()?, read_u16()?, read_u16()?) - }; - - let secret_share = - C::read_F(cursor).map_err(|_| FrostError::InternalError("invalid secret share"))?; - - let mut verification_shares = HashMap::new(); - for l in 1 ..= n { - verification_shares.insert( - l, - C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid verification share"))?, - ); - } - - Ok(FrostCore::new( - FrostParams::new(t, n, i).map_err(|_| FrostError::InternalError("invalid parameters"))?, - secret_share, - verification_shares, - )) - } -} - -/// FROST keys usable for signing. -#[derive(Clone, Debug, Zeroize)] -pub struct FrostKeys { - /// Core keys. - #[zeroize(skip)] - core: Arc>, - - /// Offset applied to these keys. - pub(crate) offset: Option, -} - -// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786 -impl Drop for FrostKeys { - fn drop(&mut self) { - self.zeroize() - } -} -impl ZeroizeOnDrop for FrostKeys {} - -/// View of keys passed to algorithm implementations. 
-#[derive(Clone, Zeroize)] -pub struct FrostView { - group_key: C::G, - #[zeroize(skip)] - included: Vec, - secret_share: C::F, - #[zeroize(skip)] - verification_shares: HashMap, -} - -impl Drop for FrostView { - fn drop(&mut self) { - self.zeroize() - } -} -impl ZeroizeOnDrop for FrostView {} - -impl FrostKeys { - pub fn new(core: FrostCore) -> FrostKeys { - FrostKeys { core: Arc::new(core), offset: None } - } - - /// Offset the keys by a given scalar to allow for account and privacy schemes. - /// This offset is ephemeral and will not be included when these keys are serialized. - /// Keys offset multiple times will form a new offset of their sum. - /// Not IETF compliant. - pub fn offset(&self, offset: C::F) -> FrostKeys { - let mut res = self.clone(); - // Carry any existing offset - // Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a - // one-time-key offset - res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero)); - res - } - - pub fn params(&self) -> FrostParams { - self.core.params - } - - pub(crate) fn secret_share(&self) -> C::F { - self.core.secret_share - } - - /// Returns the group key with any offset applied. - pub fn group_key(&self) -> C::G { - self.core.group_key + (C::generator() * self.offset.unwrap_or_else(C::F::zero)) - } - - /// Returns all participants' verification shares without any offsetting. 
- pub(crate) fn verification_shares(&self) -> HashMap { - self.core.verification_shares() - } - - pub fn serialized_len(n: u16) -> usize { - FrostCore::::serialized_len(n) - } - - pub fn serialize(&self) -> Vec { - self.core.serialize() - } - - pub fn view(&self, included: &[u16]) -> Result, FrostError> { - if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len()) - { - Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?; - } - - let offset_share = self.offset.unwrap_or_else(C::F::zero) * - C::F::from(included.len().try_into().unwrap()).invert().unwrap(); - let offset_verification_share = C::generator() * offset_share; - - Ok(FrostView { - group_key: self.group_key(), - secret_share: (self.secret_share() * lagrange::(self.params().i, included)) + - offset_share, - verification_shares: self - .verification_shares() - .iter() - .map(|(l, share)| { - (*l, (*share * lagrange::(*l, included)) + offset_verification_share) - }) - .collect(), - included: included.to_vec(), - }) - } -} - -impl FrostView { - pub fn group_key(&self) -> C::G { - self.group_key - } - - pub fn included(&self) -> Vec { - self.included.clone() - } - - pub fn secret_share(&self) -> C::F { - self.secret_share - } - - pub fn verification_share(&self, l: u16) -> C::G { - self.verification_shares[&l] - } -} diff --git a/crypto/frost/src/nonce.rs b/crypto/frost/src/nonce.rs new file mode 100644 index 00000000..4c80060f --- /dev/null +++ b/crypto/frost/src/nonce.rs @@ -0,0 +1,267 @@ +// FROST defines its nonce as sum(Di, Ei * bi) +// Monero needs not just the nonce over G however, yet also over H +// Then there is a signature (a modified Chaum Pedersen proof) using multiple nonces at once +// +// Accordingly, in order for this library to be robust, it supports generating an arbitrary amount +// of nonces, each against an arbitrary list of basepoints +// +// Each nonce remains of the form (d, e) and made into a proper nonce with d + (e * b) 
+// When multiple D, E pairs are provided, a DLEq proof is also provided to confirm their integrity + +use std::{ + io::{self, Read, Write}, + collections::HashMap, +}; + +use rand_core::{RngCore, CryptoRng}; + +use zeroize::{Zeroize, ZeroizeOnDrop}; + +use transcript::Transcript; + +use group::{ff::PrimeField, Group, GroupEncoding}; +use multiexp::multiexp_vartime; + +use dleq::DLEqProof; + +use crate::curve::Curve; + +fn dleq_transcript() -> T { + T::new(b"FROST_nonce_dleq") +} + +// Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper +// This is considered a single nonce as r = d + be +#[derive(Clone, Zeroize)] +pub(crate) struct Nonce(pub(crate) [C::F; 2]); +impl Drop for Nonce { + fn drop(&mut self) { + self.zeroize(); + } +} +impl ZeroizeOnDrop for Nonce {} + +// Commitments to a specific generator for this nonce +#[derive(Copy, Clone, PartialEq, Eq)] +pub(crate) struct GeneratorCommitments(pub(crate) [C::G; 2]); +impl GeneratorCommitments { + fn read(reader: &mut R) -> io::Result> { + Ok(GeneratorCommitments([::read_G(reader)?, ::read_G(reader)?])) + } + + fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(self.0[0].to_bytes().as_ref())?; + writer.write_all(self.0[1].to_bytes().as_ref()) + } +} + +// A single nonce's commitments and relevant proofs +#[derive(Clone, PartialEq, Eq)] +pub(crate) struct NonceCommitments { + // Called generators as these commitments are indexed by generator + pub(crate) generators: Vec>, + // DLEq Proofs proving that these commitments are generated using the same scalar pair + // This could be further optimized with a multi-nonce proof, offering just one proof for all + // nonces. 
See https://github.com/serai-dex/serai/issues/38 + // TODO + pub(crate) dleqs: Option<[DLEqProof; 2]>, +} + +impl NonceCommitments { + pub(crate) fn new( + rng: &mut R, + mut secret_share: C::F, + generators: &[C::G], + ) -> (Nonce, NonceCommitments) { + let nonce = + Nonce([C::random_nonce(secret_share, &mut *rng), C::random_nonce(secret_share, &mut *rng)]); + secret_share.zeroize(); + + let mut commitments = Vec::with_capacity(generators.len()); + for generator in generators { + commitments.push(GeneratorCommitments([*generator * nonce.0[0], *generator * nonce.0[1]])); + } + + let mut dleqs = None; + if generators.len() >= 2 { + let mut dleq = |nonce| { + // Uses an independent transcript as each signer must prove this with their commitments, + // yet they're validated while processing everyone's data sequentially, by the global order + // This avoids needing to clone and fork the transcript around + // TODO: At least include a challenge from the existing transcript + DLEqProof::prove(&mut *rng, &mut dleq_transcript::(), generators, nonce) + }; + dleqs = Some([dleq(nonce.0[0]), dleq(nonce.0[1])]); + } + + (nonce, NonceCommitments { generators: commitments, dleqs }) + } + + fn read( + reader: &mut R, + generators: &[C::G], + ) -> io::Result> { + let commitments: Vec> = (0 .. 
generators.len()) + .map(|_| GeneratorCommitments::read(reader)) + .collect::>()?; + + let mut dleqs = None; + if generators.len() >= 2 { + let mut verify = |i| -> io::Result<_> { + let dleq = DLEqProof::deserialize(reader)?; + dleq + .verify( + &mut dleq_transcript::(), + generators, + &commitments.iter().map(|commitments| commitments.0[i]).collect::>(), + ) + .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?; + Ok(dleq) + }; + dleqs = Some([verify(0)?, verify(1)?]); + } + + Ok(NonceCommitments { generators: commitments, dleqs }) + } + + fn write(&self, writer: &mut W) -> io::Result<()> { + for generator in &self.generators { + generator.write(writer)?; + } + if let Some(dleqs) = &self.dleqs { + dleqs[0].serialize(writer)?; + dleqs[1].serialize(writer)?; + } + Ok(()) + } +} + +#[derive(Clone, PartialEq, Eq)] +pub(crate) struct Commitments { + // Called nonces as these commitments are indexed by nonce + pub(crate) nonces: Vec>, +} + +impl Commitments { + pub(crate) fn new( + rng: &mut R, + secret_share: C::F, + planned_nonces: &[Vec], + ) -> (Vec>, Commitments) { + let mut nonces = vec![]; + let mut commitments = vec![]; + for generators in planned_nonces { + let (nonce, these_commitments) = + NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators); + nonces.push(nonce); + commitments.push(these_commitments); + } + (nonces, Commitments { nonces: commitments }) + } + + pub(crate) fn transcript(&self, t: &mut T) { + for nonce in &self.nonces { + for commitments in &nonce.generators { + t.append_message(b"commitment_D", commitments.0[0].to_bytes().as_ref()); + t.append_message(b"commitment_E", commitments.0[1].to_bytes().as_ref()); + } + + // Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce + // This means it shouldn't be possible for variadic generators to cause conflicts as they're + // committed to as their entire series per-nonce, not as isolates + if let Some(dleqs) = &nonce.dleqs { + let 
mut transcript_dleq = |label, dleq: &DLEqProof| { + let mut buf = vec![]; + dleq.serialize(&mut buf).unwrap(); + t.append_message(label, &buf); + }; + transcript_dleq(b"dleq_D", &dleqs[0]); + transcript_dleq(b"dleq_E", &dleqs[1]); + } + } + } + + pub(crate) fn read( + reader: &mut R, + nonces: &[Vec], + ) -> io::Result { + Ok(Commitments { + nonces: (0 .. nonces.len()) + .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i])) + .collect::>()?, + }) + } + + pub(crate) fn write(&self, writer: &mut W) -> io::Result<()> { + for nonce in &self.nonces { + nonce.write(writer)?; + } + Ok(()) + } +} + +pub(crate) struct IndividualBinding { + commitments: Commitments, + binding_factors: Option>, +} + +pub(crate) struct BindingFactor(pub(crate) HashMap>); + +impl BindingFactor { + pub(crate) fn insert(&mut self, i: u16, commitments: Commitments) { + self.0.insert(i, IndividualBinding { commitments, binding_factors: None }); + } + + pub(crate) fn calculate_binding_factors(&mut self, transcript: &mut T) { + for (l, binding) in self.0.iter_mut() { + let mut transcript = transcript.clone(); + transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref()); + // It *should* be perfectly fine to reuse a binding factor for multiple nonces + // This generates a binding factor per nonce just to ensure it never comes up as a question + binding.binding_factors = Some( + (0 .. 
binding.commitments.nonces.len()) + .map(|_| C::hash_binding_factor(transcript.challenge(b"rho").as_ref())) + .collect(), + ); + } + } + + pub(crate) fn binding_factors(&self, i: u16) -> &[C::F] { + self.0[&i].binding_factors.as_ref().unwrap() + } + + // Get the bound nonces for a specific party + pub(crate) fn bound(&self, l: u16) -> Vec> { + let mut res = vec![]; + for (i, (nonce, rho)) in + self.0[&l].commitments.nonces.iter().zip(self.binding_factors(l).iter()).enumerate() + { + res.push(vec![]); + for generator in &nonce.generators { + res[i].push(generator.0[0] + (generator.0[1] * rho)); + } + } + res + } + + // Get the nonces for this signing session + pub(crate) fn nonces(&self, planned_nonces: &[Vec]) -> Vec> { + let mut nonces = Vec::with_capacity(planned_nonces.len()); + for n in 0 .. planned_nonces.len() { + nonces.push(Vec::with_capacity(planned_nonces[n].len())); + for g in 0 .. planned_nonces[n].len() { + #[allow(non_snake_case)] + let mut D = C::G::identity(); + let mut statements = Vec::with_capacity(self.0.len()); + #[allow(non_snake_case)] + for IndividualBinding { commitments, binding_factors } in self.0.values() { + D += commitments.nonces[n].generators[g].0[0]; + statements + .push((binding_factors.as_ref().unwrap()[n], commitments.nonces[n].generators[g].0[1])); + } + nonces[n].push(D + multiexp_vartime(&statements)); + } + } + nonces + } +} diff --git a/crypto/frost/src/schnorr.rs b/crypto/frost/src/schnorr.rs deleted file mode 100644 index da229145..00000000 --- a/crypto/frost/src/schnorr.rs +++ /dev/null @@ -1,73 +0,0 @@ -use rand_core::{RngCore, CryptoRng}; - -use zeroize::Zeroize; - -use group::{ - ff::{Field, PrimeField}, - GroupEncoding, -}; - -use multiexp::BatchVerifier; - -use crate::Curve; - -/// A Schnorr signature of the form (R, s) where s = r + cx. 
-#[allow(non_snake_case)] -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct SchnorrSignature { - pub R: C::G, - pub s: C::F, -} - -impl SchnorrSignature { - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(C::G_len() + C::F_len()); - res.extend(self.R.to_bytes().as_ref()); - res.extend(self.s.to_repr().as_ref()); - res - } -} - -pub(crate) fn sign( - mut private_key: C::F, - mut nonce: C::F, - challenge: C::F, -) -> SchnorrSignature { - let res = SchnorrSignature { R: C::generator() * nonce, s: nonce + (private_key * challenge) }; - private_key.zeroize(); - nonce.zeroize(); - res -} - -#[must_use] -pub(crate) fn verify( - public_key: C::G, - challenge: C::F, - signature: &SchnorrSignature, -) -> bool { - (C::generator() * signature.s) == (signature.R + (public_key * challenge)) -} - -pub(crate) fn batch_verify( - rng: &mut R, - triplets: &[(u16, C::G, C::F, SchnorrSignature)], -) -> Result<(), u16> { - let mut values = [(C::F::one(), C::generator()); 3]; - let mut batch = BatchVerifier::new(triplets.len()); - for triple in triplets { - // s = r + ca - // sG == R + cA - // R + cA - sG == 0 - - // R - values[0].1 = triple.3.R; - // cA - values[1] = (triple.2, triple.1); - // -sG - values[2].0 = -triple.3.s; - - batch.queue(rng, triple.0, values); - } - - batch.verify_vartime_with_vartime_blame() -} diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index a863a76a..a6c4c947 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -1,41 +1,65 @@ use core::fmt; use std::{ - io::{Read, Cursor}, + io::{self, Read, Write}, collections::HashMap, }; use rand_core::{RngCore, CryptoRng}; use zeroize::{Zeroize, ZeroizeOnDrop}; -use subtle::ConstantTimeEq; use transcript::Transcript; -use group::{ - ff::{Field, PrimeField}, - Group, GroupEncoding, -}; -use multiexp::multiexp_vartime; - -use dleq::DLEqProof; +use group::{ff::PrimeField, GroupEncoding}; use crate::{ - curve::Curve, FrostError, FrostParams, FrostKeys, 
FrostView, algorithm::Algorithm, validate_map, + curve::Curve, + FrostError, ThresholdParams, ThresholdKeys, ThresholdView, + algorithm::{WriteAddendum, Addendum, Algorithm}, + validate_map, }; -/// Pairing of an Algorithm with a FrostKeys instance and this specific signing set. -#[derive(Clone)] -pub struct Params> { - algorithm: A, - keys: FrostKeys, - view: FrostView, +pub(crate) use crate::nonce::*; + +/// Trait enabling writing preprocesses and signature shares. +pub trait Writable { + fn write(&self, writer: &mut W) -> io::Result<()>; + + fn serialize(&self) -> Vec { + let mut buf = vec![]; + self.write(&mut buf).unwrap(); + buf + } } -// Currently public to enable more complex operations as desired, yet solely used in testing +impl Writable for Vec { + fn write(&self, writer: &mut W) -> io::Result<()> { + for w in self { + w.write(writer)?; + } + Ok(()) + } +} + +/// Pairing of an Algorithm with a ThresholdKeys instance and this specific signing set. +#[derive(Clone, Zeroize)] +pub struct Params> { + #[zeroize(skip)] + algorithm: A, + keys: ThresholdKeys, + view: ThresholdView, +} +impl> Drop for Params { + fn drop(&mut self) { + self.zeroize() + } +} +impl> ZeroizeOnDrop for Params {} + impl> Params { pub fn new( algorithm: A, - keys: FrostKeys, + keys: ThresholdKeys, included: &[u16], ) -> Result, FrostError> { let params = keys.params(); @@ -44,16 +68,16 @@ impl> Params { included.sort_unstable(); // Included < threshold - if included.len() < usize::from(params.t) { + if included.len() < usize::from(params.t()) { Err(FrostError::InvalidSigningSet("not enough signers"))?; } // Invalid index if included[0] == 0 { - Err(FrostError::InvalidParticipantIndex(included[0], params.n))?; + Err(FrostError::InvalidParticipantIndex(included[0], params.n()))?; } // OOB index - if included[included.len() - 1] > params.n { - Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n))?; + if included[included.len() - 1] > params.n() { + 
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n()))?; } // Same signer included multiple times for i in 0 .. (included.len() - 1) { @@ -62,7 +86,7 @@ impl> Params { } } // Not included - if !included.contains(¶ms.i) { + if !included.contains(¶ms.i()) { Err(FrostError::InvalidSigningSet("signing despite not being included"))?; } @@ -70,338 +94,43 @@ impl> Params { Ok(Params { algorithm, view: keys.view(&included).unwrap(), keys }) } - pub fn multisig_params(&self) -> FrostParams { + pub fn multisig_params(&self) -> ThresholdParams { self.keys.params() } - pub fn view(&self) -> FrostView { + pub fn view(&self) -> ThresholdView { self.view.clone() } } -fn nonce_transcript() -> T { - T::new(b"FROST_nonce_dleq") +/// Preprocess for an instance of the FROST signing protocol. +#[derive(Clone, PartialEq, Eq)] +pub struct Preprocess { + pub(crate) commitments: Commitments, + pub addendum: A, } -#[derive(Zeroize)] -pub(crate) struct PreprocessPackage { - pub(crate) nonces: Vec<[C::F; 2]>, - #[zeroize(skip)] - pub(crate) commitments: Vec>, - pub(crate) addendum: Vec, -} - -impl Drop for PreprocessPackage { - fn drop(&mut self) { - self.zeroize() +impl Writable for Preprocess { + fn write(&self, writer: &mut W) -> io::Result<()> { + self.commitments.write(writer)?; + self.addendum.write(writer) } } -impl ZeroizeOnDrop for PreprocessPackage {} - -fn preprocess>( - rng: &mut R, - params: &mut Params, -) -> (PreprocessPackage, Vec) { - let mut serialized = Vec::with_capacity(2 * C::G_len()); - let (nonces, commitments) = params - .algorithm - .nonces() - .iter() - .map(|generators| { - let nonces = [ - C::random_nonce(params.view().secret_share(), &mut *rng), - C::random_nonce(params.view().secret_share(), &mut *rng), - ]; - - let commit = |generator: C::G, buf: &mut Vec| { - let commitments = [generator * nonces[0], generator * nonces[1]]; - buf.extend(commitments[0].to_bytes().as_ref()); - buf.extend(commitments[1].to_bytes().as_ref()); - 
commitments - }; - - let mut commitments = Vec::with_capacity(generators.len()); - for generator in generators.iter() { - commitments.push(commit(*generator, &mut serialized)); - } - - // Provide a DLEq proof to verify these commitments are for the same nonce - if generators.len() >= 2 { - // Uses an independent transcript as each signer must do this now, yet we validate them - // sequentially by the global order. Avoids needing to clone and fork the transcript around - let mut transcript = nonce_transcript::(); - - // This could be further optimized with a multi-nonce proof. - // See https://github.com/serai-dex/serai/issues/38 - for mut nonce in nonces { - DLEqProof::prove(&mut *rng, &mut transcript, generators, nonce) - .serialize(&mut serialized) - .unwrap(); - nonce.zeroize(); - } - } - - (nonces, commitments) - }) - .unzip(); - - let addendum = params.algorithm.preprocess_addendum(rng, ¶ms.view); - serialized.extend(&addendum); - - (PreprocessPackage { nonces, commitments, addendum }, serialized) -} - -#[allow(non_snake_case)] -fn read_D_E(cursor: &mut Re, l: u16) -> Result<[C::G; 2], FrostError> { - Ok([ - C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?, - C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?, - ]) -} - -#[allow(non_snake_case)] -struct Package { - B: HashMap>, C::F)>, - Rs: Vec>, - share: C::F, -} - -// Has every signer perform the role of the signature aggregator -// Step 1 was already deprecated by performing nonce generation as needed -// Step 2 is simply the broadcast round from step 1 -fn sign_with_share>( - params: &mut Params, - our_preprocess: PreprocessPackage, - mut commitments: HashMap, - msg: &[u8], -) -> Result<(Package, Vec), FrostError> { - let multisig_params = params.multisig_params(); - validate_map(&commitments, ¶ms.view.included, multisig_params.i)?; - - { - // Domain separate FROST - params.algorithm.transcript().domain_separate(b"FROST"); - } - - let nonces = params.algorithm.nonces(); - 
#[allow(non_snake_case)] - let mut B = HashMap::::with_capacity(params.view.included.len()); - { - // Parse the commitments - for l in ¶ms.view.included { - { - params - .algorithm - .transcript() - .append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref()); - } - - // While this doesn't note which nonce/basepoint this is for, those are expected to be - // static. Beyond that, they're committed to in the DLEq proof transcripts, ensuring - // consistency. While this is suboptimal, it maintains IETF compliance, and Algorithm is - // documented accordingly - let transcript = |t: &mut A::Transcript, commitments: [C::G; 2]| { - if commitments[0].ct_eq(&C::G::identity()).into() || - commitments[1].ct_eq(&C::G::identity()).into() - { - Err(FrostError::InvalidCommitment(*l))?; - } - t.append_message(b"commitment_D", commitments[0].to_bytes().as_ref()); - t.append_message(b"commitment_E", commitments[1].to_bytes().as_ref()); - Ok(()) - }; - - if *l == params.keys.params().i { - for nonce_commitments in &our_preprocess.commitments { - for commitments in nonce_commitments { - transcript(params.algorithm.transcript(), *commitments).unwrap(); - } - } - - B.insert(*l, (our_preprocess.commitments.clone(), C::F::zero())); - params.algorithm.process_addendum( - ¶ms.view, - *l, - &mut Cursor::new(our_preprocess.addendum.clone()), - )?; - } else { - let mut cursor = commitments.remove(l).unwrap(); - - let mut commitments = Vec::with_capacity(nonces.len()); - for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() { - commitments.push(Vec::with_capacity(nonce_generators.len())); - for _ in 0 .. nonce_generators.len() { - commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?); - transcript(params.algorithm.transcript(), commitments[n][commitments[n].len() - 1])?; - } - - if nonce_generators.len() >= 2 { - let mut transcript = nonce_transcript::(); - for de in 0 .. 2 { - DLEqProof::deserialize(&mut cursor) - .map_err(|_| FrostError::InvalidCommitment(*l))? 
- .verify( - &mut transcript, - nonce_generators, - &commitments[n].iter().map(|commitments| commitments[de]).collect::>(), - ) - .map_err(|_| FrostError::InvalidCommitment(*l))?; - } - } - } - - B.insert(*l, (commitments, C::F::zero())); - params.algorithm.process_addendum(¶ms.view, *l, &mut cursor)?; - } - } - - // Re-format into the FROST-expected rho transcript - let mut rho_transcript = A::Transcript::new(b"FROST_rho"); - rho_transcript.append_message(b"message", &C::hash_msg(msg)); - // This won't just be the commitments, yet the full existing transcript if used in an extended - // protocol - rho_transcript.append_message( - b"commitments", - &C::hash_commitments(params.algorithm.transcript().challenge(b"commitments").as_ref()), - ); - - // Include the offset, if one exists - // While this isn't part of the FROST-expected rho transcript, the offset being here coincides - // with another specification (despite the transcript format being distinct) - if let Some(offset) = params.keys.offset { - // Transcript as a point - // Under a coordinated model, the coordinater can be the only party to know the discrete log - // of the offset. 
This removes the ability for any signer to provide the discrete log, - // proving a key is related to another, slightly increasing security - // While further code edits would still be required for such a model (having the offset - // communicated as a point along with only a single party applying the offset), this means it - // wouldn't require a transcript change as well - rho_transcript.append_message(b"offset", (C::generator() * offset).to_bytes().as_ref()); - } - - // Generate the per-signer binding factors - for (l, commitments) in B.iter_mut() { - let mut rho_transcript = rho_transcript.clone(); - rho_transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref()); - commitments.1 = C::hash_binding_factor(rho_transcript.challenge(b"rho").as_ref()); - } - - // Merge the rho transcript back into the global one to ensure its advanced while committing to - // everything - params - .algorithm - .transcript() - .append_message(b"rho_transcript", rho_transcript.challenge(b"merge").as_ref()); - } - - #[allow(non_snake_case)] - let mut Rs = Vec::with_capacity(nonces.len()); - for n in 0 .. nonces.len() { - Rs.push(vec![C::G::identity(); nonces[n].len()]); - for g in 0 .. 
nonces[n].len() { - #[allow(non_snake_case)] - let mut D = C::G::identity(); - let mut statements = Vec::with_capacity(B.len()); - #[allow(non_snake_case)] - for (B, binding) in B.values() { - D += B[n][g][0]; - statements.push((*binding, B[n][g][1])); - } - Rs[n][g] = D + multiexp_vartime(&statements); - } - } - - let mut nonces = our_preprocess - .nonces - .iter() - .map(|nonces| nonces[0] + (nonces[1] * B[¶ms.keys.params().i()].1)) - .collect::>(); - - let share = params.algorithm.sign_share(¶ms.view, &Rs, &nonces, msg); - nonces.zeroize(); - - Ok((Package { B, Rs, share }, share.to_repr().as_ref().to_vec())) -} - -fn complete>( - sign_params: &Params, - sign: Package, - mut shares: HashMap, -) -> Result { - let params = sign_params.multisig_params(); - validate_map(&shares, &sign_params.view.included, params.i)?; - - let mut responses = HashMap::new(); - let mut sum = C::F::zero(); - for l in &sign_params.view.included { - let part = if *l == params.i { - sign.share - } else { - C::read_F(shares.get_mut(l).unwrap()).map_err(|_| FrostError::InvalidShare(*l))? - }; - sum += part; - responses.insert(*l, part); - } - - // Perform signature validation instead of individual share validation - // For the success route, which should be much more frequent, this should be faster - // It also acts as an integrity check of this library's signing function - let res = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum); - if let Some(res) = res { - return Ok(res); - } - - // Find out who misbehaved. 
It may be beneficial to randomly sort this to have detection be - // within n / 2 on average, and not gameable to n, though that should be minor - for l in &sign_params.view.included { - if !sign_params.algorithm.verify_share( - sign_params.view.verification_share(*l), - &sign.B[l] - .0 - .iter() - .map(|nonces| { - nonces.iter().map(|commitments| commitments[0] + (commitments[1] * sign.B[l].1)).collect() - }) - .collect::>(), - responses[l], - ) { - Err(FrostError::InvalidShare(*l))?; - } - } - - // If everyone has a valid share and there were enough participants, this should've worked - Err(FrostError::InternalError("everyone had a valid share yet the signature was still invalid")) -} /// Trait for the initial state machine of a two-round signing protocol. pub trait PreprocessMachine { + /// Preprocess message for this machine. + type Preprocess: Clone + PartialEq + Writable; + /// Signature produced by this machine. type Signature: Clone + PartialEq + fmt::Debug; - type SignMachine: SignMachine; + /// SignMachine this PreprocessMachine turns into. + type SignMachine: SignMachine; /// Perform the preprocessing round required in order to sign. - /// Returns a byte vector to be broadcast to all participants, over an authenticated channel. - fn preprocess(self, rng: &mut R) -> (Self::SignMachine, Vec); -} - -/// Trait for the second machine of a two-round signing protocol. -pub trait SignMachine { - type SignatureMachine: SignatureMachine; - - /// Sign a message. - /// Takes in the participants' preprocesses. Returns a byte vector representing a signature share - /// to be broadcast to all participants, over an authenticated channel. - fn sign( - self, - commitments: HashMap, - msg: &[u8], - ) -> Result<(Self::SignatureMachine, Vec), FrostError>; -} - -/// Trait for the final machine of a two-round signing protocol. -pub trait SignatureMachine { - /// Complete signing. - /// Takes in everyone elses' shares. Returns the signature. 
- fn complete(self, shares: HashMap) -> Result; + /// Returns a preprocess message to be broadcast to all participants, over an authenticated + /// channel. + fn preprocess(self, rng: &mut R) + -> (Self::SignMachine, Self::Preprocess); } /// State machine which manages signing for an arbitrary signature algorithm. @@ -409,23 +138,11 @@ pub struct AlgorithmMachine> { params: Params, } -/// Next step of the state machine for the signing process. -pub struct AlgorithmSignMachine> { - params: Params, - preprocess: PreprocessPackage, -} - -/// Final step of the state machine for the signing process. -pub struct AlgorithmSignatureMachine> { - params: Params, - sign: Package, -} - impl> AlgorithmMachine { /// Creates a new machine to generate a signature with the specified keys. pub fn new( algorithm: A, - keys: FrostKeys, + keys: ThresholdKeys, included: &[u16], ) -> Result, FrostError> { Ok(AlgorithmMachine { params: Params::new(algorithm, keys, included)? }) @@ -434,39 +151,274 @@ impl> AlgorithmMachine { #[cfg(any(test, feature = "tests"))] pub(crate) fn unsafe_override_preprocess( self, - preprocess: PreprocessPackage, + nonces: Vec>, + preprocess: Preprocess, ) -> AlgorithmSignMachine { - AlgorithmSignMachine { params: self.params, preprocess } + AlgorithmSignMachine { params: self.params, nonces, preprocess } } } impl> PreprocessMachine for AlgorithmMachine { + type Preprocess = Preprocess; type Signature = A::Signature; type SignMachine = AlgorithmSignMachine; - fn preprocess(self, rng: &mut R) -> (Self::SignMachine, Vec) { + fn preprocess( + self, + rng: &mut R, + ) -> (Self::SignMachine, Preprocess) { let mut params = self.params; - let (preprocess, serialized) = preprocess::(rng, &mut params); - (AlgorithmSignMachine { params, preprocess }, serialized) + + let (nonces, commitments) = Commitments::new::<_, A::Transcript>( + &mut *rng, + params.view().secret_share(), + ¶ms.algorithm.nonces(), + ); + let addendum = params.algorithm.preprocess_addendum(rng, 
¶ms.view); + + let preprocess = Preprocess { commitments, addendum }; + (AlgorithmSignMachine { params, nonces, preprocess: preprocess.clone() }, preprocess) } } +/// Share of a signature produced via FROST. +#[derive(Clone, PartialEq, Eq)] +pub struct SignatureShare(C::F); +impl Writable for SignatureShare { + fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(self.0.to_repr().as_ref()) + } +} + +/// Trait for the second machine of a two-round signing protocol. +pub trait SignMachine { + /// Preprocess message for this machine. + type Preprocess: Clone + PartialEq + Writable; + /// SignatureShare message for this machine. + type SignatureShare: Clone + PartialEq + Writable; + /// SignatureMachine this SignMachine turns into. + type SignatureMachine: SignatureMachine; + + /// Read a Preprocess message. + fn read_preprocess(&self, reader: &mut R) -> io::Result; + + /// Sign a message. + /// Takes in the participants' preprocess messages. Returns the signature share to be broadcast + /// to all participants, over an authenticated channel. + fn sign( + self, + commitments: HashMap, + msg: &[u8], + ) -> Result<(Self::SignatureMachine, Self::SignatureShare), FrostError>; +} + +/// Next step of the state machine for the signing process. 
+pub struct AlgorithmSignMachine> { + params: Params, + pub(crate) nonces: Vec>, + pub(crate) preprocess: Preprocess, +} +impl> Zeroize for AlgorithmSignMachine { + fn zeroize(&mut self) { + self.nonces.zeroize() + } +} +impl> Drop for AlgorithmSignMachine { + fn drop(&mut self) { + self.zeroize() + } +} +impl> ZeroizeOnDrop for AlgorithmSignMachine {} + impl> SignMachine for AlgorithmSignMachine { + type Preprocess = Preprocess; + type SignatureShare = SignatureShare; type SignatureMachine = AlgorithmSignatureMachine; - fn sign( - self, - commitments: HashMap, - msg: &[u8], - ) -> Result<(Self::SignatureMachine, Vec), FrostError> { - let mut params = self.params; - let (sign, serialized) = sign_with_share(&mut params, self.preprocess, commitments, msg)?; - Ok((AlgorithmSignatureMachine { params, sign }, serialized)) + fn read_preprocess(&self, reader: &mut R) -> io::Result { + Ok(Preprocess { + commitments: Commitments::read::<_, A::Transcript>(reader, &self.params.algorithm.nonces())?, + addendum: self.params.algorithm.read_addendum(reader)?, + }) } + + fn sign( + mut self, + mut preprocesses: HashMap>, + msg: &[u8], + ) -> Result<(Self::SignatureMachine, SignatureShare), FrostError> { + let multisig_params = self.params.multisig_params(); + validate_map(&preprocesses, &self.params.view.included(), multisig_params.i())?; + + { + // Domain separate FROST + self.params.algorithm.transcript().domain_separate(b"FROST"); + } + + let nonces = self.params.algorithm.nonces(); + #[allow(non_snake_case)] + let mut B = BindingFactor(HashMap::::with_capacity(self.params.view.included().len())); + { + // Parse the preprocesses + for l in &self.params.view.included() { + { + self + .params + .algorithm + .transcript() + .append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref()); + } + + if *l == self.params.keys.params().i() { + let commitments = self.preprocess.commitments.clone(); + commitments.transcript(self.params.algorithm.transcript()); + + let 
addendum = self.preprocess.addendum.clone(); + { + let mut buf = vec![]; + addendum.write(&mut buf).unwrap(); + self.params.algorithm.transcript().append_message(b"addendum", &buf); + } + + B.insert(*l, commitments); + self.params.algorithm.process_addendum(&self.params.view, *l, addendum)?; + } else { + let preprocess = preprocesses.remove(l).unwrap(); + preprocess.commitments.transcript(self.params.algorithm.transcript()); + { + let mut buf = vec![]; + preprocess.addendum.write(&mut buf).unwrap(); + self.params.algorithm.transcript().append_message(b"addendum", &buf); + } + + B.insert(*l, preprocess.commitments); + self.params.algorithm.process_addendum(&self.params.view, *l, preprocess.addendum)?; + } + } + + // Re-format into the FROST-expected rho transcript + let mut rho_transcript = A::Transcript::new(b"FROST_rho"); + rho_transcript.append_message(b"message", &C::hash_msg(msg)); + rho_transcript.append_message( + b"preprocesses", + &C::hash_commitments( + self.params.algorithm.transcript().challenge(b"preprocesses").as_ref(), + ), + ); + + // Include the offset, if one exists + // While this isn't part of the FROST-expected rho transcript, the offset being here + // coincides with another specification (despite the transcript format still being distinct) + if let Some(offset) = self.params.keys.current_offset() { + // Transcript as a point + // Under a coordinated model, the coordinater can be the only party to know the discrete + // log of the offset. 
This removes the ability for any signer to provide the discrete log, + // proving a key is related to another, slightly increasing security + // While further code edits would still be required for such a model (having the offset + // communicated as a point along with only a single party applying the offset), this means + // it wouldn't require a transcript change as well + rho_transcript.append_message(b"offset", (C::generator() * offset).to_bytes().as_ref()); + } + + // Generate the per-signer binding factors + B.calculate_binding_factors(&mut rho_transcript); + + // Merge the rho transcript back into the global one to ensure its advanced, while + // simultaneously committing to everything + self + .params + .algorithm + .transcript() + .append_message(b"rho_transcript", rho_transcript.challenge(b"merge").as_ref()); + } + + #[allow(non_snake_case)] + let Rs = B.nonces(&nonces); + + let our_binding_factors = B.binding_factors(multisig_params.i()); + let mut nonces = self + .nonces + .iter() + .enumerate() + .map(|(n, nonces)| nonces.0[0] + (nonces.0[1] * our_binding_factors[n])) + .collect::>(); + self.nonces.zeroize(); + + let share = self.params.algorithm.sign_share(&self.params.view, &Rs, &nonces, msg); + nonces.zeroize(); + + Ok(( + AlgorithmSignatureMachine { params: self.params.clone(), B, Rs, share }, + SignatureShare(share), + )) + } +} + +/// Trait for the final machine of a two-round signing protocol. +pub trait SignatureMachine { + /// SignatureShare message for this machine. + type SignatureShare: Clone + PartialEq + Writable; + + /// Read a Signature Share message. + fn read_share(&self, reader: &mut R) -> io::Result; + + /// Complete signing. + /// Takes in everyone elses' shares. Returns the signature. + fn complete(self, shares: HashMap) -> Result; +} + +/// Final step of the state machine for the signing process. 
+#[allow(non_snake_case)] +pub struct AlgorithmSignatureMachine> { + params: Params, + B: BindingFactor, + Rs: Vec>, + share: C::F, } impl> SignatureMachine for AlgorithmSignatureMachine { - fn complete(self, shares: HashMap) -> Result { - complete(&self.params, self.sign, shares) + type SignatureShare = SignatureShare; + + fn read_share(&self, reader: &mut R) -> io::Result> { + Ok(SignatureShare(C::read_F(reader)?)) + } + + fn complete( + self, + mut shares: HashMap>, + ) -> Result { + let params = self.params.multisig_params(); + validate_map(&shares, &self.params.view.included(), params.i())?; + + let mut responses = HashMap::new(); + responses.insert(params.i(), self.share); + let mut sum = self.share; + for (l, share) in shares.drain() { + responses.insert(l, share.0); + sum += share.0; + } + + // Perform signature validation instead of individual share validation + // For the success route, which should be much more frequent, this should be faster + // It also acts as an integrity check of this library's signing function + if let Some(sig) = self.params.algorithm.verify(self.params.view.group_key(), &self.Rs, sum) { + return Ok(sig); + } + + // Find out who misbehaved. 
It may be beneficial to randomly sort this to have detection be + // within n / 2 on average, and not gameable to n, though that should be minor + // TODO + for l in &self.params.view.included() { + if !self.params.algorithm.verify_share( + self.params.view.verification_share(*l), + &self.B.bound(*l), + responses[l], + ) { + Err(FrostError::InvalidShare(*l))?; + } + } + + // If everyone has a valid share and there were enough participants, this should've worked + Err(FrostError::InternalError("everyone had a valid share yet the signature was still invalid")) } } diff --git a/crypto/frost/src/tests/curve.rs b/crypto/frost/src/tests/curve.rs index 56265f85..980f2435 100644 --- a/crypto/frost/src/tests/curve.rs +++ b/crypto/frost/src/tests/curve.rs @@ -1,23 +1,8 @@ -use std::io::Cursor; - use rand_core::{RngCore, CryptoRng}; use group::Group; -use crate::{Curve, FrostCore, tests::core_gen}; - -// Test generation of FROST keys -fn key_generation(rng: &mut R) { - // This alone verifies the verification shares and group key are agreed upon as expected - core_gen::<_, C>(rng); -} - -// Test serialization of generated keys -fn keys_serialization(rng: &mut R) { - for (_, keys) in core_gen::<_, C>(rng) { - assert_eq!(&FrostCore::::deserialize(&mut Cursor::new(keys.serialize())).unwrap(), &keys); - } -} +use crate::Curve; // Test successful multiexp, with enough pairs to trigger its variety of algorithms // Multiexp has its own tests, yet only against k256 and Ed25519 (which should be sufficient @@ -27,7 +12,7 @@ pub fn test_multiexp(rng: &mut R) { let mut sum = C::G::identity(); for _ in 0 .. 10 { for _ in 0 .. 
100 { - pairs.push((C::random_F(&mut *rng), C::generator() * C::random_F(&mut *rng))); + pairs.push((C::random_nonzero_F(&mut *rng), C::generator() * C::random_nonzero_F(&mut *rng))); sum += pairs[pairs.len() - 1].1 * pairs[pairs.len() - 1].0; } assert_eq!(multiexp::multiexp(&pairs), sum); @@ -39,8 +24,4 @@ pub fn test_curve(rng: &mut R) { // TODO: Test the Curve functions themselves test_multiexp::<_, C>(rng); - - // Test FROST key generation and serialization of FrostCore works as expected - key_generation::<_, C>(rng); - keys_serialization::<_, C>(rng); } diff --git a/crypto/frost/src/tests/literal/dalek.rs b/crypto/frost/src/tests/literal/dalek.rs index 02cddec2..9a11c5d2 100644 --- a/crypto/frost/src/tests/literal/dalek.rs +++ b/crypto/frost/src/tests/literal/dalek.rs @@ -5,7 +5,7 @@ use crate::{ tests::vectors::{Vectors, test_with_vectors}, }; -#[cfg(any(test, feature = "ristretto"))] +#[cfg(feature = "ristretto")] #[test] fn ristretto_vectors() { test_with_vectors::<_, curve::Ristretto, curve::IetfRistrettoHram>( diff --git a/crypto/frost/src/tests/literal/ed448.rs b/crypto/frost/src/tests/literal/ed448.rs index df0c9ee7..dc5bf3ac 100644 --- a/crypto/frost/src/tests/literal/ed448.rs +++ b/crypto/frost/src/tests/literal/ed448.rs @@ -1,10 +1,11 @@ -use std::io::Cursor; - use rand_core::OsRng; +use ciphersuite::Ciphersuite; + +use schnorr::SchnorrSignature; + use crate::{ - curve::{Curve, Ed448, Ietf8032Ed448Hram, IetfEd448Hram}, - schnorr::{SchnorrSignature, verify}, + curve::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram}, tests::vectors::{Vectors, test_with_vectors}, }; @@ -13,38 +14,35 @@ fn ed448_8032_vector() { let context = hex::decode("666f6f").unwrap(); #[allow(non_snake_case)] - let A = Ed448::read_G(&mut Cursor::new( - hex::decode( + let A = Ed448::read_G::<&[u8]>( + &mut hex::decode( "43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c".to_owned() + "6798c0866aea01eb00742802b8438ea4cb82169c235160627b4c3a94" + "80", ) - .unwrap(), - )) + .unwrap() + 
.as_ref(), + ) .unwrap(); let msg = hex::decode("03").unwrap(); - let mut sig = Cursor::new( - hex::decode( - "d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b3".to_owned() + - "2a89f7d2151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea" + - "00" + - "0c85741de5c8da1144a6a1aba7f96de42505d7a7298524fda538fccb" + - "bb754f578c1cad10d54d0d5428407e85dcbc98a49155c13764e66c3c" + - "00", - ) - .unwrap(), - ); + let sig = hex::decode( + "d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b3".to_owned() + + "2a89f7d2151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea" + + "00" + + "0c85741de5c8da1144a6a1aba7f96de42505d7a7298524fda538fccb" + + "bb754f578c1cad10d54d0d5428407e85dcbc98a49155c13764e66c3c" + + "00", + ) + .unwrap(); #[allow(non_snake_case)] - let R = Ed448::read_G(&mut sig).unwrap(); - let s = Ed448::read_F(&mut sig).unwrap(); + let R = Ed448::read_G::<&[u8]>(&mut sig.as_ref()).unwrap(); + let s = Ed448::read_F::<&[u8]>(&mut &sig[57 ..]).unwrap(); - assert!(verify( - A, - Ietf8032Ed448Hram::hram(&context, &R, &A, &msg), - &SchnorrSignature:: { R, s } - )); + assert!( + SchnorrSignature:: { R, s }.verify(A, Ietf8032Ed448Hram::hram(&context, &R, &A, &msg)) + ); } #[test] diff --git a/crypto/frost/src/tests/literal/mod.rs b/crypto/frost/src/tests/literal/mod.rs index f825b95a..9650571f 100644 --- a/crypto/frost/src/tests/literal/mod.rs +++ b/crypto/frost/src/tests/literal/mod.rs @@ -1,6 +1,6 @@ -#[cfg(any(test, feature = "dalek"))] +#[cfg(any(feature = "ristretto", feature = "ed25519"))] mod dalek; -#[cfg(feature = "kp256")] +#[cfg(any(feature = "secp256k1", feature = "p256"))] mod kp256; #[cfg(feature = "ed448")] mod ed448; diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index 72f6ccc5..81bb9f8f 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -1,22 +1,17 @@ -use std::{io::Cursor, collections::HashMap}; +use std::collections::HashMap; use rand_core::{RngCore, CryptoRng}; -use group::ff::Field; +pub use 
dkg::tests::{key_gen, recover_key}; use crate::{ - Curve, FrostParams, FrostCore, FrostKeys, lagrange, - key_gen::KeyGenMachine, + Curve, ThresholdKeys, algorithm::Algorithm, - sign::{PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, + sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, }; /// Curve tests. pub mod curve; -/// Schnorr signature tests. -pub mod schnorr; -/// Promotion tests. -pub mod promote; /// Vectorized test suite to ensure consistency. pub mod vectors; @@ -39,85 +34,11 @@ pub fn clone_without( res } -/// Generate FROST keys (as FrostCore objects) for tests. -pub fn core_gen(rng: &mut R) -> HashMap> { - let mut machines = HashMap::new(); - let mut commitments = HashMap::new(); - for i in 1 ..= PARTICIPANTS { - let machine = KeyGenMachine::::new( - FrostParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(), - "FROST Test key_gen".to_string(), - ); - let (machine, these_commitments) = machine.generate_coefficients(rng); - machines.insert(i, machine); - commitments.insert(i, Cursor::new(these_commitments)); - } - - let mut secret_shares = HashMap::new(); - let mut machines = machines - .drain() - .map(|(l, machine)| { - let (machine, shares) = - machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); - secret_shares.insert(l, shares); - (l, machine) - }) - .collect::>(); - - let mut verification_shares = None; - let mut group_key = None; - machines - .drain() - .map(|(i, machine)| { - let mut our_secret_shares = HashMap::new(); - for (l, shares) in &secret_shares { - if i == *l { - continue; - } - our_secret_shares.insert(*l, Cursor::new(shares[&i].clone())); - } - let these_keys = machine.complete(rng, our_secret_shares).unwrap(); - - // Verify the verification_shares are agreed upon - if verification_shares.is_none() { - verification_shares = Some(these_keys.verification_shares()); - } - assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares()); - - 
// Verify the group keys are agreed upon - if group_key.is_none() { - group_key = Some(these_keys.group_key()); - } - assert_eq!(group_key.unwrap(), these_keys.group_key()); - - (i, these_keys) - }) - .collect::>() -} - -/// Generate FROST keys for tests. -pub fn key_gen(rng: &mut R) -> HashMap> { - core_gen(rng).drain().map(|(i, core)| (i, FrostKeys::new(core))).collect() -} - -/// Recover the secret from a collection of keys. -pub fn recover(keys: &HashMap>) -> C::F { - let first = keys.values().next().expect("no keys provided"); - assert!(keys.len() >= first.params().t().into(), "not enough keys provided"); - let included = keys.keys().cloned().collect::>(); - - let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| { - accum + (keys.secret_share() * lagrange::(*i, &included)) - }); - assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys"); - group_private -} - /// Spawn algorithm machines for a random selection of signers, each executing the given algorithm. 
pub fn algorithm_machines>( rng: &mut R, algorithm: A, - keys: &HashMap>, + keys: &HashMap>, ) -> HashMap> { let mut included = vec![]; while included.len() < usize::from(keys[&1].params().t()) { @@ -154,7 +75,11 @@ pub fn sign( .drain() .map(|(i, machine)| { let (machine, preprocess) = machine.preprocess(rng); - commitments.insert(i, Cursor::new(preprocess)); + commitments.insert(i, { + let mut buf = vec![]; + preprocess.write(&mut buf).unwrap(); + machine.read_preprocess::<&[u8]>(&mut buf.as_ref()).unwrap() + }); (i, machine) }) .collect::>(); @@ -164,7 +89,11 @@ pub fn sign( .drain() .map(|(i, machine)| { let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap(); - shares.insert(i, Cursor::new(share)); + shares.insert(i, { + let mut buf = vec![]; + share.write(&mut buf).unwrap(); + machine.read_share::<&[u8]>(&mut buf.as_ref()).unwrap() + }); (i, machine) }) .collect::>(); diff --git a/crypto/frost/src/tests/promote.rs b/crypto/frost/src/tests/promote.rs deleted file mode 100644 index c4984788..00000000 --- a/crypto/frost/src/tests/promote.rs +++ /dev/null @@ -1,121 +0,0 @@ -use std::{marker::PhantomData, collections::HashMap}; - -use rand_core::{RngCore, CryptoRng}; - -use zeroize::Zeroize; - -use group::Group; - -use crate::{ - Curve, // FrostKeys, - promote::{GeneratorPromotion /* CurvePromote */}, - tests::{clone_without, key_gen, schnorr::sign_core}, -}; - -/* -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -struct AltFunctions { - _curve: PhantomData, -} - -impl Curve for AltFunctions { - type F = C::F; - type G = C::G; - - const ID: &'static [u8] = b"alt_functions"; - - fn generator() -> Self::G { - C::generator() - } - - fn hash_msg(msg: &[u8]) -> Vec { - C::hash_msg(&[msg, b"alt"].concat()) - } - - fn hash_binding_factor(binding: &[u8]) -> Self::F { - C::hash_to_F(b"rho_alt", binding) - } - - fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { - C::hash_to_F(&[dst, b"alt"].concat(), msg) - } -} - -// Test promotion of FROST 
keys to another set of functions for interoperability -fn test_ciphersuite_promotion(rng: &mut R) { - let keys = key_gen::<_, C>(&mut *rng); - for keys in keys.values() { - let promoted: FrostKeys> = keys.clone().promote(); - // Verify equivalence via their serializations, minus the ID's length and ID itself - assert_eq!( - keys.serialize()[(4 + C::ID.len()) ..], - promoted.serialize()[(4 + AltFunctions::::ID.len()) ..] - ); - } -} -*/ - -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -struct AltGenerator { - _curve: PhantomData, -} - -impl Curve for AltGenerator { - type F = C::F; - type G = C::G; - - const ID: &'static [u8] = b"alt_generator"; - - fn generator() -> Self::G { - C::G::generator() * C::hash_to_F(b"FROST_tests", b"generator") - } - - fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec { - C::hash_to_vec(&[b"FROST_tests_alt", dst].concat(), data) - } - - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { - C::hash_to_F(&[b"FROST_tests_alt", dst].concat(), data) - } -} - -// Test promotion of FROST keys to another generator -fn test_generator_promotion(rng: &mut R) { - // A seeded RNG can theoretically generate for C1 and C2, verifying promotion that way? 
- // TODO - let keys = key_gen::<_, C>(&mut *rng); - - let mut promotions = HashMap::new(); - let mut proofs = HashMap::new(); - for (i, keys) in &keys { - let promotion = GeneratorPromotion::<_, AltGenerator>::promote(&mut *rng, keys.clone()); - promotions.insert(*i, promotion.0); - proofs.insert(*i, promotion.1); - } - - let mut new_keys = HashMap::new(); - let mut group_key = None; - let mut verification_shares = None; - for (i, promoting) in promotions.drain() { - let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); - assert_eq!(keys[&i].params(), promoted.params()); - assert_eq!(keys[&i].secret_share(), promoted.secret_share()); - - if group_key.is_none() { - group_key = Some(keys[&i].group_key()); - verification_shares = Some(keys[&i].verification_shares()); - } - assert_eq!(keys[&i].group_key(), group_key.unwrap()); - assert_eq!(&keys[&i].verification_shares(), verification_shares.as_ref().unwrap()); - - new_keys.insert(i, promoted); - } - - // Sign with the keys to ensure their integrity - sign_core(rng, &new_keys); -} - -pub fn test_promotion(rng: &mut R) { - // test_ciphersuite_promotion::<_, C>(rng); - test_generator_promotion::<_, C>(rng); -} diff --git a/crypto/frost/src/tests/schnorr.rs b/crypto/frost/src/tests/schnorr.rs deleted file mode 100644 index d25d066d..00000000 --- a/crypto/frost/src/tests/schnorr.rs +++ /dev/null @@ -1,131 +0,0 @@ -use std::{marker::PhantomData, collections::HashMap}; - -use rand_core::{RngCore, CryptoRng}; - -use group::{ff::Field, Group, GroupEncoding}; - -use crate::{ - Curve, FrostKeys, - schnorr::{self, SchnorrSignature}, - algorithm::{Hram, Schnorr}, - tests::{key_gen, algorithm_machines, sign as sign_test}, -}; - -pub(crate) fn core_sign(rng: &mut R) { - let private_key = C::random_F(&mut *rng); - let nonce = C::random_F(&mut *rng); - let challenge = C::random_F(rng); // Doesn't bother to craft an HRAm - assert!(schnorr::verify::( - C::generator() * private_key, - challenge, - 
&schnorr::sign(private_key, nonce, challenge) - )); -} - -// The above sign function verifies signing works -// This verifies invalid signatures don't pass, using zero signatures, which should effectively be -// random -pub(crate) fn core_verify(rng: &mut R) { - assert!(!schnorr::verify::( - C::generator() * C::random_F(&mut *rng), - C::random_F(rng), - &SchnorrSignature { R: C::G::identity(), s: C::F::zero() } - )); -} - -pub(crate) fn core_batch_verify(rng: &mut R) { - // Create 5 signatures - let mut keys = vec![]; - let mut challenges = vec![]; - let mut sigs = vec![]; - for i in 0 .. 5 { - keys.push(C::random_F(&mut *rng)); - challenges.push(C::random_F(&mut *rng)); - sigs.push(schnorr::sign::(keys[i], C::random_F(&mut *rng), challenges[i])); - } - - // Batch verify - let triplets = (0 .. 5) - .map(|i| (u16::try_from(i + 1).unwrap(), C::generator() * keys[i], challenges[i], sigs[i])) - .collect::>(); - schnorr::batch_verify(rng, &triplets).unwrap(); - - // Shift 1 from s from one to another and verify it fails - // This test will fail if unique factors aren't used per-signature, hence its inclusion - { - let mut triplets = triplets.clone(); - triplets[1].3.s += C::F::one(); - triplets[2].3.s -= C::F::one(); - if let Err(blame) = schnorr::batch_verify(rng, &triplets) { - assert_eq!(blame, 2); - } else { - panic!("batch verification considered a malleated signature valid"); - } - } - - // Make sure a completely invalid signature fails when included - for i in 0 .. 
5 { - let mut triplets = triplets.clone(); - triplets[i].3.s = C::random_F(&mut *rng); - if let Err(blame) = schnorr::batch_verify(rng, &triplets) { - assert_eq!(blame, u16::try_from(i + 1).unwrap()); - } else { - panic!("batch verification considered an invalid signature valid"); - } - } -} - -pub(crate) fn sign_core( - rng: &mut R, - keys: &HashMap>, -) { - const MESSAGE: &[u8] = b"Hello, World!"; - - let machines = algorithm_machines(rng, Schnorr::>::new(), keys); - let sig = sign_test(&mut *rng, machines, MESSAGE); - - let group_key = keys[&1].group_key(); - assert!(schnorr::verify(group_key, TestHram::::hram(&sig.R, &group_key, MESSAGE), &sig)); -} - -#[derive(Clone)] -pub struct TestHram { - _curve: PhantomData, -} -impl Hram for TestHram { - #[allow(non_snake_case)] - fn hram(R: &C::G, A: &C::G, m: &[u8]) -> C::F { - C::hash_to_F(b"challenge", &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat()) - } -} - -fn sign(rng: &mut R) { - let keys = key_gen::<_, C>(&mut *rng); - sign_core(rng, &keys); -} - -fn sign_with_offset(rng: &mut R) { - let mut keys = key_gen::<_, C>(&mut *rng); - let group_key = keys[&1].group_key(); - - let offset = C::hash_to_F(b"FROST Test sign_with_offset", b"offset"); - for i in 1 ..= u16::try_from(keys.len()).unwrap() { - keys.insert(i, keys[&i].offset(offset)); - } - let offset_key = group_key + (C::generator() * offset); - assert_eq!(keys[&1].group_key(), offset_key); - - sign_core(rng, &keys); -} - -pub fn test_schnorr(rng: &mut R) { - // Test Schnorr signatures work as expected - // This is a bit unnecessary, as they should for any valid curve, yet this establishes sanity - core_sign::<_, C>(rng); - core_verify::<_, C>(rng); - core_batch_verify::<_, C>(rng); - - // Test Schnorr signatures under FROST - sign::<_, C>(rng); - sign_with_offset::<_, C>(rng); -} diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 747df6a1..4a64b4ec 100644 --- a/crypto/frost/src/tests/vectors.rs +++ 
b/crypto/frost/src/tests/vectors.rs @@ -1,4 +1,4 @@ -use std::{io::Cursor, collections::HashMap}; +use std::collections::HashMap; #[cfg(test)] use std::str::FromStr; @@ -6,14 +6,17 @@ use rand_core::{RngCore, CryptoRng}; use group::{ff::PrimeField, GroupEncoding}; +use dkg::tests::{test_ciphersuite as test_dkg}; + use crate::{ curve::Curve, - FrostCore, FrostKeys, + ThresholdCore, ThresholdKeys, algorithm::{Schnorr, Hram}, - sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine}, - tests::{ - clone_without, curve::test_curve, schnorr::test_schnorr, promote::test_promotion, recover, + sign::{ + Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess, SignMachine, + SignatureMachine, AlgorithmMachine, }, + tests::{clone_without, recover_key, curve::test_curve}, }; pub struct Vectors { @@ -73,17 +76,18 @@ impl From for Vectors { } } -// Load these vectors into FrostKeys using a custom serialization it'll deserialize -fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap> { +// Load these vectors into ThresholdKeys using a custom serialization it'll deserialize +fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap> { let shares = vectors .shares .iter() - .map(|secret| C::read_F(&mut Cursor::new(hex::decode(secret).unwrap())).unwrap()) + .map(|secret| C::read_F::<&[u8]>(&mut hex::decode(secret).unwrap().as_ref()).unwrap()) .collect::>(); let verification_shares = shares.iter().map(|secret| C::generator() * secret).collect::>(); let mut keys = HashMap::new(); for i in 1 ..= u16::try_from(shares.len()).unwrap() { + // Manually re-implement the serialization for ThresholdCore to import this data let mut serialized = vec![]; serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes()); serialized.extend(C::ID); @@ -95,13 +99,13 @@ fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap::deserialize(&mut Cursor::new(serialized)).unwrap(); + let these_keys = ThresholdCore::::deserialize::<&[u8]>(&mut 
serialized.as_ref()).unwrap(); assert_eq!(these_keys.params().t(), vectors.threshold); assert_eq!(usize::from(these_keys.params().n()), shares.len()); assert_eq!(these_keys.params().i(), i); assert_eq!(these_keys.secret_share(), shares[usize::from(i - 1)]); assert_eq!(hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key); - keys.insert(i, FrostKeys::new(these_keys)); + keys.insert(i, ThresholdKeys::new(these_keys)); } keys @@ -113,15 +117,18 @@ pub fn test_with_vectors>( ) { // Do basic tests before trying the vectors test_curve::<_, C>(&mut *rng); - test_schnorr::<_, C>(&mut *rng); - test_promotion::<_, C>(rng); + + // Test the DKG + test_dkg::<_, C>(&mut *rng); // Test against the vectors let keys = vectors_to_multisig_keys::(&vectors); - let group_key = C::read_G(&mut Cursor::new(hex::decode(&vectors.group_key).unwrap())).unwrap(); - let secret = C::read_F(&mut Cursor::new(hex::decode(&vectors.group_secret).unwrap())).unwrap(); + let group_key = + ::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap(); + let secret = + C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap(); assert_eq!(C::generator() * secret, group_key); - assert_eq!(recover(&keys), secret); + assert_eq!(recover_key(&keys), secret); let mut machines = vec![]; for i in &vectors.included { @@ -142,27 +149,36 @@ pub fn test_with_vectors>( .drain(..) 
.map(|(i, machine)| { let nonces = [ - C::read_F(&mut Cursor::new(hex::decode(&vectors.nonces[c][0]).unwrap())).unwrap(), - C::read_F(&mut Cursor::new(hex::decode(&vectors.nonces[c][1]).unwrap())).unwrap(), + C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][0]).unwrap().as_ref()).unwrap(), + C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][1]).unwrap().as_ref()).unwrap(), ]; c += 1; - let these_commitments = vec![[C::generator() * nonces[0], C::generator() * nonces[1]]]; - let machine = machine.unsafe_override_preprocess(PreprocessPackage { - nonces: vec![nonces], - commitments: vec![these_commitments.clone()], - addendum: vec![], - }); + let these_commitments = [C::generator() * nonces[0], C::generator() * nonces[1]]; + let machine = machine.unsafe_override_preprocess( + vec![Nonce(nonces)], + Preprocess { + commitments: Commitments { + nonces: vec![NonceCommitments { + generators: vec![GeneratorCommitments(these_commitments)], + dleqs: None, + }], + }, + addendum: (), + }, + ); commitments.insert( *i, - Cursor::new( - [ - these_commitments[0][0].to_bytes().as_ref(), - these_commitments[0][1].to_bytes().as_ref(), - ] - .concat() - .to_vec(), - ), + machine + .read_preprocess::<&[u8]>( + &mut [ + these_commitments[0].to_bytes().as_ref(), + these_commitments[1].to_bytes().as_ref(), + ] + .concat() + .as_ref(), + ) + .unwrap(), ); (i, machine) }) @@ -176,10 +192,15 @@ pub fn test_with_vectors>( let (machine, share) = machine.sign(clone_without(&commitments, i), &hex::decode(&vectors.msg).unwrap()).unwrap(); + let share = { + let mut buf = vec![]; + share.write(&mut buf).unwrap(); + buf + }; assert_eq!(share, hex::decode(&vectors.sig_shares[c]).unwrap()); c += 1; - shares.insert(*i, Cursor::new(share)); + shares.insert(*i, machine.read_share::<&[u8]>(&mut share.as_ref()).unwrap()); (i, machine) }) .collect::>(); diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml new file mode 100644 index 00000000..83c862e6 --- /dev/null +++ 
b/crypto/schnorr/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "schnorr-signatures" +version = "0.1.0" +description = "Minimal Schnorr signatures crate hosting common code" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr" +authors = ["Luke Parker "] +keywords = ["schnorr", "ff", "group"] +edition = "2021" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[dependencies] +rand_core = "0.6" + +zeroize = { version = "1.5", features = ["zeroize_derive"] } + +group = "0.12" +ciphersuite = { path = "../ciphersuite", version = "0.1" } + +multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } + +[dev-dependencies] +dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" } +ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["ristretto"] } diff --git a/crypto/schnorr/LICENSE b/crypto/schnorr/LICENSE new file mode 100644 index 00000000..c0617e57 --- /dev/null +++ b/crypto/schnorr/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2022 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/schnorr/src/lib.rs b/crypto/schnorr/src/lib.rs new file mode 100644 index 00000000..b9a1dad8 --- /dev/null +++ b/crypto/schnorr/src/lib.rs @@ -0,0 +1,86 @@ +use std::io::{self, Read, Write}; + +use rand_core::{RngCore, CryptoRng}; + +use zeroize::Zeroize; + +use group::{ + ff::{Field, PrimeField}, + GroupEncoding, +}; + +use multiexp::BatchVerifier; + +use ciphersuite::Ciphersuite; + +#[cfg(test)] +mod tests; + +/// A Schnorr signature of the form (R, s) where s = r + cx. +#[allow(non_snake_case)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct SchnorrSignature { + pub R: C::G, + pub s: C::F, +} + +impl SchnorrSignature { + /// Read a SchnorrSignature from something implementing Read. + pub fn read(reader: &mut R) -> io::Result { + Ok(SchnorrSignature { R: C::read_G(reader)?, s: C::read_F(reader)? }) + } + + /// Write a SchnorrSignature to something implementing Write. + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(self.R.to_bytes().as_ref())?; + writer.write_all(self.s.to_repr().as_ref()) + } + + /// Serialize a SchnorrSignature, returning a Vec. + pub fn serialize(&self) -> Vec { + let mut buf = vec![]; + self.write(&mut buf).unwrap(); + buf + } + + /// Produce a Schnorr signature with the given nonce for the specified challenge. + pub fn sign(mut private_key: C::F, mut nonce: C::F, challenge: C::F) -> SchnorrSignature { + let res = SchnorrSignature { R: C::generator() * nonce, s: nonce + (private_key * challenge) }; + private_key.zeroize(); + nonce.zeroize(); + res + } + + /// Verify a Schnorr signature for the given key with the specified challenge.
+ #[must_use] + pub fn verify(&self, public_key: C::G, challenge: C::F) -> bool { + (C::generator() * self.s) == (self.R + (public_key * challenge)) + } + + /// Queue a signature for batch verification. + pub fn batch_verify( + &self, + rng: &mut R, + batch: &mut BatchVerifier, + id: I, + public_key: C::G, + challenge: C::F, + ) { + // s = r + ca + // sG == R + cA + // R + cA - sG == 0 + + batch.queue( + rng, + id, + [ + // R + (C::F::one(), self.R), + // cA + (challenge, public_key), + // -sG + (-self.s, C::generator()), + ], + ); + } +} diff --git a/crypto/schnorr/src/tests.rs b/crypto/schnorr/src/tests.rs new file mode 100644 index 00000000..7aa1f08b --- /dev/null +++ b/crypto/schnorr/src/tests.rs @@ -0,0 +1,72 @@ +use rand_core::OsRng; + +use group::{ff::Field, Group}; + +use multiexp::BatchVerifier; + +use ciphersuite::{Ciphersuite, Ristretto}; +use crate::SchnorrSignature; + +pub(crate) fn core_sign() { + let private_key = C::random_nonzero_F(&mut OsRng); + let nonce = C::random_nonzero_F(&mut OsRng); + let challenge = C::random_nonzero_F(&mut OsRng); // Doesn't bother to craft an HRAm + assert!(SchnorrSignature::::sign(private_key, nonce, challenge) + .verify(C::generator() * private_key, challenge)); +} + +// The above sign function verifies signing works +// This verifies invalid signatures don't pass, using zero signatures, which should effectively be +// random +pub(crate) fn core_verify() { + assert!(!SchnorrSignature:: { R: C::G::identity(), s: C::F::zero() } + .verify(C::generator() * C::random_nonzero_F(&mut OsRng), C::random_nonzero_F(&mut OsRng))); +} + +pub(crate) fn core_batch_verify() { + // Create 5 signatures + let mut keys = vec![]; + let mut challenges = vec![]; + let mut sigs = vec![]; + for i in 0 .. 
5 { + keys.push(C::random_nonzero_F(&mut OsRng)); + challenges.push(C::random_nonzero_F(&mut OsRng)); + sigs.push(SchnorrSignature::::sign(keys[i], C::random_nonzero_F(&mut OsRng), challenges[i])); + } + + // Batch verify + { + let mut batch = BatchVerifier::new(5); + for (i, sig) in sigs.iter().enumerate() { + sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i], challenges[i]); + } + batch.verify_with_vartime_blame().unwrap(); + } + + // Shift 1 from s from one to another and verify it fails + // This test will fail if unique factors aren't used per-signature, hence its inclusion + { + let mut batch = BatchVerifier::new(5); + for (i, mut sig) in sigs.clone().drain(..).enumerate() { + if i == 1 { + sig.s += C::F::one(); + } + if i == 2 { + sig.s -= C::F::one(); + } + sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i], challenges[i]); + } + if let Err(blame) = batch.verify_with_vartime_blame() { + assert!((blame == 1) || (blame == 2)); + } else { + panic!("Batch verification considered malleated signatures valid"); + } + } +} + +#[test] +fn test() { + core_sign::(); + core_verify::(); + core_batch_verify::(); +} diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml index 78e96cae..ff371db3 100644 --- a/deploy/docker-compose.yml +++ b/deploy/docker-compose.yml @@ -38,6 +38,8 @@ services: args: TAG: serai entrypoint: /scripts/entry-dev.sh + volumes: + - "./serai/scripts:/scripts" serai-base: <<: *serai_defaults diff --git a/deploy/serai/Dockerfile b/deploy/serai/Dockerfile index 87ea5998..b46543cd 100644 --- a/deploy/serai/Dockerfile +++ b/deploy/serai/Dockerfile @@ -20,12 +20,17 @@ RUN pip3 install solc-select==0.2.1 RUN solc-select install 0.8.16 RUN solc-select use 0.8.16 -# Build it -RUN cargo build --release +# Mount cargo and serai cache for Cache & Build +RUN --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/usr/local/cargo/registry \ + 
--mount=type=cache,target=/serai/target/release/build \ + --mount=type=cache,target=/serai/target/release/deps \ + --mount=type=cache,target=/serai/target/release/.fingerprint \ + --mount=type=cache,target=/serai/target/release/incremental \ + --mount=type=cache,target=/serai/target/release/wbuild \ + --mount=type=cache,target=/serai/target/release/lib* \ + cargo build --release -# Mount for Cache -RUN --mount=type=cache,target=/usr/local/cargo/registry \ - --mount=type=cache,target=/serai/target # Prepare Image FROM ubuntu:latest as image @@ -34,9 +39,8 @@ LABEL description="STAGE 2: Copy and Run" WORKDIR /home/serai # Copy necessary files to run node -COPY --from=builder /serai/target/release/* /bin/ +COPY --from=builder /serai/target/release/ /bin/ COPY --from=builder /serai/AGPL-3.0 . -COPY deploy/serai/scripts /scripts # Run node EXPOSE 30333 9615 9933 9944 diff --git a/processor/src/coin/mod.rs b/processor/src/coin/mod.rs index 6420faff..bc39bbfd 100644 --- a/processor/src/coin/mod.rs +++ b/processor/src/coin/mod.rs @@ -4,7 +4,11 @@ use async_trait::async_trait; use thiserror::Error; use transcript::RecommendedTranscript; -use frost::{curve::Curve, FrostKeys, sign::PreprocessMachine}; +use frost::{ + curve::{Ciphersuite, Curve}, + ThresholdKeys, + sign::PreprocessMachine, +}; pub mod monero; pub use self::monero::Monero; @@ -45,14 +49,14 @@ pub trait Coin { const MAX_OUTPUTS: usize; // TODO: Decide if this includes change or not // Doesn't have to take self, enables some level of caching which is pleasant - fn address(&self, key: ::G) -> Self::Address; + fn address(&self, key: ::G) -> Self::Address; async fn get_latest_block_number(&self) -> Result; async fn get_block(&self, number: usize) -> Result; async fn get_outputs( &self, block: &Self::Block, - key: ::G, + key: ::G, ) -> Result, CoinError>; // TODO: Remove @@ -60,7 +64,7 @@ pub trait Coin { async fn prepare_send( &self, - keys: FrostKeys, + keys: ThresholdKeys, transcript: RecommendedTranscript, 
block_number: usize, inputs: Vec, diff --git a/processor/src/coin/monero.rs b/processor/src/coin/monero.rs index 6643c466..ffa0caf8 100644 --- a/processor/src/coin/monero.rs +++ b/processor/src/coin/monero.rs @@ -4,7 +4,7 @@ use curve25519_dalek::scalar::Scalar; use dalek_ff_group as dfg; use transcript::RecommendedTranscript; -use frost::{curve::Ed25519, FrostKeys}; +use frost::{curve::Ed25519, ThresholdKeys}; use monero_serai::{ transaction::Transaction, @@ -55,7 +55,7 @@ impl OutputTrait for Output { #[derive(Debug)] pub struct SignableTransaction { - keys: FrostKeys, + keys: ThresholdKeys, transcript: RecommendedTranscript, // Monero height, defined as the length of the chain height: usize, @@ -157,7 +157,7 @@ impl Coin for Monero { async fn prepare_send( &self, - keys: FrostKeys, + keys: ThresholdKeys, transcript: RecommendedTranscript, block_number: usize, mut inputs: Vec, diff --git a/processor/src/lib.rs b/processor/src/lib.rs index 45583dc2..24d8c0d2 100644 --- a/processor/src/lib.rs +++ b/processor/src/lib.rs @@ -1,9 +1,9 @@ -use std::{marker::Send, io::Cursor, collections::HashMap}; +use std::{marker::Send, collections::HashMap}; use async_trait::async_trait; use thiserror::Error; -use frost::{curve::Curve, FrostError}; +use frost::{curve::Ciphersuite, FrostError}; mod coin; use coin::{CoinError, Coin}; @@ -18,7 +18,7 @@ pub enum NetworkError {} #[async_trait] pub trait Network: Send { - async fn round(&mut self, data: Vec) -> Result>>, NetworkError>; + async fn round(&mut self, data: Vec) -> Result>, NetworkError>; } #[derive(Clone, Error, Debug)] @@ -35,6 +35,9 @@ pub enum SignError { // Doesn't consider the current group key to increase the simplicity of verifying Serai's status // Takes an index, k, to support protocols which use multiple secondary keys // Presumably a view key -pub(crate) fn additional_key(k: u64) -> ::F { - C::Curve::hash_to_F(b"Serai DEX Additional Key", &[C::ID, &k.to_le_bytes()].concat()) +pub(crate) fn additional_key(k: u64) -> 
::F { + ::hash_to_F( + b"Serai DEX Additional Key", + &[C::ID, &k.to_le_bytes()].concat(), + ) } diff --git a/processor/src/tests/mod.rs b/processor/src/tests/mod.rs index 27cd8f31..11520a63 100644 --- a/processor/src/tests/mod.rs +++ b/processor/src/tests/mod.rs @@ -1,5 +1,4 @@ use std::{ - io::Cursor, sync::{Arc, RwLock}, collections::HashMap, }; @@ -19,7 +18,7 @@ struct LocalNetwork { i: u16, size: u16, round: usize, - rounds: Arc>>>>>, + rounds: Arc>>>>, } impl LocalNetwork { @@ -35,13 +34,13 @@ impl LocalNetwork { #[async_trait] impl Network for LocalNetwork { - async fn round(&mut self, data: Vec) -> Result>>, NetworkError> { + async fn round(&mut self, data: Vec) -> Result>, NetworkError> { { let mut rounds = self.rounds.write().unwrap(); if rounds.len() == self.round { rounds.push(HashMap::new()); } - rounds[self.round].insert(self.i, Cursor::new(data)); + rounds[self.round].insert(self.i, data); } while { diff --git a/processor/src/wallet.rs b/processor/src/wallet.rs index b2496e8a..82fadc8a 100644 --- a/processor/src/wallet.rs +++ b/processor/src/wallet.rs @@ -6,9 +6,9 @@ use group::GroupEncoding; use transcript::{Transcript, RecommendedTranscript}; use frost::{ - curve::Curve, - FrostKeys, - sign::{PreprocessMachine, SignMachine, SignatureMachine}, + curve::{Ciphersuite, Curve}, + FrostError, ThresholdKeys, + sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine}, }; use crate::{ @@ -17,12 +17,12 @@ use crate::{ }; pub struct WalletKeys { - keys: FrostKeys, + keys: ThresholdKeys, creation_block: usize, } impl WalletKeys { - pub fn new(keys: FrostKeys, creation_block: usize) -> WalletKeys { + pub fn new(keys: ThresholdKeys, creation_block: usize) -> WalletKeys { WalletKeys { keys, creation_block } } @@ -34,13 +34,13 @@ impl WalletKeys { // system, there are potentially other benefits to binding this to a specific group key // It's no longer possible to influence group key gen to key cancel without breaking the hash // function as well, 
although that degree of influence means key gen is broken already - fn bind(&self, chain: &[u8]) -> FrostKeys { + fn bind(&self, chain: &[u8]) -> ThresholdKeys { const DST: &[u8] = b"Serai Processor Wallet Chain Bind"; let mut transcript = RecommendedTranscript::new(DST); transcript.append_message(b"chain", chain); transcript.append_message(b"curve", C::ID); transcript.append_message(b"group_key", self.keys.group_key().to_bytes().as_ref()); - self.keys.offset(C::hash_to_F(DST, &transcript.challenge(b"offset"))) + self.keys.offset(::hash_to_F(DST, &transcript.challenge(b"offset"))) } } @@ -203,8 +203,8 @@ fn select_inputs_outputs( pub struct Wallet { db: D, coin: C, - keys: Vec<(FrostKeys, Vec)>, - pending: Vec<(usize, FrostKeys)>, + keys: Vec<(ThresholdKeys, Vec)>, + pending: Vec<(usize, ThresholdKeys)>, } impl Wallet { @@ -343,10 +343,36 @@ impl Wallet { self.coin.attempt_send(prepared, &included).await.map_err(SignError::CoinError)?; let (attempt, commitments) = attempt.preprocess(&mut OsRng); - let commitments = network.round(commitments).await.map_err(SignError::NetworkError)?; + let commitments = network + .round(commitments.serialize()) + .await + .map_err(SignError::NetworkError)? + .drain() + .map(|(validator, preprocess)| { + Ok(( + validator, + attempt + .read_preprocess::<&[u8]>(&mut preprocess.as_ref()) + .map_err(|_| SignError::FrostError(FrostError::InvalidPreprocess(validator)))?, + )) + }) + .collect::, _>>()?; let (attempt, share) = attempt.sign(commitments, b"").map_err(SignError::FrostError)?; - let shares = network.round(share).await.map_err(SignError::NetworkError)?; + let shares = network + .round(share.serialize()) + .await + .map_err(SignError::NetworkError)? 
+ .drain() + .map(|(validator, share)| { + Ok(( + validator, + attempt + .read_share::<&[u8]>(&mut share.as_ref()) + .map_err(|_| SignError::FrostError(FrostError::InvalidShare(validator)))?, + )) + }) + .collect::, _>>()?; let tx = attempt.complete(shares).map_err(SignError::FrostError)?;