From 9f84adf8b36308823c62d95e5bfc35f81287020d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 01:24:40 -0400 Subject: [PATCH] Smash dkg into dkg, dkg-[recovery, promote, musig, pedpop] promote and pedpop require dleq, which don't support no-std. All three should be moved outside the Serai repository, per #597, as none are planned for use and worth covering under our BBP. --- .github/workflows/crypto-tests.yml | 4 + Cargo.lock | 54 +- Cargo.toml | 4 + crypto/dkg/Cargo.toml | 30 +- crypto/dkg/LICENSE | 2 +- crypto/dkg/README.md | 22 +- crypto/dkg/musig/Cargo.toml | 49 + crypto/dkg/musig/LICENSE | 21 + crypto/dkg/musig/README.md | 12 + crypto/dkg/musig/src/lib.rs | 162 +++ crypto/dkg/musig/src/tests.rs | 70 + crypto/dkg/pedpop/Cargo.toml | 37 + crypto/dkg/pedpop/LICENSE | 21 + crypto/dkg/pedpop/README.md | 12 + crypto/dkg/{ => pedpop}/src/encryption.rs | 4 +- .../dkg/{src/pedpop.rs => pedpop/src/lib.rs} | 128 +- crypto/dkg/pedpop/src/tests.rs | 345 +++++ crypto/dkg/promote/Cargo.toml | 34 + crypto/dkg/promote/LICENSE | 21 + crypto/dkg/promote/README.md | 12 + .../{src/promote.rs => promote/src/lib.rs} | 99 +- crypto/dkg/promote/src/tests.rs | 113 ++ crypto/dkg/recovery/Cargo.toml | 34 + crypto/dkg/recovery/LICENSE | 21 + crypto/dkg/recovery/README.md | 14 + crypto/dkg/recovery/src/lib.rs | 85 ++ crypto/dkg/src/lib.rs | 1163 +++++++++-------- crypto/dkg/src/musig.rs | 129 -- crypto/dkg/src/tests/mod.rs | 102 -- crypto/dkg/src/tests/musig.rs | 61 - crypto/dkg/src/tests/pedpop.rs | 331 ----- crypto/dkg/src/tests/promote.rs | 66 - crypto/frost/Cargo.toml | 6 +- tests/no-std/Cargo.toml | 2 + tests/no-std/src/lib.rs | 2 + 35 files changed, 1910 insertions(+), 1362 deletions(-) create mode 100644 crypto/dkg/musig/Cargo.toml create mode 100644 crypto/dkg/musig/LICENSE create mode 100644 crypto/dkg/musig/README.md create mode 100644 crypto/dkg/musig/src/lib.rs create mode 100644 crypto/dkg/musig/src/tests.rs create mode 100644 crypto/dkg/pedpop/Cargo.toml create mode 100644 crypto/dkg/pedpop/LICENSE create mode 100644 crypto/dkg/pedpop/README.md rename crypto/dkg/{ => pedpop}/src/encryption.rs (99%) rename crypto/dkg/{src/pedpop.rs => pedpop/src/lib.rs} (86%) create mode 100644 crypto/dkg/pedpop/src/tests.rs create mode 100644 crypto/dkg/promote/Cargo.toml create mode 100644 crypto/dkg/promote/LICENSE create mode 100644 crypto/dkg/promote/README.md rename crypto/dkg/{src/promote.rs => promote/src/lib.rs} (54%) create mode 100644 crypto/dkg/promote/src/tests.rs create mode 100644 crypto/dkg/recovery/Cargo.toml create mode 100644 crypto/dkg/recovery/LICENSE create mode 100644 crypto/dkg/recovery/README.md create mode 100644 crypto/dkg/recovery/src/lib.rs delete mode 100644 crypto/dkg/src/musig.rs delete mode 100644 crypto/dkg/src/tests/mod.rs delete mode 100644 crypto/dkg/src/tests/musig.rs delete mode 100644 crypto/dkg/src/tests/pedpop.rs delete mode 100644 crypto/dkg/src/tests/promote.rs diff --git a/.github/workflows/crypto-tests.yml b/.github/workflows/crypto-tests.yml index d9d1df08..2e853e71 100644 --- a/.github/workflows/crypto-tests.yml +++ b/.github/workflows/crypto-tests.yml @@ -36,5 +36,9 @@ jobs: -p schnorr-signatures \ -p dleq \ -p dkg \ + -p dkg-recovery \ + -p dkg-promote \ + -p dkg-musig \ + -p dkg-pedpop \ -p modular-frost \ -p frost-schnorrkel diff --git a/Cargo.lock b/Cargo.lock index 95a91959..e4009f87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2211,17 +2211,65 @@ dependencies = [ [[package]] name = "dkg" -version = "0.5.1" +version = "0.6.0" dependencies = [ 
"borsh", + "ciphersuite", + "std-shims", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-musig" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", + "dkg-recovery", + "multiexp", + "rand_core", + "std-shims", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-pedpop" +version = "0.6.0" +dependencies = [ "chacha20", "ciphersuite", + "dkg", "dleq", "flexible-transcript", "multiexp", "rand_core", "schnorr-signatures", - "std-shims", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-promote" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", + "dkg-recovery", + "dleq", + "flexible-transcript", + "rand_core", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-recovery" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", "thiserror 2.0.14", "zeroize", ] @@ -8324,6 +8372,8 @@ dependencies = [ "ciphersuite", "dalek-ff-group", "dkg", + "dkg-musig", + "dkg-recovery", "dleq", "flexible-transcript", "minimal-ed448", diff --git a/Cargo.toml b/Cargo.toml index 6361422d..d1b1862e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,10 @@ members = [ "crypto/schnorr", "crypto/dleq", "crypto/dkg", + "crypto/dkg/recovery", + "crypto/dkg/promote", + "crypto/dkg/musig", + "crypto/dkg/pedpop", "crypto/frost", "crypto/schnorrkel", diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index b144cb26..51dc9162 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dkg" -version = "0.5.1" +version = "0.6.0" description = "Distributed key generation over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" @@ -17,50 +17,28 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] +zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive", "alloc"] } + thiserror = { version = "2", default-features = false } -rand_core = { version = "0.6", default-features = false } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - std-shims = { version = "0.1", path = "../../common/std-shims", default-features = false } borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true } -transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false, features = ["recommended"] } -chacha20 = { version = "0.9", default-features = false, features = ["zeroize"] } - -ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false } -multiexp = { path = "../multiexp", version = "0.4", default-features = false } - -schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false } -dleq = { path = "../dleq", version = "^0.4.1", default-features = false } +ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } [dev-dependencies] -rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] } [features] std = [ "thiserror/std", - "rand_core/std", - "std-shims/std", "borsh?/std", - "transcript/std", - "chacha20/std", - "ciphersuite/std", - "multiexp/std", - "multiexp/batch", - - "schnorr/std", - "dleq/std", - "dleq/serialize" ] borsh = ["dep:borsh"] -tests = ["rand_core/getrandom"] default = ["std"] diff --git a/crypto/dkg/LICENSE 
b/crypto/dkg/LICENSE
index be67c32f..6f7adff3 100644
--- a/crypto/dkg/LICENSE
+++ b/crypto/dkg/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2021-2023 Luke Parker
+Copyright (c) 2021-2025 Luke Parker
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/crypto/dkg/README.md b/crypto/dkg/README.md
index 27e3412a..eaad6ed5 100644
--- a/crypto/dkg/README.md
+++ b/crypto/dkg/README.md
@@ -1,16 +1,14 @@
 # Distributed Key Generation
 
-A collection of implementations of various distributed key generation protocols.
+A crate implementing a type for keys, presumably the result of a distributed key generation
+protocol, and utilities from there.
 
-All included protocols resolve into the provided `Threshold` types, intended to
-enable their modularity. Additional utilities around these types, such as
-promotion from one generator to another, are also provided.
+This crate used to host implementations of distributed key generation protocols as well (hence the
+name). Those have been smashed into their own crates, such as
+[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop).
 
-Currently, the only included protocol is the two-round protocol from the
-[FROST paper](https://eprint.iacr.org/2020/852).
-
-This library was
-[audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf),
-culminating in commit
-[669d2dbffc1dafb82a09d9419ea182667115df06](https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06).
-Any subsequent changes have not undergone auditing.
+Before being smashed, this crate was [audited by Cypher Stack in March 2023](
+  https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
+), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
+  https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
+). Any subsequent changes have not undergone auditing.
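As an orientation for reviewers, the sketch below (not itself part of this patch) shows how the split crates compose, using only the `dkg-musig` and `dkg-recovery` APIs introduced in this diff; it condenses the crate's own `test_musig`. The context constant and the three-party setup are arbitrary illustrative choices.

```rust
use zeroize::Zeroizing;
use rand_core::OsRng;

use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};

use dkg_musig::{musig, musig_key};
use dkg_recovery::recover_key;

fn musig_example() {
  // An arbitrary, application-chosen 32-byte context
  const CONTEXT: [u8; 32] = *b"MuSig example context padding...";

  // Each party has a private key and knows every party's public key
  let private_keys: Vec<_> = (0 .. 3)
    .map(|_| Zeroizing::new(<Ristretto as Ciphersuite>::F::random(&mut OsRng)))
    .collect();
  let public_keys: Vec<_> =
    private_keys.iter().map(|key| <Ristretto as Ciphersuite>::generator() * **key).collect();

  // Anyone can compute the aggregated group key from the public keys alone
  let group_key = musig_key::<Ristretto>(CONTEXT, &public_keys).unwrap();

  // Each party locally derives its `dkg::ThresholdKeys` for the resulting n-of-n multisig
  let all_keys: Vec<_> = private_keys
    .iter()
    .map(|key| musig::<Ristretto>(CONTEXT, key.clone(), &public_keys).unwrap())
    .collect();
  assert!(all_keys.iter().all(|keys| keys.group_key() == group_key));

  // With all n shares present, the group's private key is recoverable via `dkg-recovery`
  // (done here solely as a sanity check)
  assert_eq!(
    <Ristretto as Ciphersuite>::generator() * *recover_key(&all_keys).unwrap(),
    group_key,
  );
}
```

Note that, per its documentation in this diff, `musig` is an n-of-n, non-interactive aggregation which does not guarantee the usability of the resulting key.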
diff --git a/crypto/dkg/musig/Cargo.toml b/crypto/dkg/musig/Cargo.toml new file mode 100644 index 00000000..e2a971e7 --- /dev/null +++ b/crypto/dkg/musig/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "dkg-musig" +version = "0.6.0" +description = "The MuSig key aggregation protocol" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/musig" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +thiserror = { version = "2", default-features = false } + +rand_core = { version = "0.6", default-features = false } + +zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } + +std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false } + +multiexp = { path = "../../multiexp", version = "0.4", default-features = false } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false } +dkg = { path = "../", default-features = false } + +[dev-dependencies] +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } +dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] } + +[features] +std = [ + "thiserror/std", + + "rand_core/std", + + "std-shims/std", + + "multiexp/std", + "ciphersuite/std", + "dkg/std", +] +default = ["std"] diff --git a/crypto/dkg/musig/LICENSE b/crypto/dkg/musig/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/musig/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/musig/README.md b/crypto/dkg/musig/README.md new file mode 100644 index 00000000..9720e6f0 --- /dev/null +++ b/crypto/dkg/musig/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation - MuSig + +This implements the MuSig key aggregation protocol for the [`dkg`](https://docs.rs/dkg) crate's +types. 
+ +This crate was originally part of the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit +[669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. diff --git a/crypto/dkg/musig/src/lib.rs b/crypto/dkg/musig/src/lib.rs new file mode 100644 index 00000000..36f4fd31 --- /dev/null +++ b/crypto/dkg/musig/src/lib.rs @@ -0,0 +1,162 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +#![cfg_attr(not(feature = "std"), no_std)] + +use core::ops::Deref; +use std_shims::{ + vec, + vec::Vec, + collections::{HashSet, HashMap}, +}; + +use zeroize::Zeroizing; + +use ciphersuite::{group::GroupEncoding, Ciphersuite}; + +pub use dkg::*; + +#[cfg(test)] +mod tests; + +/// Errors encountered when working with threshold keys. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum MusigError { + /// No keys were provided. + #[error("no keys provided")] + NoKeysProvided, + /// Too many keys were provided. + #[error("too many keys (allowed {max}, provided {provided})")] + TooManyKeysProvided { + /// The maximum amount of keys allowed. + max: u16, + /// The amount of keys provided. + provided: usize, + }, + /// A participant was duplicated. + #[error("a participant was duplicated")] + DuplicatedParticipant(C::G), + /// Participating, yet our public key wasn't found in the list of keys. + #[error("private key's public key wasn't present in the list of public keys")] + NotPresent, + /// An error propagated from the underlying `dkg` crate. + #[error("error from dkg ({0})")] + DkgError(DkgError), +} + +fn check_keys(keys: &[C::G]) -> Result> { + if keys.is_empty() { + Err(MusigError::NoKeysProvided)?; + } + + let keys_len = u16::try_from(keys.len()) + .map_err(|_| MusigError::TooManyKeysProvided { max: u16::MAX, provided: keys.len() })?; + + let mut set = HashSet::with_capacity(keys.len()); + for key in keys { + let bytes = key.to_bytes().as_ref().to_vec(); + if !set.insert(bytes) { + Err(MusigError::DuplicatedParticipant(*key))?; + } + } + + Ok(keys_len) +} + +fn binding_factor_transcript( + context: [u8; 32], + keys_len: u16, + keys: &[C::G], +) -> Vec { + debug_assert_eq!(usize::from(keys_len), keys.len()); + + let mut transcript = vec![]; + transcript.extend(&context); + transcript.extend(keys_len.to_le_bytes()); + for key in keys { + transcript.extend(key.to_bytes().as_ref()); + } + transcript +} + +fn binding_factor(mut transcript: Vec, i: u16) -> C::F { + transcript.extend(i.to_le_bytes()); + C::hash_to_F(b"dkg-musig", &transcript) +} + +#[allow(clippy::type_complexity)] +fn musig_key_multiexp( + context: [u8; 32], + keys: &[C::G], +) -> Result, MusigError> { + let keys_len = check_keys::(keys)?; + let transcript = binding_factor_transcript::(context, keys_len, keys); + let mut multiexp = Vec::with_capacity(keys.len()); + for i in 1 ..= keys_len { + multiexp.push((binding_factor::(transcript.clone(), i), keys[usize::from(i - 1)])); + } + Ok(multiexp) +} + +/// The group key resulting from using this library's MuSig key aggregation. +/// +/// This function executes in variable time and MUST NOT be used with secret data. 
+pub fn musig_key_vartime( + context: [u8; 32], + keys: &[C::G], +) -> Result> { + Ok(multiexp::multiexp_vartime(&musig_key_multiexp(context, keys)?)) +} + +/// The group key resulting from using this library's MuSig key aggregation. +pub fn musig_key(context: [u8; 32], keys: &[C::G]) -> Result> { + Ok(multiexp::multiexp(&musig_key_multiexp(context, keys)?)) +} + +/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key. +pub fn musig( + context: [u8; 32], + private_key: Zeroizing, + keys: &[C::G], +) -> Result, MusigError> { + let our_pub_key = C::generator() * private_key.deref(); + let Some(our_i) = keys.iter().position(|key| *key == our_pub_key) else { + Err(MusigError::DkgError(DkgError::NotParticipating))? + }; + + let keys_len: u16 = check_keys::(keys)?; + + let params = ThresholdParams::new( + keys_len, + keys_len, + // The `+ 1` won't fail as `keys.len() <= u16::MAX`, so any index is `< u16::MAX` + Participant::new( + u16::try_from(our_i).expect("keys.len() <= u16::MAX yet index of keys > u16::MAX?") + 1, + ) + .expect("i + 1 != 0"), + ) + .map_err(MusigError::DkgError)?; + + let transcript = binding_factor_transcript::(context, keys_len, keys); + let mut binding_factors = Vec::with_capacity(keys.len()); + let mut multiexp = Vec::with_capacity(keys.len()); + let mut verification_shares = HashMap::with_capacity(keys.len()); + for (i, key) in (1 ..= keys_len).zip(keys.iter().copied()) { + let binding_factor = binding_factor::(transcript.clone(), i); + binding_factors.push(binding_factor); + multiexp.push((binding_factor, key)); + + let i = Participant::new(i).expect("non-zero u16 wasn't a valid Participant index?"); + verification_shares.insert(i, key); + } + let group_key = multiexp::multiexp(&multiexp); + debug_assert_eq!(our_pub_key, verification_shares[¶ms.i()]); + debug_assert_eq!(musig_key_vartime::(context, keys).unwrap(), group_key); + + ThresholdKeys::new( + params, + Interpolation::Constant(binding_factors), + private_key, + verification_shares, + ) + .map_err(MusigError::DkgError) +} diff --git a/crypto/dkg/musig/src/tests.rs b/crypto/dkg/musig/src/tests.rs new file mode 100644 index 00000000..a48dda68 --- /dev/null +++ b/crypto/dkg/musig/src/tests.rs @@ -0,0 +1,70 @@ +use std::collections::HashMap; + +use zeroize::Zeroizing; +use rand_core::OsRng; + +use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto}; + +use dkg_recovery::recover_key; +use crate::*; + +/// Tests MuSig key generation. +#[test] +pub fn test_musig() { + const PARTICIPANTS: u16 = 5; + + let mut keys = vec![]; + let mut pub_keys = vec![]; + for _ in 0 .. 
PARTICIPANTS { + let key = Zeroizing::new(::F::random(&mut OsRng)); + pub_keys.push(::generator() * *key); + keys.push(key); + } + + const CONTEXT: [u8; 32] = *b"MuSig Test "; + + // Empty signing set + musig::(CONTEXT, Zeroizing::new(::F::ZERO), &[]) + .unwrap_err(); + // Signing set we're not part of + musig::( + CONTEXT, + Zeroizing::new(::F::ZERO), + &[::generator()], + ) + .unwrap_err(); + + // Test with n keys + { + let mut created_keys = HashMap::new(); + let mut verification_shares = HashMap::new(); + let group_key = musig_key::(CONTEXT, &pub_keys).unwrap(); + for (i, key) in keys.iter().enumerate() { + let these_keys = musig::(CONTEXT, key.clone(), &pub_keys).unwrap(); + assert_eq!(these_keys.params().t(), PARTICIPANTS); + assert_eq!(these_keys.params().n(), PARTICIPANTS); + assert_eq!(usize::from(u16::from(these_keys.params().i())), i + 1); + + verification_shares.insert( + these_keys.params().i(), + ::generator() * **these_keys.secret_share(), + ); + + assert_eq!(these_keys.group_key(), group_key); + + created_keys.insert(these_keys.params().i(), these_keys); + } + + for keys in created_keys.values() { + for (l, verification_share) in &verification_shares { + assert_eq!(keys.original_verification_share(*l), *verification_share); + } + } + + assert_eq!( + ::generator() * + *recover_key(&created_keys.values().cloned().collect::>()).unwrap(), + group_key + ); + } +} diff --git a/crypto/dkg/pedpop/Cargo.toml b/crypto/dkg/pedpop/Cargo.toml new file mode 100644 index 00000000..cfc128d1 --- /dev/null +++ b/crypto/dkg/pedpop/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "dkg-pedpop" +version = "0.6.0" +description = "The PedPoP distributed key generation protocol" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/pedpop" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +thiserror = { version = "2", default-features = false, features = ["std"] } + +zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } +rand_core = { version = "0.6", default-features = false, features = ["std"] } + +transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] } +chacha20 = { version = "0.9", default-features = false, features = ["std", "zeroize"] } + +multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["std"] } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } +schnorr = { package = "schnorr-signatures", path = "../../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } +dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } + +dkg = { path = "../", default-features = false, features = ["std"] } + +[dev-dependencies] +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } diff --git a/crypto/dkg/pedpop/LICENSE b/crypto/dkg/pedpop/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/pedpop/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby 
granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/pedpop/README.md b/crypto/dkg/pedpop/README.md new file mode 100644 index 00000000..4ff801e0 --- /dev/null +++ b/crypto/dkg/pedpop/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation - PedPoP + +This implements the PedPoP distributed key generation protocol for the [`dkg`](https://docs.rs/dkg) +crate's types. + +This crate was originally part of the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit +[669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. 
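For reviewers, the snippet below is a condensed, in-memory sketch of the PedPoP round flow driven by the machines in this diff; it is not part of the patch and mirrors `pedpop_gen` from the crate's tests, skipping the message-serialization round-trips those tests exercise. The context constant and the 2-of-3 parameters are arbitrary illustrative choices, and all parties are simulated in one function purely for brevity.

```rust
use std::collections::HashMap;

use rand_core::OsRng;

use ciphersuite::Ristretto;

use dkg_pedpop::*;

fn pedpop_example() -> HashMap<Participant, ThresholdKeys<Ristretto>> {
  // An arbitrary, application-chosen 32-byte context and 2-of-3 parameters
  const CONTEXT: [u8; 32] = *b"PedPoP example context padding..";
  let (t, n) = (2u16, 3u16);

  // Round 1: each party samples coefficients and broadcasts its commitments message
  let mut machines = HashMap::new();
  let mut commitments = HashMap::new();
  for i in (1 ..= n).map(|i| Participant::new(i).unwrap()) {
    let params = ThresholdParams::new(t, n, i).unwrap();
    let (machine, msg) =
      KeyGenMachine::<Ristretto>::new(params, CONTEXT).generate_coefficients(&mut OsRng);
    machines.insert(i, machine);
    commitments.insert(i, msg);
  }

  // Round 2: each party verifies the others' commitments and produces an encrypted secret share
  // for every other party
  let mut machines_and_sent_shares = HashMap::new();
  for (i, machine) in machines {
    let others_commitments =
      commitments.iter().filter(|(l, _)| **l != i).map(|(l, msg)| (*l, msg.clone())).collect();
    let (machine, sent_shares) =
      machine.generate_secret_shares(&mut OsRng, others_commitments).unwrap();
    machines_and_sent_shares.insert(i, (machine, sent_shares));
  }

  // Deliver each encrypted share to its intended recipient
  let mut received_shares = HashMap::new();
  for (from, (_, sent_shares)) in &machines_and_sent_shares {
    for (to, share) in sent_shares {
      received_shares.entry(*to).or_insert_with(HashMap::new).insert(*from, share.clone());
    }
  }

  // Round 3: each party sums the shares it received into its own `ThresholdKeys`
  let mut keys = HashMap::new();
  for (i, (machine, _)) in machines_and_sent_shares {
    let these_keys = machine
      .calculate_share(&mut OsRng, received_shares.remove(&i).unwrap())
      .unwrap()
      .complete();
    keys.insert(i, these_keys);
  }
  keys
}
```

In an actual deployment, the commitments messages must be broadcast over an authenticated channel and the encrypted shares sent to their individual recipients, per the documentation on the machines in this diff; any error from `calculate_share` feeds the blame flow rather than being unwrapped.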
diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/pedpop/src/encryption.rs similarity index 99% rename from crypto/dkg/src/encryption.rs rename to crypto/dkg/pedpop/src/encryption.rs index 1ad721f6..ceec4b31 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/pedpop/src/encryption.rs @@ -21,7 +21,7 @@ use multiexp::BatchVerifier; use schnorr::SchnorrSignature; use dleq::DLEqProof; -use crate::{Participant, ThresholdParams}; +use dkg::{Participant, ThresholdParams}; mod sealed { use super::*; @@ -69,7 +69,7 @@ impl EncryptionKeyMessage { buf } - #[cfg(any(test, feature = "tests"))] + #[cfg(test)] pub(crate) fn enc_key(&self) -> C::G { self.enc_key } diff --git a/crypto/dkg/src/pedpop.rs b/crypto/dkg/pedpop/src/lib.rs similarity index 86% rename from crypto/dkg/src/pedpop.rs rename to crypto/dkg/pedpop/src/lib.rs index adfc6958..f9d609f8 100644 --- a/crypto/dkg/src/pedpop.rs +++ b/crypto/dkg/pedpop/src/lib.rs @@ -1,15 +1,20 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +// This crate requires `dleq` which doesn't support no-std via std-shims +// #![cfg_attr(not(feature = "std"), no_std)] + use core::{marker::PhantomData, ops::Deref, fmt}; use std::{ io::{self, Read, Write}, collections::HashMap, }; -use rand_core::{RngCore, CryptoRng}; - use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; +use rand_core::{RngCore, CryptoRng}; use transcript::{Transcript, RecommendedTranscript}; +use multiexp::{multiexp_vartime, BatchVerifier}; use ciphersuite::{ group::{ ff::{Field, PrimeField}, @@ -17,29 +22,75 @@ use ciphersuite::{ }, Ciphersuite, }; -use multiexp::{multiexp_vartime, BatchVerifier}; use schnorr::SchnorrSignature; -use crate::{ - Participant, DkgError, ThresholdParams, Interpolation, ThresholdCore, validate_map, - encryption::{ - ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, Decryption, EncryptionKeyProof, - DecryptionError, - }, -}; +pub use dkg::*; -type FrostError = DkgError>; +mod encryption; +pub use encryption::*; + +#[cfg(test)] +mod tests; + +/// Errors possible during key generation. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum PedPoPError { + /// An incorrect amount of participants was provided. + #[error("incorrect amount of participants (expected {expected}, found {found})")] + IncorrectAmountOfParticipants { expected: usize, found: usize }, + /// An invalid proof of knowledge was provided. + #[error("invalid proof of knowledge (participant {0})")] + InvalidCommitments(Participant), + /// An invalid DKG share was provided. + #[error("invalid share (participant {participant}, blame {blame})")] + InvalidShare { participant: Participant, blame: Option> }, + /// A participant was missing. + #[error("missing participant {0}")] + MissingParticipant(Participant), + /// An error propagated from the underlying `dkg` crate. 
+ #[error("error from dkg ({0})")] + DkgError(DkgError), +} + +// Validate a map of values to have the expected included participants +fn validate_map( + map: &HashMap, + included: &[Participant], + ours: Participant, +) -> Result<(), PedPoPError> { + if (map.len() + 1) != included.len() { + Err(PedPoPError::IncorrectAmountOfParticipants { + expected: included.len(), + found: map.len() + 1, + })?; + } + + for included in included { + if *included == ours { + if map.contains_key(included) { + Err(PedPoPError::DkgError(DkgError::DuplicatedParticipant(*included)))?; + } + continue; + } + + if !map.contains_key(included) { + Err(PedPoPError::MissingParticipant(*included))?; + } + } + + Ok(()) +} #[allow(non_snake_case)] fn challenge(context: [u8; 32], l: Participant, R: &[u8], Am: &[u8]) -> C::F { - let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2"); + let mut transcript = RecommendedTranscript::new(b"DKG PedPoP v0.2"); transcript.domain_separate(b"schnorr_proof_of_knowledge"); transcript.append_message(b"context", context); transcript.append_message(b"participant", l.to_bytes()); transcript.append_message(b"nonce", R); transcript.append_message(b"commitments", Am); - C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr")) + C::hash_to_F(b"DKG-PedPoP-proof_of_knowledge-0", &transcript.challenge(b"schnorr")) } /// The commitments message, intended to be broadcast to all other parties. @@ -98,7 +149,7 @@ impl KeyGenMachine { KeyGenMachine { params, context, _curve: PhantomData } } - /// Start generating a key according to the FROST DKG spec. + /// Start generating a key according to the PedPoP DKG specification present in the FROST paper. /// /// Returns a commitments message to be sent to all parties over an authenticated channel. If any /// party submits multiple sets of commitments, they MUST be treated as malicious. 
@@ -106,7 +157,7 @@ impl KeyGenMachine { self, rng: &mut R, ) -> (SecretShareMachine, EncryptionKeyMessage>) { - let t = usize::from(self.params.t); + let t = usize::from(self.params.t()); let mut coefficients = Vec::with_capacity(t); let mut commitments = Vec::with_capacity(t); let mut cached_msg = vec![]; @@ -133,7 +184,7 @@ impl KeyGenMachine { ); // Additionally create an encryption mechanism to protect the secret shares - let encryption = Encryption::new(self.context, self.params.i, rng); + let encryption = Encryption::new(self.context, self.params.i(), rng); // Step 4: Broadcast let msg = @@ -250,21 +301,21 @@ impl SecretShareMachine { &mut self, rng: &mut R, mut commitment_msgs: HashMap>>, - ) -> Result>, FrostError> { + ) -> Result>, PedPoPError> { validate_map( &commitment_msgs, - &(1 ..= self.params.n()).map(Participant).collect::>(), + &self.params.all_participant_indexes().collect::>(), self.params.i(), )?; let mut batch = BatchVerifier::::new(commitment_msgs.len()); let mut commitments = HashMap::new(); - for l in (1 ..= self.params.n()).map(Participant) { + for l in self.params.all_participant_indexes() { let Some(msg) = commitment_msgs.remove(&l) else { continue }; let mut msg = self.encryption.register(l, msg); if msg.commitments.len() != self.params.t().into() { - Err(FrostError::InvalidCommitments(l))?; + Err(PedPoPError::InvalidCommitments(l))?; } // Step 5: Validate each proof of knowledge @@ -280,9 +331,9 @@ impl SecretShareMachine { commitments.insert(l, msg.commitments.drain(..).collect::>()); } - batch.verify_vartime_with_vartime_blame().map_err(FrostError::InvalidCommitments)?; + batch.verify_vartime_with_vartime_blame().map_err(PedPoPError::InvalidCommitments)?; - commitments.insert(self.params.i, self.our_commitments.drain(..).collect()); + commitments.insert(self.params.i(), self.our_commitments.drain(..).collect()); Ok(commitments) } @@ -299,13 +350,13 @@ impl SecretShareMachine { commitments: HashMap>>, ) -> Result< (KeyMachine, HashMap>>), - FrostError, + PedPoPError, > { let commitments = self.verify_r1(&mut *rng, commitments)?; // Step 1: Generate secret shares for all other parties let mut res = HashMap::new(); - for l in (1 ..= self.params.n()).map(Participant) { + for l in self.params.all_participant_indexes() { // Don't insert our own shares to the byte buffer which is meant to be sent around // An app developer could accidentally send it. Best to keep this black boxed if l == self.params.i() { @@ -413,10 +464,10 @@ impl KeyMachine { mut self, rng: &mut R, mut shares: HashMap>>, - ) -> Result, FrostError> { + ) -> Result, PedPoPError> { validate_map( &shares, - &(1 ..= self.params.n()).map(Participant).collect::>(), + &self.params.all_participant_indexes().collect::>(), self.params.i(), )?; @@ -427,7 +478,7 @@ impl KeyMachine { self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes); let share = Zeroizing::new(Option::::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| { - FrostError::InvalidShare { participant: l, blame: Some(blame.clone()) } + PedPoPError::InvalidShare { participant: l, blame: Some(blame.clone()) } })?); share_bytes.zeroize(); *self.secret += share.deref(); @@ -444,7 +495,7 @@ impl KeyMachine { BatchId::Decryption(l) => (l, None), BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())), }; - FrostError::InvalidShare { participant: l, blame } + PedPoPError::InvalidShare { participant: l, blame } })?; // Stripe commitments per t and sum them in advance. 
Calculating verification shares relies on @@ -458,7 +509,7 @@ impl KeyMachine { // Calculate each user's verification share let mut verification_shares = HashMap::new(); - for i in (1 ..= self.params.n()).map(Participant) { + for i in self.params.all_participant_indexes() { verification_shares.insert( i, if i == self.params.i() { @@ -473,13 +524,10 @@ impl KeyMachine { Ok(BlameMachine { commitments, encryption: encryption.into_decryption(), - result: Some(ThresholdCore { - params, - interpolation: Interpolation::Lagrange, - secret_share: secret, - group_key: stripes[0], - verification_shares, - }), + result: Some( + ThresholdKeys::new(params, Interpolation::Lagrange, secret, verification_shares) + .map_err(PedPoPError::DkgError)?, + ), }) } } @@ -488,7 +536,7 @@ impl KeyMachine { pub struct BlameMachine { commitments: HashMap>, encryption: Decryption, - result: Option>, + result: Option>, } impl fmt::Debug for BlameMachine { @@ -520,7 +568,7 @@ impl BlameMachine { /// territory of consensus protocols. This library does not handle that nor does it provide any /// tooling to do so. This function is solely intended to force users to acknowledge they're /// completing the protocol, not processing any blame. - pub fn complete(self) -> ThresholdCore { + pub fn complete(self) -> ThresholdKeys { self.result.unwrap() } @@ -602,12 +650,12 @@ impl AdditionalBlameMachine { context: [u8; 32], n: u16, mut commitment_msgs: HashMap>>, - ) -> Result> { + ) -> Result> { let mut commitments = HashMap::new(); let mut encryption = Decryption::new(context); for i in 1 ..= n { let i = Participant::new(i).unwrap(); - let Some(msg) = commitment_msgs.remove(&i) else { Err(DkgError::MissingParticipant(i))? }; + let Some(msg) = commitment_msgs.remove(&i) else { Err(PedPoPError::MissingParticipant(i))? }; commitments.insert(i, encryption.register(i, msg).commitments); } Ok(AdditionalBlameMachine(BlameMachine { commitments, encryption, result: None })) diff --git a/crypto/dkg/pedpop/src/tests.rs b/crypto/dkg/pedpop/src/tests.rs new file mode 100644 index 00000000..483b8b3b --- /dev/null +++ b/crypto/dkg/pedpop/src/tests.rs @@ -0,0 +1,345 @@ +use std::collections::HashMap; + +use rand_core::{RngCore, CryptoRng, OsRng}; + +use ciphersuite::{Ciphersuite, Ristretto}; + +use crate::*; + +const THRESHOLD: u16 = 3; +const PARTICIPANTS: u16 = 5; + +/// Clone a map without a specific value. 
+fn clone_without( + map: &HashMap, + without: &K, +) -> HashMap { + let mut res = map.clone(); + res.remove(without).unwrap(); + res +} + +type PedPoPEncryptedMessage = EncryptedMessage::F>>; +type PedPoPSecretShares = HashMap>; + +const CONTEXT: [u8; 32] = *b"DKG Test Key Generation "; + +// Commit, then return commitment messages, enc keys, and shares +#[allow(clippy::type_complexity)] +fn commit_enc_keys_and_shares( + rng: &mut R, +) -> ( + HashMap>, + HashMap>>, + HashMap, + HashMap>, +) { + let mut machines = HashMap::new(); + let mut commitments = HashMap::new(); + let mut enc_keys = HashMap::new(); + for i in (1 ..= PARTICIPANTS).map(|i| Participant::new(i).unwrap()) { + let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); + let machine = KeyGenMachine::::new(params, CONTEXT); + let (machine, these_commitments) = machine.generate_coefficients(rng); + machines.insert(i, machine); + + commitments.insert( + i, + EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params) + .unwrap(), + ); + enc_keys.insert(i, commitments[&i].enc_key()); + } + + let mut secret_shares = HashMap::new(); + let machines = machines + .drain() + .map(|(l, machine)| { + let (machine, mut shares) = + machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); + let shares = shares + .drain() + .map(|(l, share)| { + ( + l, + EncryptedMessage::read::<&[u8]>( + &mut share.serialize().as_ref(), + // Only t/n actually matters, so hardcode i to 1 here + ThresholdParams::new(THRESHOLD, PARTICIPANTS, Participant::new(1).unwrap()).unwrap(), + ) + .unwrap(), + ) + }) + .collect::>(); + secret_shares.insert(l, shares); + (l, machine) + }) + .collect::>(); + + (machines, commitments, enc_keys, secret_shares) +} + +fn generate_secret_shares( + shares: &HashMap>, + recipient: Participant, +) -> PedPoPSecretShares { + let mut our_secret_shares = HashMap::new(); + for (i, shares) in shares { + if recipient == *i { + continue; + } + our_secret_shares.insert(*i, shares[&recipient].clone()); + } + our_secret_shares +} + +/// Fully perform the PedPoP key generation algorithm. 
+fn pedpop_gen( + rng: &mut R, +) -> HashMap> { + let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng); + + let mut verification_shares = None; + let mut group_key = None; + machines + .drain() + .map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete(); + + // Verify the verification_shares are agreed upon + if verification_shares.is_none() { + verification_shares = Some( + these_keys + .params() + .all_participant_indexes() + .map(|i| (i, these_keys.original_verification_share(i))) + .collect::>(), + ); + } + assert_eq!( + verification_shares.as_ref().unwrap(), + &these_keys + .params() + .all_participant_indexes() + .map(|i| (i, these_keys.original_verification_share(i))) + .collect::>() + ); + + // Verify the group keys are agreed upon + if group_key.is_none() { + group_key = Some(these_keys.group_key()); + } + assert_eq!(group_key.unwrap(), these_keys.group_key()); + + (i, these_keys) + }) + .collect::>() +} + +const ONE: Participant = Participant::new(1).unwrap(); +const TWO: Participant = Participant::new(2).unwrap(); + +#[test] +fn test_pedpop() { + let _ = core::hint::black_box(pedpop_gen::<_, Ristretto>(&mut OsRng)); +} + +fn test_blame( + commitment_msgs: &HashMap>>, + machines: Vec>, + msg: &PedPoPEncryptedMessage, + blame: &Option>, +) { + for machine in machines { + let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone()); + assert_eq!(blamed, ONE); + // Verify additional blame also works + assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE); + + // Verify machines constructed with AdditionalBlameMachine::new work + assert_eq!( + AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame( + ONE, + TWO, + msg.clone(), + blame.clone() + ), + ONE, + ); + } +} + +// TODO: Write a macro which expands to the following +#[test] +fn invalid_encryption_pop_blame() { + let (mut machines, commitment_msgs, _, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + // Mutate the PoP of the encrypted message from 1 to 2 + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop(); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == TWO { + assert_eq!( + machine.err(), + Some(PedPoPError::InvalidShare { participant: ONE, blame: None }) + ); + // Explicitly declare we have a blame object, which happens to be None since invalid PoP + // is self-explainable + blame = Some(None); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); +} + +#[test] +fn invalid_ecdh_blame() { + let (mut machines, commitment_msgs, _, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + // Mutate the share to trigger a blame event + // Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass + // While here, 2 is malicious, this is so 1 creates the blame proof + // We then malleate 1's blame proof, so 1 ends up malicious + // Doesn't simply invalidate the PoP as that won't have a blame statement + // By mutating the encrypted data, we do ensure a blame statement is created + secret_shares + 
.get_mut(&TWO) + .unwrap() + .get_mut(&ONE) + .unwrap() + .invalidate_msg(&mut OsRng, CONTEXT, TWO); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == ONE { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + blame.as_mut().unwrap().as_mut().unwrap().invalidate_key(); + test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); +} + +// This should be largely equivalent to the prior test +#[test] +fn invalid_dleq_blame() { + let (mut machines, commitment_msgs, _, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + secret_shares + .get_mut(&TWO) + .unwrap() + .get_mut(&ONE) + .unwrap() + .invalidate_msg(&mut OsRng, CONTEXT, TWO); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == ONE { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq(); + test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); +} + +#[test] +fn invalid_share_serialization_blame() { + let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization( + &mut OsRng, + CONTEXT, + ONE, + enc_keys[&TWO], + ); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == TWO { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); +} + +#[test] +fn invalid_share_value_blame() { + let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value( + &mut OsRng, + CONTEXT, + ONE, + enc_keys[&TWO], + ); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == TWO { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); +} diff --git a/crypto/dkg/promote/Cargo.toml 
b/crypto/dkg/promote/Cargo.toml new file mode 100644 index 00000000..e5f57ce9 --- /dev/null +++ b/crypto/dkg/promote/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "dkg-promote" +version = "0.6.0" +description = "Promotions for keys from the dkg crate" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/promote" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +thiserror = { version = "2", default-features = false, features = ["std"] } + +rand_core = { version = "0.6", default-features = false, features = ["std"] } + +transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } +dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } + +dkg = { path = "../", default-features = false, features = ["std"] } + +[dev-dependencies] +zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } +dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] } diff --git a/crypto/dkg/promote/LICENSE b/crypto/dkg/promote/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/promote/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/promote/README.md b/crypto/dkg/promote/README.md new file mode 100644 index 00000000..a5f8a9e6 --- /dev/null +++ b/crypto/dkg/promote/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation - Promote + +This crate implements 'promotions' for keys from the [`dkg`](https://docs.rs/dkg) crate. A promotion +takes a set of keys and maps it to a different `Ciphersuite`. 
+ +This crate was originally part of the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit +[669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/promote/src/lib.rs similarity index 54% rename from crypto/dkg/src/promote.rs rename to crypto/dkg/promote/src/lib.rs index d92b382d..6fb08807 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/promote/src/lib.rs @@ -1,25 +1,52 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +// This crate requires `dleq` which doesn't support no-std via std-shims +// #![cfg_attr(not(feature = "std"), no_std)] + use core::{marker::PhantomData, ops::Deref}; use std::{ io::{self, Read, Write}, - sync::Arc, collections::HashMap, }; use rand_core::{RngCore, CryptoRng}; -use ciphersuite::{ - group::{ff::Field, GroupEncoding}, - Ciphersuite, -}; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use transcript::{Transcript, RecommendedTranscript}; use dleq::DLEqProof; -use crate::{Participant, DkgError, ThresholdCore, ThresholdKeys, validate_map}; +pub use dkg::*; -/// Promote a set of keys to another Ciphersuite definition. -pub trait CiphersuitePromote { - fn promote(self) -> ThresholdKeys; +#[cfg(test)] +mod tests; + +/// Errors encountered when promoting keys. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum PromotionError { + /// Invalid participant identifier. + #[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")] + InvalidParticipant { + /// The total amount of participants. + n: u16, + /// The specified participant. + participant: Participant, + }, + + /// An incorrect amount of participants was specified. + #[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")] + IncorrectAmountOfParticipants { + /// The threshold required. + t: u16, + /// The total amount of participants. + n: u16, + /// The amount of participants specified. + amount: usize, + }, + + /// Participant provided an invalid proof. + #[error("invalid proof {0}")] + InvalidProof(Participant), } fn transcript(key: &G, i: Participant) -> RecommendedTranscript { @@ -68,8 +95,9 @@ pub struct GeneratorPromotion { } impl> GeneratorPromotion { - /// Begin promoting keys from one generator to another. Returns a proof this share was properly - /// promoted. + /// Begin promoting keys from one generator to another. + /// + /// Returns a proof this share was properly promoted. 
pub fn promote( rng: &mut R, base: ThresholdKeys, @@ -79,7 +107,7 @@ impl> GeneratorPromotion< share: C2::generator() * base.secret_share().deref(), proof: DLEqProof::prove( rng, - &mut transcript(&base.core.group_key(), base.params().i), + &mut transcript(&base.original_group_key(), base.params().i()), &[C1::generator(), C2::generator()], base.secret_share(), ), @@ -92,36 +120,49 @@ impl> GeneratorPromotion< pub fn complete( self, proofs: &HashMap>, - ) -> Result, DkgError<()>> { + ) -> Result, PromotionError> { let params = self.base.params(); - validate_map(proofs, &(1 ..= params.n).map(Participant).collect::>(), params.i)?; - - let original_shares = self.base.verification_shares(); + if proofs.len() != (usize::from(params.n()) - 1) { + Err(PromotionError::IncorrectAmountOfParticipants { + t: params.n(), + n: params.n(), + amount: proofs.len() + 1, + })?; + } + for i in proofs.keys().copied() { + if u16::from(i) > params.n() { + Err(PromotionError::InvalidParticipant { n: params.n(), participant: i })?; + } + } let mut verification_shares = HashMap::new(); - verification_shares.insert(params.i, self.proof.share); - for (i, proof) in proofs { - let i = *i; + verification_shares.insert(params.i(), self.proof.share); + for i in 1 ..= params.n() { + let i = Participant::new(i).unwrap(); + if i == params.i() { + continue; + } + + let proof = proofs.get(&i).unwrap(); proof .proof .verify( - &mut transcript(&self.base.core.group_key(), i), + &mut transcript(&self.base.original_group_key(), i), &[C1::generator(), C2::generator()], - &[original_shares[&i], proof.share], + &[self.base.original_verification_share(i), proof.share], ) - .map_err(|_| DkgError::InvalidCommitments(i))?; + .map_err(|_| PromotionError::InvalidProof(i))?; verification_shares.insert(i, proof.share); } - Ok(ThresholdKeys { - core: Arc::new(ThresholdCore::new( + Ok( + ThresholdKeys::new( params, - self.base.core.interpolation.clone(), + self.base.interpolation().clone(), self.base.secret_share().clone(), verification_shares, - )), - scalar: C2::F::ONE, - offset: C2::F::ZERO, - }) + ) + .unwrap(), + ) } } diff --git a/crypto/dkg/promote/src/tests.rs b/crypto/dkg/promote/src/tests.rs new file mode 100644 index 00000000..1cae60d9 --- /dev/null +++ b/crypto/dkg/promote/src/tests.rs @@ -0,0 +1,113 @@ +use core::marker::PhantomData; +use std::collections::HashMap; + +use zeroize::{Zeroize, Zeroizing}; +use rand_core::OsRng; + +use ciphersuite::{ + group::{ff::Field, Group}, + Ciphersuite, Ristretto, +}; + +use dkg::*; +use dkg_recovery::recover_key; +use crate::{GeneratorPromotion, GeneratorProof}; + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +struct AltGenerator { + _curve: PhantomData, +} + +impl Ciphersuite for AltGenerator { + type F = C::F; + type G = C::G; + type H = C::H; + + const ID: &'static [u8] = b"Alternate Ciphersuite"; + + fn generator() -> Self::G { + C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") + } + + fn reduce_512(scalar: [u8; 64]) -> Self::F { + ::reduce_512(scalar) + } + + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { + ::hash_to_F(dst, data) + } +} + +/// Clone a map without a specific value. 
+pub fn clone_without( + map: &HashMap, + without: &K, +) -> HashMap { + let mut res = map.clone(); + res.remove(without).unwrap(); + res +} + +// Test promotion of threshold keys to another generator +#[test] +fn test_generator_promotion() { + // Generate a set of `ThresholdKeys` + const PARTICIPANTS: u16 = 5; + let keys: [ThresholdKeys<_>; PARTICIPANTS as usize] = { + let shares: [::F; PARTICIPANTS as usize] = + core::array::from_fn(|_| ::F::random(&mut OsRng)); + let verification_shares = (0 .. PARTICIPANTS) + .map(|i| { + ( + Participant::new(i + 1).unwrap(), + ::generator() * shares[usize::from(i)], + ) + }) + .collect::>(); + core::array::from_fn(|i| { + ThresholdKeys::new( + ThresholdParams::new( + PARTICIPANTS, + PARTICIPANTS, + Participant::new(u16::try_from(i + 1).unwrap()).unwrap(), + ) + .unwrap(), + Interpolation::Constant(vec![::F::ONE; PARTICIPANTS as usize]), + Zeroizing::new(shares[i]), + verification_shares.clone(), + ) + .unwrap() + }) + }; + + // Perform the promotion + let mut promotions = HashMap::new(); + let mut proofs = HashMap::new(); + for keys in &keys { + let i = keys.params().i(); + let (promotion, proof) = + GeneratorPromotion::<_, AltGenerator>::promote(&mut OsRng, keys.clone()); + promotions.insert(i, promotion); + proofs.insert( + i, + GeneratorProof::::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap(), + ); + } + + // Complete the promotion, and verify it worked + let new_group_key = AltGenerator::::generator() * *recover_key(&keys).unwrap(); + for (i, promoting) in promotions.drain() { + let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); + assert_eq!(keys[usize::from(u16::from(i) - 1)].params(), promoted.params()); + assert_eq!(keys[usize::from(u16::from(i) - 1)].secret_share(), promoted.secret_share()); + assert_eq!(new_group_key, promoted.group_key()); + for l in 0 .. 
PARTICIPANTS { + let verification_share = + promoted.original_verification_share(Participant::new(l + 1).unwrap()); + assert_eq!( + AltGenerator::::generator() * **keys[usize::from(l)].secret_share(), + verification_share + ); + } + } +} diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml new file mode 100644 index 00000000..e2e7485c --- /dev/null +++ b/crypto/dkg/recovery/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "dkg-recovery" +version = "0.6.0" +description = "Recover a secret-shared key from a collection of dkg::ThresholdKeys" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recovery" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +zeroize = { version = "^1.5", default-features = false } + +thiserror = { version = "2", default-features = false } + +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } +dkg = { path = "../", default-features = false } + +[features] +std = [ + "zeroize/std", + "thiserror/std", + "ciphersuite/std", + "dkg/std", +] +default = ["std"] diff --git a/crypto/dkg/recovery/LICENSE b/crypto/dkg/recovery/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/recovery/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/recovery/README.md b/crypto/dkg/recovery/README.md new file mode 100644 index 00000000..eaad6ed5 --- /dev/null +++ b/crypto/dkg/recovery/README.md @@ -0,0 +1,14 @@ +# Distributed Key Generation + +A crate implementing a type for keys, presumably the result of a distributed key generation +protocol, and utilities from there. + +This crate used to host implementations of distributed key generation protocols as well (hence the +name). 
Those have been smashed into their own crates, such as
+[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop).
+
+Before being smashed, this crate was [audited by Cypher Stack in March 2023](
+  https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
+), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
+  https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
+). Any subsequent changes have not undergone auditing.
diff --git a/crypto/dkg/recovery/src/lib.rs b/crypto/dkg/recovery/src/lib.rs
new file mode 100644
index 00000000..bef0956b
--- /dev/null
+++ b/crypto/dkg/recovery/src/lib.rs
@@ -0,0 +1,85 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![doc = include_str!("../README.md")]
+#![no_std]
+
+use core::ops::{Deref, DerefMut};
+extern crate alloc;
+use alloc::vec::Vec;
+
+use zeroize::Zeroizing;
+
+use ciphersuite::Ciphersuite;
+
+pub use dkg::*;
+
+/// Errors encountered when recovering a secret-shared key from a collection of
+/// `dkg::ThresholdKeys`.
+#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
+pub enum RecoveryError {
+  /// No keys were provided.
+  #[error("no keys provided")]
+  NoKeysProvided,
+  /// Not enough keys were provided.
+  #[error("not enough keys provided (threshold required {required}, provided {provided})")]
+  NotEnoughKeysProvided { required: u16, provided: usize },
+  /// The keys had inconsistent parameters.
+  #[error("keys had inconsistent parameters")]
+  InconsistentParameters,
+  /// The keys are from distinct secret-sharing sessions or otherwise corrupt.
+  #[error("recovery failed")]
+  Failure,
+  /// An error propagated from the underlying `dkg` crate.
+  #[error("error from dkg ({0})")]
+  DkgError(DkgError),
+}
+
+/// Recover a shared secret from a collection of `dkg::ThresholdKeys`.
+pub fn recover_key( + keys: &[ThresholdKeys], +) -> Result, RecoveryError> { + let included = keys.iter().map(|keys| keys.params().i()).collect::>(); + + let keys_len = keys.len(); + let mut keys = keys.iter(); + let first_keys = keys.next().ok_or(RecoveryError::NoKeysProvided)?; + { + let t = first_keys.params().t(); + if keys_len < usize::from(t) { + Err(RecoveryError::NotEnoughKeysProvided { required: t, provided: keys_len })?; + } + } + { + let first_params = ( + first_keys.params().t(), + first_keys.params().n(), + first_keys.group_key(), + first_keys.current_scalar(), + first_keys.current_offset(), + ); + for keys in keys.clone() { + let params = ( + keys.params().t(), + keys.params().n(), + keys.group_key(), + keys.current_scalar(), + keys.current_offset(), + ); + if params != first_params { + Err(RecoveryError::InconsistentParameters)?; + } + } + } + + let mut res: Zeroizing<_> = + first_keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().clone(); + for keys in keys { + *res.deref_mut() += + keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().deref(); + } + + if (C::generator() * res.deref()) != first_keys.group_key() { + Err(RecoveryError::Failure)?; + } + + Ok(res) +} diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index b7dc4b17..b98236a8 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -2,39 +2,29 @@ #![doc = include_str!("../README.md")] #![cfg_attr(not(feature = "std"), no_std)] -use core::fmt::{self, Debug}; +use core::{ + ops::Deref, + fmt::{self, Debug}, +}; +use std_shims::{sync::Arc, vec, vec::Vec, collections::HashMap, io}; -use thiserror::Error; +use zeroize::{Zeroize, Zeroizing}; -use zeroize::Zeroize; - -/// MuSig-style key aggregation. -pub mod musig; - -/// Encryption types and utilities used to secure DKG messages. -#[cfg(feature = "std")] -pub mod encryption; - -/// The PedPoP distributed key generation protocol described in the -/// [FROST paper](https://eprint.iacr.org/2020/852), augmented to be verifiable. -#[cfg(feature = "std")] -pub mod pedpop; - -/// Promote keys between ciphersuites. -#[cfg(feature = "std")] -pub mod promote; - -/// Tests for application-provided curves and algorithms. -#[cfg(any(test, feature = "tests"))] -pub mod tests; +use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + GroupEncoding, + }, + Ciphersuite, +}; /// The ID of a participant, defined as a non-zero u16. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)] #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] -pub struct Participant(pub(crate) u16); +pub struct Participant(u16); impl Participant { /// Create a new Participant identifier from a u16. - pub fn new(i: u16) -> Option { + pub const fn new(i: u16) -> Option { if i == 0 { None } else { @@ -44,7 +34,7 @@ impl Participant { /// Convert a Participant identifier to bytes. #[allow(clippy::wrong_self_convention)] - pub fn to_bytes(&self) -> [u8; 2] { + pub const fn to_bytes(&self) -> [u8; 2] { self.0.to_le_bytes() } } @@ -61,574 +51,631 @@ impl fmt::Display for Participant { } } -/// Various errors possible during key generation. -#[derive(Clone, PartialEq, Eq, Debug, Error)] -pub enum DkgError { +/// Errors encountered when working with threshold keys. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum DkgError { /// A parameter was zero. 
- #[cfg_attr(feature = "std", error("a parameter was 0 (threshold {0}, participants {1})"))] - ZeroParameter(u16, u16), + #[error("a parameter was 0 (threshold {t}, participants {n})")] + ZeroParameter { + /// The specified threshold. + t: u16, + /// The specified total amount of participants. + n: u16, + }, + /// The threshold exceeded the amount of participants. - #[cfg_attr(feature = "std", error("invalid threshold (max {1}, got {0})"))] - InvalidThreshold(u16, u16), + #[error("invalid threshold (max {n}, got {t})")] + InvalidThreshold { + /// The specified threshold. + t: u16, + /// The specified total amount of participants. + n: u16, + }, + /// Invalid participant identifier. - #[cfg_attr( - feature = "std", - error("invalid participant (0 < participant <= {0}, yet participant is {1})") - )] - InvalidParticipant(u16, Participant), + #[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")] + InvalidParticipant { + /// The total amount of participants. + n: u16, + /// The specified participant. + participant: Participant, + }, + + /// An incorrect amount of participants was specified. + #[error("incorrect amount of verification shares (n = {n} yet {shares} provided)")] + IncorrectAmountOfVerificationShares { + /// The amount of participants. + n: u16, + /// The amount of shares provided. + shares: usize, + }, + + /// An inapplicable method of interpolation was specified. + #[error("inapplicable method of interpolation ({0})")] + InapplicableInterpolation(&'static str), + + /// An incorrect amount of participants was specified. + #[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")] + IncorrectAmountOfParticipants { + /// The threshold required. + t: u16, + /// The total amount of participants. + n: u16, + /// The amount of participants specified. + amount: usize, + }, - /// Invalid signing set. - #[cfg_attr(feature = "std", error("invalid signing set"))] - InvalidSigningSet, - /// Invalid amount of participants. - #[cfg_attr(feature = "std", error("invalid participant quantity (expected {0}, got {1})"))] - InvalidParticipantQuantity(usize, usize), /// A participant was duplicated. - #[cfg_attr(feature = "std", error("duplicated participant ({0})"))] + #[error("a participant ({0}) was duplicated")] DuplicatedParticipant(Participant), - /// A participant was missing. - #[cfg_attr(feature = "std", error("missing participant {0}"))] - MissingParticipant(Participant), - /// An invalid proof of knowledge was provided. - #[cfg_attr(feature = "std", error("invalid proof of knowledge (participant {0})"))] - InvalidCommitments(Participant), - /// An invalid DKG share was provided. - #[cfg_attr(feature = "std", error("invalid share (participant {participant}, blame {blame})"))] - InvalidShare { participant: Participant, blame: Option }, + /// Not participating in declared signing set. + #[error("not participating in declared signing set")] + NotParticipating, } -#[cfg(feature = "std")] -mod lib { - pub use super::*; +// Manually implements BorshDeserialize so we can enforce it's a valid index +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for Participant { + fn deserialize_reader(reader: &mut R) -> io::Result { + Participant::new(u16::deserialize_reader(reader)?) + .ok_or_else(|| io::Error::other("invalid participant")) + } +} - use core::ops::Deref; - use std::{io, sync::Arc, collections::HashMap}; +/// Parameters for a multisig. 
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] +pub struct ThresholdParams { + /// Participants needed to sign on behalf of the group. + t: u16, + /// Amount of participants. + n: u16, + /// Index of the participant being acted for. + i: Participant, +} - use zeroize::Zeroizing; - - use ciphersuite::{ - group::{ - ff::{Field, PrimeField}, - GroupEncoding, - }, - Ciphersuite, - }; - - #[cfg(feature = "borsh")] - impl borsh::BorshDeserialize for Participant { - fn deserialize_reader(reader: &mut R) -> io::Result { - Participant::new(u16::deserialize_reader(reader)?) - .ok_or_else(|| io::Error::other("invalid participant")) +/// An iterator over all participant indexes. +struct AllParticipantIndexes { + i: u16, + n: u16, +} +impl Iterator for AllParticipantIndexes { + type Item = Participant; + fn next(&mut self) -> Option { + if self.i > self.n { + None?; } + let res = Participant::new(self.i).unwrap(); + + // If i == n == u16::MAX, we cause `i > n` by setting `n` to `0` so the iterator becomes empty + if self.i == u16::MAX { + self.n = 0; + } else { + self.i += 1; + } + + Some(res) + } +} + +impl ThresholdParams { + /// Create a new set of parameters. + pub const fn new(t: u16, n: u16, i: Participant) -> Result { + if (t == 0) || (n == 0) { + return Err(DkgError::ZeroParameter { t, n }); + } + + if t > n { + return Err(DkgError::InvalidThreshold { t, n }); + } + if i.0 > n { + return Err(DkgError::InvalidParticipant { n, participant: i }); + } + + Ok(ThresholdParams { t, n, i }) } - // Validate a map of values to have the expected included participants - pub(crate) fn validate_map( - map: &HashMap, - included: &[Participant], - ours: Participant, - ) -> Result<(), DkgError> { - if (map.len() + 1) != included.len() { - Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?; - } + /// The threshold for a multisig with these parameters. + pub const fn t(&self) -> u16 { + self.t + } + /// The amount of participants for a multisig with these parameters. + pub const fn n(&self) -> u16 { + self.n + } + /// The participant index of the share with these parameters. + pub const fn i(&self) -> Participant { + self.i + } - for included in included { - if *included == ours { - if map.contains_key(included) { - Err(DkgError::DuplicatedParticipant(*included))?; + /// An iterator over all participant indexes. + pub fn all_participant_indexes(&self) -> impl Iterator { + AllParticipantIndexes { i: 1, n: self.n } + } +} + +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for ThresholdParams { + fn deserialize_reader(reader: &mut R) -> io::Result { + let t = u16::deserialize_reader(reader)?; + let n = u16::deserialize_reader(reader)?; + let i = Participant::deserialize_reader(reader)?; + ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}"))) + } +} + +/// A method of interpolation. +#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] +pub enum Interpolation { + /// A list of constant coefficients, one for each of the secret key shares. + /* + There's no benefit to using a full linear combination here, as the additive term would have + an entirely known evaluation with a fixed, public coefficient of `1`. Accordingly, the entire + key can simply be offset with the additive term to achieve the same effect. + */ + Constant(Vec), + /// Lagrange interpolation. + Lagrange, +} + +impl Interpolation { + /// The interpolation factor for this participant, within this signing set. 
+ fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F { + match self { + Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)], + Interpolation::Lagrange => { + let i_f = F::from(u64::from(u16::from(i))); + + let mut num = F::ONE; + let mut denom = F::ONE; + for l in included { + if i == *l { + continue; + } + + let share = F::from(u64::from(u16::from(*l))); + num *= share; + denom *= share - i_f; } - continue; - } - if !map.contains_key(included) { - Err(DkgError::MissingParticipant(*included))?; + // Safe as this will only be 0 if we're part of the above loop + // (which we have an if case to avoid) + num * denom.invert().unwrap() + } + } + } +} + +/// A key share for a thresholdized secret key. +/// +/// This is the 'core' structure containing all relevant data, expected to be wrapped into an +/// heap-allocated pointer to minimize copies on the stack (`ThresholdKeys`, the publicly exposed +/// type). +#[derive(Clone, PartialEq, Eq)] +struct ThresholdCore { + params: ThresholdParams, + group_key: C::G, + verification_shares: HashMap, + interpolation: Interpolation, + secret_share: Zeroizing, +} + +impl fmt::Debug for ThresholdCore { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("ThresholdCore") + .field("params", &self.params) + .field("group_key", &self.group_key) + .field("verification_shares", &self.verification_shares) + .field("interpolation", &self.interpolation) + .finish_non_exhaustive() + } +} + +impl Zeroize for ThresholdCore { + fn zeroize(&mut self) { + self.params.zeroize(); + self.group_key.zeroize(); + for share in self.verification_shares.values_mut() { + share.zeroize(); + } + self.interpolation.zeroize(); + self.secret_share.zeroize(); + } +} + +/// Threshold keys usable for signing. +#[derive(Clone, Debug, Zeroize)] +pub struct ThresholdKeys { + // Core keys. + #[zeroize(skip)] + core: Arc>>, + + // Scalar applied to these keys. + scalar: C::F, + // Offset applied to these keys. + offset: C::F, +} + +/// View of keys, interpolated and with the expected linear combination taken for usage. +#[derive(Clone)] +pub struct ThresholdView { + interpolation: Interpolation, + scalar: C::F, + offset: C::F, + group_key: C::G, + included: Vec, + secret_share: Zeroizing, + original_verification_shares: HashMap, + verification_shares: HashMap, +} + +impl fmt::Debug for ThresholdView { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("ThresholdView") + .field("interpolation", &self.interpolation) + .field("scalar", &self.scalar) + .field("offset", &self.offset) + .field("group_key", &self.group_key) + .field("included", &self.included) + .field("original_verification_shares", &self.original_verification_shares) + .field("verification_shares", &self.verification_shares) + .finish_non_exhaustive() + } +} + +impl Zeroize for ThresholdView { + fn zeroize(&mut self) { + self.scalar.zeroize(); + self.offset.zeroize(); + self.group_key.zeroize(); + self.included.zeroize(); + self.secret_share.zeroize(); + for share in self.original_verification_shares.values_mut() { + share.zeroize(); + } + for share in self.verification_shares.values_mut() { + share.zeroize(); + } + } +} + +impl ThresholdKeys { + /// Create a new set of ThresholdKeys. 
+ pub fn new( + params: ThresholdParams, + interpolation: Interpolation, + secret_share: Zeroizing, + verification_shares: HashMap, + ) -> Result, DkgError> { + if verification_shares.len() != usize::from(params.n()) { + Err(DkgError::IncorrectAmountOfVerificationShares { + n: params.n(), + shares: verification_shares.len(), + })?; + } + for participant in verification_shares.keys().copied() { + if u16::from(participant) > params.n() { + Err(DkgError::InvalidParticipant { n: params.n(), participant })?; } } + match &interpolation { + Interpolation::Constant(_) => { + if params.t() != params.n() { + Err(DkgError::InapplicableInterpolation("constant interpolation for keys where t != n"))?; + } + } + Interpolation::Lagrange => {} + } + + let t = (1 ..= params.t()).map(Participant).collect::>(); + let group_key = + t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum(); + + Ok(ThresholdKeys { + core: Arc::new(Zeroizing::new(ThresholdCore { + params, + interpolation, + secret_share, + group_key, + verification_shares, + })), + scalar: C::F::ONE, + offset: C::F::ZERO, + }) + } + + /// Scale the keys by a given scalar to allow for various account and privacy schemes. + /// + /// This scalar is ephemeral and will not be included when these keys are serialized. The + /// scalar is applied on top of any already-existing scalar/offset. + /// + /// Returns `None` if the scalar is equal to `0`. + #[must_use] + pub fn scale(mut self, scalar: C::F) -> Option> { + if bool::from(scalar.is_zero()) { + None?; + } + self.scalar *= scalar; + self.offset *= scalar; + Some(self) + } + + /// Offset the keys by a given scalar to allow for various account and privacy schemes. + /// + /// This offset is ephemeral and will not be included when these keys are serialized. The + /// offset is applied on top of any already-existing scalar/offset. + #[must_use] + pub fn offset(mut self, offset: C::F) -> ThresholdKeys { + self.offset += offset; + self + } + + /// Return the current scalar in-use for these keys. + pub fn current_scalar(&self) -> C::F { + self.scalar + } + + /// Return the current offset in-use for these keys. + pub fn current_offset(&self) -> C::F { + self.offset + } + + /// Return the parameters for these keys. + pub fn params(&self) -> ThresholdParams { + self.core.params + } + + /// Return the original group key, without any tweaks applied. + pub fn original_group_key(&self) -> C::G { + self.core.group_key + } + + /// Return the interpolation method for these keys. + pub fn interpolation(&self) -> &Interpolation { + &self.core.interpolation + } + + /// Return the group key, with the expected linear combination taken. + pub fn group_key(&self) -> C::G { + (self.core.group_key * self.scalar) + (C::generator() * self.offset) + } + + /// Return the secret share for these keys. + pub fn secret_share(&self) -> &Zeroizing { + &self.core.secret_share + } + + /// Return the original (untweaked) verification share for the specified participant. + /// + /// This will panic if the participant index is invalid for these keys. + pub fn original_verification_share(&self, l: Participant) -> C::G { + self.core.verification_shares[&l] + } + + /// Obtain a view of these keys, interpolated for the specified signing set, with the specified + /// linear combination taken. 
+ pub fn view(&self, mut included: Vec) -> Result, DkgError> { + if (included.len() < self.params().t.into()) || + (usize::from(self.params().n()) < included.len()) + { + Err(DkgError::IncorrectAmountOfParticipants { + t: self.params().t, + n: self.params().n, + amount: included.len(), + })?; + } + included.sort(); + { + let mut found = included[0] == self.params().i(); + for i in 1 .. included.len() { + if included[i - 1] == included[i] { + Err(DkgError::DuplicatedParticipant(included[i]))?; + } + found |= included[i] == self.params().i(); + } + if !found { + Err(DkgError::NotParticipating)?; + } + } + { + let last = *included.last().unwrap(); + if u16::from(last) > self.params().n() { + Err(DkgError::InvalidParticipant { n: self.params().n(), participant: last })?; + } + } + + // The interpolation occurs multiplicatively, letting us scale by the scalar now + let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref()); + let mut secret_share = Zeroizing::new( + self.core.interpolation.interpolation_factor(self.params().i(), &included) * + secret_share_scaled.deref(), + ); + + let mut verification_shares = HashMap::with_capacity(included.len()); + for i in &included { + let verification_share = self.core.verification_shares[i]; + let verification_share = verification_share * + self.scalar * + self.core.interpolation.interpolation_factor(*i, &included); + verification_shares.insert(*i, verification_share); + } + + /* + The offset is included by adding it to the participant with the lowest ID. + + This is done after interpolating to ensure, regardless of the method of interpolation, that + the method of interpolation does not scale the offset. For Lagrange interpolation, we could + add the offset to every key share before interpolating, yet for Constant interpolation, we + _have_ to add it as we do here (which also works even when we intend to perform Lagrange + interpolation). + */ + if included[0] == self.params().i() { + *secret_share += self.offset; + } + *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset; + + Ok(ThresholdView { + interpolation: self.core.interpolation.clone(), + scalar: self.scalar, + offset: self.offset, + group_key: self.group_key(), + secret_share, + original_verification_shares: self.core.verification_shares.clone(), + verification_shares, + included, + }) + } + + /// Write these keys to a type satisfying `std::io::Write`. + /// + /// This will not include the ephemeral scalar/offset. + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?; + writer.write_all(C::ID)?; + writer.write_all(&self.core.params.t.to_le_bytes())?; + writer.write_all(&self.core.params.n.to_le_bytes())?; + writer.write_all(&self.core.params.i.to_bytes())?; + match &self.core.interpolation { + Interpolation::Constant(c) => { + writer.write_all(&[0])?; + for c in c { + writer.write_all(c.to_repr().as_ref())?; + } + } + Interpolation::Lagrange => writer.write_all(&[1])?, + }; + let mut share_bytes = self.core.secret_share.to_repr(); + writer.write_all(share_bytes.as_ref())?; + share_bytes.as_mut().zeroize(); + for l in 1 ..= self.core.params.n { + writer.write_all( + self.core.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref(), + )?; + } Ok(()) } - /// Parameters for a multisig. 
- // These fields should not be made public as they should be static - #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] - #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] - pub struct ThresholdParams { - /// Participants needed to sign on behalf of the group. - pub(crate) t: u16, - /// Amount of participants. - pub(crate) n: u16, - /// Index of the participant being acted for. - pub(crate) i: Participant, + /// Serialize these keys to a `Vec`. + /// + /// This will not include the ephemeral scalar/offset. + pub fn serialize(&self) -> Zeroizing> { + let mut serialized = Zeroizing::new(vec![]); + self.write::>(serialized.as_mut()).unwrap(); + serialized } - impl ThresholdParams { - /// Create a new set of parameters. - pub fn new(t: u16, n: u16, i: Participant) -> Result> { - if (t == 0) || (n == 0) { - Err(DkgError::ZeroParameter(t, n))?; + /// Read keys from a type satisfying `std::io::Read`. + pub fn read(reader: &mut R) -> io::Result> { + { + let different = || io::Error::other("deserializing ThresholdKeys for another curve"); + + let mut id_len = [0; 4]; + reader.read_exact(&mut id_len)?; + if u32::try_from(C::ID.len()).unwrap().to_le_bytes() != id_len { + Err(different())?; } - if t > n { - Err(DkgError::InvalidThreshold(t, n))?; - } - if u16::from(i) > n { - Err(DkgError::InvalidParticipant(n, i))?; - } - - Ok(ThresholdParams { t, n, i }) - } - - /// Return the threshold for a multisig with these parameters. - pub fn t(&self) -> u16 { - self.t - } - /// Return the amount of participants for a multisig with these parameters. - pub fn n(&self) -> u16 { - self.n - } - /// Return the participant index of the share with these parameters. - pub fn i(&self) -> Participant { - self.i - } - } - - #[cfg(feature = "borsh")] - impl borsh::BorshDeserialize for ThresholdParams { - fn deserialize_reader(reader: &mut R) -> io::Result { - let t = u16::deserialize_reader(reader)?; - let n = u16::deserialize_reader(reader)?; - let i = Participant::deserialize_reader(reader)?; - ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}"))) - } - } - - #[derive(Clone, PartialEq, Eq, Debug, Zeroize)] - pub(crate) enum Interpolation { - Constant(Vec), - Lagrange, - } - - impl Interpolation { - pub(crate) fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F { - match self { - Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)], - Interpolation::Lagrange => { - let i_f = F::from(u64::from(u16::from(i))); - - let mut num = F::ONE; - let mut denom = F::ONE; - for l in included { - if i == *l { - continue; - } - - let share = F::from(u64::from(u16::from(*l))); - num *= share; - denom *= share - i_f; - } - - // Safe as this will only be 0 if we're part of the above loop - // (which we have an if case to avoid) - num * denom.invert().unwrap() - } + let mut id = vec![0; C::ID.len()]; + reader.read_exact(&mut id)?; + if id != C::ID { + Err(different())?; } } - } - /// Keys and verification shares generated by a DKG. - /// Called core as they're expected to be wrapped into an Arc before usage in various operations. - #[derive(Clone, PartialEq, Eq)] - pub struct ThresholdCore { - /// Threshold Parameters. - pub(crate) params: ThresholdParams, - /// The interpolation method used. - pub(crate) interpolation: Interpolation, - - /// Secret share key. - pub(crate) secret_share: Zeroizing, - /// Group key. - pub(crate) group_key: C::G, - /// Verification shares. 
- pub(crate) verification_shares: HashMap, - } - - impl fmt::Debug for ThresholdCore { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt - .debug_struct("ThresholdCore") - .field("params", &self.params) - .field("interpolation", &self.interpolation) - .field("group_key", &self.group_key) - .field("verification_shares", &self.verification_shares) - .finish_non_exhaustive() - } - } - - impl Zeroize for ThresholdCore { - fn zeroize(&mut self) { - self.params.zeroize(); - self.interpolation.zeroize(); - self.secret_share.zeroize(); - self.group_key.zeroize(); - for share in self.verification_shares.values_mut() { - share.zeroize(); - } - } - } - - impl ThresholdCore { - pub(crate) fn new( - params: ThresholdParams, - interpolation: Interpolation, - secret_share: Zeroizing, - verification_shares: HashMap, - ) -> ThresholdCore { - let t = (1 ..= params.t()).map(Participant).collect::>(); - let group_key = - t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum(); - ThresholdCore { params, interpolation, secret_share, group_key, verification_shares } - } - - /// Parameters for these keys. - pub fn params(&self) -> ThresholdParams { - self.params - } - - /// Secret share for these keys. - pub fn secret_share(&self) -> &Zeroizing { - &self.secret_share - } - - /// Group key for these keys. - pub fn group_key(&self) -> C::G { - self.group_key - } - - pub(crate) fn verification_shares(&self) -> HashMap { - self.verification_shares.clone() - } - - /// Write these keys to a type satisfying std::io::Write. - pub fn write(&self, writer: &mut W) -> io::Result<()> { - writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?; - writer.write_all(C::ID)?; - writer.write_all(&self.params.t.to_le_bytes())?; - writer.write_all(&self.params.n.to_le_bytes())?; - writer.write_all(&self.params.i.to_bytes())?; - match &self.interpolation { - Interpolation::Constant(c) => { - writer.write_all(&[0])?; - for c in c { - writer.write_all(c.to_repr().as_ref())?; - } - } - Interpolation::Lagrange => writer.write_all(&[1])?, + let (t, n, i) = { + let mut read_u16 = || -> io::Result { + let mut value = [0; 2]; + reader.read_exact(&mut value)?; + Ok(u16::from_le_bytes(value)) }; - let mut share_bytes = self.secret_share.to_repr(); - writer.write_all(share_bytes.as_ref())?; - share_bytes.as_mut().zeroize(); - for l in 1 ..= self.params.n { - writer - .write_all(self.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref())?; - } - Ok(()) - } + ( + read_u16()?, + read_u16()?, + Participant::new(read_u16()?).ok_or(io::Error::other("invalid participant index"))?, + ) + }; - /// Serialize these keys to a `Vec`. - pub fn serialize(&self) -> Zeroizing> { - let mut serialized = Zeroizing::new(vec![]); - self.write::>(serialized.as_mut()).unwrap(); - serialized - } - - /// Read keys from a type satisfying std::io::Read. - pub fn read(reader: &mut R) -> io::Result> { - { - let different = || io::Error::other("deserializing ThresholdCore for another curve"); - - let mut id_len = [0; 4]; - reader.read_exact(&mut id_len)?; - if u32::try_from(C::ID.len()).unwrap().to_le_bytes() != id_len { - Err(different())?; + let mut interpolation = [0]; + reader.read_exact(&mut interpolation)?; + let interpolation = match interpolation[0] { + 0 => Interpolation::Constant({ + let mut res = Vec::with_capacity(usize::from(n)); + for _ in 0 .. 
n { + res.push(C::read_F(reader)?); } + res + }), + 1 => Interpolation::Lagrange, + _ => Err(io::Error::other("invalid interpolation method"))?, + }; - let mut id = vec![0; C::ID.len()]; - reader.read_exact(&mut id)?; - if id != C::ID { - Err(different())?; - } - } + let secret_share = Zeroizing::new(C::read_F(reader)?); - let (t, n, i) = { - let mut read_u16 = || -> io::Result { - let mut value = [0; 2]; - reader.read_exact(&mut value)?; - Ok(u16::from_le_bytes(value)) - }; - ( - read_u16()?, - read_u16()?, - Participant::new(read_u16()?).ok_or(io::Error::other("invalid participant index"))?, - ) - }; - - let mut interpolation = [0]; - reader.read_exact(&mut interpolation)?; - let interpolation = match interpolation[0] { - 0 => Interpolation::Constant({ - let mut res = Vec::with_capacity(usize::from(n)); - for _ in 0 .. n { - res.push(C::read_F(reader)?); - } - res - }), - 1 => Interpolation::Lagrange, - _ => Err(io::Error::other("invalid interpolation method"))?, - }; - - let secret_share = Zeroizing::new(C::read_F(reader)?); - - let mut verification_shares = HashMap::new(); - for l in (1 ..= n).map(Participant) { - verification_shares.insert(l, ::read_G(reader)?); - } - - Ok(ThresholdCore::new( - ThresholdParams::new(t, n, i).map_err(|_| io::Error::other("invalid parameters"))?, - interpolation, - secret_share, - verification_shares, - )) - } - } - - /// Threshold keys usable for signing. - #[derive(Clone, Debug, Zeroize)] - pub struct ThresholdKeys { - // Core keys. - // If this is the last reference, the underlying keys will be dropped. When that happens, the - // private key present within it will be zeroed out (as it's within Zeroizing). - #[zeroize(skip)] - pub(crate) core: Arc>, - - // Scalar applied to these keys. - pub(crate) scalar: C::F, - // Offset applied to these keys. - pub(crate) offset: C::F, - } - - /// View of keys, interpolated and with the expected linear combination taken for usage. - #[derive(Clone)] - pub struct ThresholdView { - interpolation: Interpolation, - scalar: C::F, - offset: C::F, - group_key: C::G, - included: Vec, - secret_share: Zeroizing, - original_verification_shares: HashMap, - verification_shares: HashMap, - } - - impl fmt::Debug for ThresholdView { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt - .debug_struct("ThresholdView") - .field("interpolation", &self.interpolation) - .field("scalar", &self.scalar) - .field("offset", &self.offset) - .field("group_key", &self.group_key) - .field("included", &self.included) - .field("original_verification_shares", &self.original_verification_shares) - .field("verification_shares", &self.verification_shares) - .finish_non_exhaustive() - } - } - - impl Zeroize for ThresholdView { - fn zeroize(&mut self) { - self.scalar.zeroize(); - self.offset.zeroize(); - self.group_key.zeroize(); - self.included.zeroize(); - self.secret_share.zeroize(); - for share in self.original_verification_shares.values_mut() { - share.zeroize(); - } - for share in self.verification_shares.values_mut() { - share.zeroize(); - } - } - } - - impl ThresholdKeys { - /// Create a new set of ThresholdKeys from a ThresholdCore. - pub fn new(core: ThresholdCore) -> ThresholdKeys { - ThresholdKeys { core: Arc::new(core), scalar: C::F::ONE, offset: C::F::ZERO } + let mut verification_shares = HashMap::new(); + for l in (1 ..= n).map(Participant) { + verification_shares.insert(l, ::read_G(reader)?); } - /// Scale the keys by a given scalar to allow for various account and privacy schemes. 
- /// - /// This scalar is ephemeral and will not be included when these keys are serialized. The - /// scalar is applied on top of any already-existing scalar/offset. - /// - /// Returns `None` if the scalar is equal to `0`. - #[must_use] - pub fn scale(mut self, scalar: C::F) -> Option> { - if bool::from(scalar.is_zero()) { - None?; - } - self.scalar *= scalar; - self.offset *= scalar; - Some(self) - } - - /// Offset the keys by a given scalar to allow for various account and privacy schemes. - /// - /// This offset is ephemeral and will not be included when these keys are serialized. The - /// offset is applied on top of any already-existing scalar/offset. - #[must_use] - pub fn offset(mut self, offset: C::F) -> ThresholdKeys { - self.offset += offset; - self - } - - /// Return the current scalar in-use for these keys. - pub fn current_scalar(&self) -> C::F { - self.scalar - } - - /// Return the current offset in-use for these keys. - pub fn current_offset(&self) -> C::F { - self.offset - } - - /// Return the parameters for these keys. - pub fn params(&self) -> ThresholdParams { - self.core.params - } - - /// Return the secret share for these keys. - pub fn secret_share(&self) -> &Zeroizing { - &self.core.secret_share - } - - /// Return the group key, with the expected linear combination taken. - pub fn group_key(&self) -> C::G { - (self.core.group_key * self.scalar) + (C::generator() * self.offset) - } - - /// Return all participants' verification shares without any offsetting. - pub(crate) fn verification_shares(&self) -> HashMap { - self.core.verification_shares() - } - - /// Serialize these keys to a `Vec`. - pub fn serialize(&self) -> Zeroizing> { - self.core.serialize() - } - - /// Obtain a view of these keys, interpolated for the specified signing set, with the specified - /// linear combination taken. - pub fn view(&self, mut included: Vec) -> Result, DkgError<()>> { - if (included.len() < self.params().t.into()) || - (usize::from(self.params().n()) < included.len()) - { - Err(DkgError::InvalidSigningSet)?; - } - included.sort(); - - // The interpolation occurs multiplicatively, letting us scale by the scalar now - let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref()); - let mut secret_share = Zeroizing::new( - self.core.interpolation.interpolation_factor(self.params().i(), &included) * - secret_share_scaled.deref(), - ); - - let mut verification_shares = self.verification_shares(); - for (i, share) in &mut verification_shares { - *share *= self.scalar * self.core.interpolation.interpolation_factor(*i, &included); - } - - /* - The offset is included by adding it to the participant with the lowest ID. - - This is done after interpolating to ensure, regardless of the method of interpolation, that - the method of interpolation does not scale the offset. For Lagrange interpolation, we could - add the offset to every key share before interpolating, yet for Constant interpolation, we - _have_ to add it as we do here (which also works even when we intend to perform Lagrange - interpolation). 
- */ - if included[0] == self.params().i() { - *secret_share += self.offset; - } - *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset; - - Ok(ThresholdView { - interpolation: self.core.interpolation.clone(), - scalar: self.scalar, - offset: self.offset, - group_key: self.group_key(), - secret_share, - original_verification_shares: self.verification_shares(), - verification_shares, - included, - }) - } - } - - impl From> for ThresholdKeys { - fn from(keys: ThresholdCore) -> ThresholdKeys { - ThresholdKeys::new(keys) - } - } - - impl ThresholdView { - /// Return the scalar applied to this view. - pub fn scalar(&self) -> C::F { - self.scalar - } - - /// Return the offset applied to this view. - pub fn offset(&self) -> C::F { - self.offset - } - - /// Return the group key. - pub fn group_key(&self) -> C::G { - self.group_key - } - - /// Return the included signers. - pub fn included(&self) -> &[Participant] { - &self.included - } - - /// Return the interpolation factor for a signer. - pub fn interpolation_factor(&self, participant: Participant) -> Option { - if !self.included.contains(&participant) { - None? - } - Some(self.interpolation.interpolation_factor(participant, &self.included)) - } - - /// Return the interpolated secret share, with the expected linear combination taken. - pub fn secret_share(&self) -> &Zeroizing { - &self.secret_share - } - - /// Return the original verification share for the specified participant. - pub fn original_verification_share(&self, l: Participant) -> C::G { - self.original_verification_shares[&l] - } - - /// Return the interpolated verification share, with the expected linear combination taken, - /// for the specified participant. - pub fn verification_share(&self, l: Participant) -> C::G { - self.verification_shares[&l] - } + ThresholdKeys::new( + ThresholdParams::new(t, n, i).map_err(io::Error::other)?, + interpolation, + secret_share, + verification_shares, + ) + .map_err(io::Error::other) + } +} + +impl ThresholdView { + /// Return the scalar applied to this view. + pub fn scalar(&self) -> C::F { + self.scalar + } + + /// Return the offset applied to this view. + pub fn offset(&self) -> C::F { + self.offset + } + + /// Return the group key. + pub fn group_key(&self) -> C::G { + self.group_key + } + + /// Return the included signers. + pub fn included(&self) -> &[Participant] { + &self.included + } + + /// Return the interpolation factor for a signer. + pub fn interpolation_factor(&self, participant: Participant) -> Option { + if !self.included.contains(&participant) { + None? + } + Some(self.interpolation.interpolation_factor(participant, &self.included)) + } + + /// Return the interpolated secret share, with the expected linear combination taken. + pub fn secret_share(&self) -> &Zeroizing { + &self.secret_share + } + + /// Return the original (untweaked) verification share for the specified participant. + /// + /// This will panic if the participant index is invalid for these keys. + pub fn original_verification_share(&self, l: Participant) -> C::G { + self.original_verification_shares[&l] + } + + /// Return the interpolated verification share, with the expected linear combination taken, + /// for the specified participant. + /// + /// This will panic if the participant was not included in the signing set. 
+ pub fn verification_share(&self, l: Participant) -> C::G { + self.verification_shares[&l] } } -#[cfg(feature = "std")] -pub use lib::*; diff --git a/crypto/dkg/src/musig.rs b/crypto/dkg/src/musig.rs deleted file mode 100644 index 82a755db..00000000 --- a/crypto/dkg/src/musig.rs +++ /dev/null @@ -1,129 +0,0 @@ -#[cfg(feature = "std")] -use core::ops::Deref; -use std_shims::{vec, vec::Vec, collections::HashSet}; -#[cfg(feature = "std")] -use std_shims::collections::HashMap; - -#[cfg(feature = "std")] -use zeroize::Zeroizing; - -use ciphersuite::{ - group::{Group, GroupEncoding}, - Ciphersuite, -}; - -use crate::DkgError; -#[cfg(feature = "std")] -use crate::{Participant, ThresholdParams, Interpolation, ThresholdCore}; - -fn check_keys(keys: &[C::G]) -> Result> { - if keys.is_empty() { - Err(DkgError::InvalidSigningSet)?; - } - // Too many signers - let keys_len = u16::try_from(keys.len()).map_err(|_| DkgError::InvalidSigningSet)?; - - // Duplicated public keys - if keys.iter().map(|key| key.to_bytes().as_ref().to_vec()).collect::>().len() != - keys.len() - { - Err(DkgError::InvalidSigningSet)?; - } - - Ok(keys_len) -} - -// This function panics if called with keys whose length exceed 2**16. -// This is fine since it's internal and all calls occur after calling check_keys, which does check -// the keys' length. -fn binding_factor_transcript( - context: &[u8], - keys: &[C::G], -) -> Result, DkgError<()>> { - let mut transcript = vec![]; - transcript.push(u8::try_from(context.len()).map_err(|_| DkgError::InvalidSigningSet)?); - transcript.extend(context); - transcript.extend(u16::try_from(keys.len()).unwrap().to_le_bytes()); - for key in keys { - transcript.extend(key.to_bytes().as_ref()); - } - Ok(transcript) -} - -fn binding_factor(mut transcript: Vec, i: u16) -> C::F { - transcript.extend(i.to_le_bytes()); - C::hash_to_F(b"musig", &transcript) -} - -/// The group key resulting from using this library's MuSig key gen. -/// -/// This function will return an error if the context is longer than 255 bytes. -/// -/// Creating an aggregate key with a list containing duplicated public keys will return an error. -pub fn musig_key(context: &[u8], keys: &[C::G]) -> Result> { - let keys_len = check_keys::(keys)?; - let transcript = binding_factor_transcript::(context, keys)?; - let mut res = C::G::identity(); - for i in 1 ..= keys_len { - // TODO: Calculate this with a multiexp - res += keys[usize::from(i - 1)] * binding_factor::(transcript.clone(), i); - } - Ok(res) -} - -/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key. -/// -/// Creating an aggregate key with a list containing duplicated public keys returns an error. -#[cfg(feature = "std")] -pub fn musig( - context: &[u8], - private_key: &Zeroizing, - keys: &[C::G], -) -> Result, DkgError<()>> { - let keys_len = check_keys::(keys)?; - - let our_pub_key = C::generator() * private_key.deref(); - let Some(pos) = keys.iter().position(|key| *key == our_pub_key) else { - // Not present in signing set - Err(DkgError::InvalidSigningSet)? - }; - let params = ThresholdParams::new( - keys_len, - keys_len, - // These errors shouldn't be possible, as pos is bounded to len - 1 - // Since len is prior guaranteed to be within u16::MAX, pos + 1 must also be - Participant::new((pos + 1).try_into().map_err(|_| DkgError::InvalidSigningSet)?) 
- .ok_or(DkgError::InvalidSigningSet)?, - )?; - - // Calculate the binding factor per-key - let transcript = binding_factor_transcript::(context, keys)?; - let mut binding = Vec::with_capacity(keys.len()); - for i in 1 ..= keys_len { - binding.push(binding_factor::(transcript.clone(), i)); - } - - // Our secret share is our private key - let secret_share = private_key.clone(); - - // Calculate verification shares - let mut verification_shares = HashMap::new(); - let mut group_key = C::G::identity(); - for l in 1 ..= keys_len { - let key = keys[usize::from(l) - 1]; - // TODO: Use a multiexp for this - group_key += key * binding[usize::from(l - 1)]; - - // These errors also shouldn't be possible, for the same reasons as documented above - verification_shares.insert(Participant::new(l).ok_or(DkgError::InvalidSigningSet)?, key); - } - debug_assert_eq!(C::generator() * secret_share.deref(), verification_shares[¶ms.i()]); - debug_assert_eq!(musig_key::(context, keys).unwrap(), group_key); - - Ok(ThresholdCore::new( - params, - Interpolation::Constant(binding), - secret_share, - verification_shares, - )) -} diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs deleted file mode 100644 index 0078020a..00000000 --- a/crypto/dkg/src/tests/mod.rs +++ /dev/null @@ -1,102 +0,0 @@ -use core::ops::Deref; -use std::collections::HashMap; - -use zeroize::Zeroizing; -use rand_core::{RngCore, CryptoRng}; - -use ciphersuite::{group::ff::Field, Ciphersuite}; - -use crate::{Participant, ThresholdCore, ThresholdKeys, musig::musig as musig_fn}; - -mod musig; -pub use musig::test_musig; - -/// FROST key generation testing utility. -pub mod pedpop; -use pedpop::pedpop_gen; - -// Promotion test. -mod promote; -use promote::test_generator_promotion; - -/// Constant amount of participants to use when testing. -pub const PARTICIPANTS: u16 = 5; -/// Constant threshold of participants to use when testing. -pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1; - -/// Clone a map without a specific value. -pub fn clone_without( - map: &HashMap, - without: &K, -) -> HashMap { - let mut res = map.clone(); - res.remove(without).unwrap(); - res -} - -/// Recover the secret from a collection of keys. -/// -/// This will panic if no keys, an insufficient amount of keys, or the wrong keys are provided. -pub fn recover_key(keys: &HashMap>) -> C::F { - let first = keys.values().next().expect("no keys provided"); - assert!(keys.len() >= first.params().t().into(), "not enough keys provided"); - let included = keys.keys().copied().collect::>(); - - let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| { - accum + - (first.core.interpolation.interpolation_factor(*i, &included) * keys.secret_share().deref()) - }); - assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys"); - group_private -} - -/// Generate threshold keys for tests. -pub fn key_gen( - rng: &mut R, -) -> HashMap> { - let res = pedpop_gen(rng) - .drain() - .map(|(i, core)| { - assert_eq!( - &ThresholdCore::::read::<&[u8]>(&mut core.serialize().as_ref()).unwrap(), - &core - ); - (i, ThresholdKeys::new(core)) - }) - .collect(); - assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key()); - res -} - -/// Generate MuSig keys for tests. -pub fn musig_key_gen( - rng: &mut R, -) -> HashMap> { - let mut keys = vec![]; - let mut pub_keys = vec![]; - for _ in 0 .. 
PARTICIPANTS { - let key = Zeroizing::new(C::F::random(&mut *rng)); - pub_keys.push(C::generator() * *key); - keys.push(key); - } - - let mut res = HashMap::new(); - for key in keys { - let these_keys = musig_fn::(b"Test MuSig Key Gen", &key, &pub_keys).unwrap(); - res.insert(these_keys.params().i(), ThresholdKeys::new(these_keys)); - } - - assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key()); - res -} - -/// Run the test suite on a ciphersuite. -pub fn test_ciphersuite(rng: &mut R) { - key_gen::<_, C>(rng); - test_generator_promotion::<_, C>(rng); -} - -#[test] -fn test_with_ristretto() { - test_ciphersuite::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng); -} diff --git a/crypto/dkg/src/tests/musig.rs b/crypto/dkg/src/tests/musig.rs deleted file mode 100644 index 086b26be..00000000 --- a/crypto/dkg/src/tests/musig.rs +++ /dev/null @@ -1,61 +0,0 @@ -use std::collections::HashMap; - -use zeroize::Zeroizing; -use rand_core::{RngCore, CryptoRng}; - -use ciphersuite::{group::ff::Field, Ciphersuite}; - -use crate::{ - ThresholdKeys, - musig::{musig_key, musig}, - tests::{PARTICIPANTS, recover_key}, -}; - -/// Tests MuSig key generation. -pub fn test_musig(rng: &mut R) { - let mut keys = vec![]; - let mut pub_keys = vec![]; - for _ in 0 .. PARTICIPANTS { - let key = Zeroizing::new(C::F::random(&mut *rng)); - pub_keys.push(C::generator() * *key); - keys.push(key); - } - - const CONTEXT: &[u8] = b"MuSig Test"; - - // Empty signing set - musig::(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).unwrap_err(); - // Signing set we're not part of - musig::(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).unwrap_err(); - - // Test with n keys - { - let mut created_keys = HashMap::new(); - let mut verification_shares = HashMap::new(); - let group_key = musig_key::(CONTEXT, &pub_keys).unwrap(); - for (i, key) in keys.iter().enumerate() { - let these_keys = musig::(CONTEXT, key, &pub_keys).unwrap(); - assert_eq!(these_keys.params().t(), PARTICIPANTS); - assert_eq!(these_keys.params().n(), PARTICIPANTS); - assert_eq!(usize::from(these_keys.params().i().0), i + 1); - - verification_shares - .insert(these_keys.params().i(), C::generator() * **these_keys.secret_share()); - - assert_eq!(these_keys.group_key(), group_key); - - created_keys.insert(these_keys.params().i(), ThresholdKeys::new(these_keys)); - } - - for keys in created_keys.values() { - assert_eq!(keys.verification_shares(), verification_shares); - } - - assert_eq!(C::generator() * recover_key(&created_keys), group_key); - } -} - -#[test] -fn musig_literal() { - test_musig::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng) -} diff --git a/crypto/dkg/src/tests/pedpop.rs b/crypto/dkg/src/tests/pedpop.rs deleted file mode 100644 index 42d7af67..00000000 --- a/crypto/dkg/src/tests/pedpop.rs +++ /dev/null @@ -1,331 +0,0 @@ -use std::collections::HashMap; - -use rand_core::{RngCore, CryptoRng}; - -use ciphersuite::Ciphersuite; - -use crate::{ - Participant, ThresholdParams, ThresholdCore, - pedpop::{Commitments, KeyGenMachine, SecretShare, KeyMachine}, - encryption::{EncryptionKeyMessage, EncryptedMessage}, - tests::{THRESHOLD, PARTICIPANTS, clone_without}, -}; - -type PedPoPEncryptedMessage = EncryptedMessage::F>>; -type PedPoPSecretShares = HashMap>; - -const CONTEXT: [u8; 32] = *b"DKG Test Key Generation "; - -// Commit, then return commitment messages, enc keys, and shares -#[allow(clippy::type_complexity)] -fn commit_enc_keys_and_shares( - rng: &mut R, -) -> ( - HashMap>, - HashMap>>, - HashMap, - HashMap>, -) { - let 
mut machines = HashMap::new(); - let mut commitments = HashMap::new(); - let mut enc_keys = HashMap::new(); - for i in (1 ..= PARTICIPANTS).map(Participant) { - let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); - let machine = KeyGenMachine::::new(params, CONTEXT); - let (machine, these_commitments) = machine.generate_coefficients(rng); - machines.insert(i, machine); - - commitments.insert( - i, - EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params) - .unwrap(), - ); - enc_keys.insert(i, commitments[&i].enc_key()); - } - - let mut secret_shares = HashMap::new(); - let machines = machines - .drain() - .map(|(l, machine)| { - let (machine, mut shares) = - machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); - let shares = shares - .drain() - .map(|(l, share)| { - ( - l, - EncryptedMessage::read::<&[u8]>( - &mut share.serialize().as_ref(), - // Only t/n actually matters, so hardcode i to 1 here - ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: Participant(1) }, - ) - .unwrap(), - ) - }) - .collect::>(); - secret_shares.insert(l, shares); - (l, machine) - }) - .collect::>(); - - (machines, commitments, enc_keys, secret_shares) -} - -fn generate_secret_shares( - shares: &HashMap>, - recipient: Participant, -) -> PedPoPSecretShares { - let mut our_secret_shares = HashMap::new(); - for (i, shares) in shares { - if recipient == *i { - continue; - } - our_secret_shares.insert(*i, shares[&recipient].clone()); - } - our_secret_shares -} - -/// Fully perform the PedPoP key generation algorithm. -pub fn pedpop_gen( - rng: &mut R, -) -> HashMap> { - let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng); - - let mut verification_shares = None; - let mut group_key = None; - machines - .drain() - .map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete(); - - // Verify the verification_shares are agreed upon - if verification_shares.is_none() { - verification_shares = Some(these_keys.verification_shares()); - } - assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares()); - - // Verify the group keys are agreed upon - if group_key.is_none() { - group_key = Some(these_keys.group_key()); - } - assert_eq!(group_key.unwrap(), these_keys.group_key()); - - (i, these_keys) - }) - .collect::>() -} - -#[cfg(test)] -mod literal { - use rand_core::OsRng; - - use ciphersuite::Ristretto; - - use crate::{ - DkgError, - encryption::EncryptionKeyProof, - pedpop::{BlameMachine, AdditionalBlameMachine}, - }; - - use super::*; - - const ONE: Participant = Participant(1); - const TWO: Participant = Participant(2); - - fn test_blame( - commitment_msgs: &HashMap>>, - machines: Vec>, - msg: &PedPoPEncryptedMessage, - blame: &Option>, - ) { - for machine in machines { - let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone()); - assert_eq!(blamed, ONE); - // Verify additional blame also works - assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE); - - // Verify machines constructed with AdditionalBlameMachine::new work - assert_eq!( - AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame( - ONE, - TWO, - msg.clone(), - blame.clone() - ), - ONE, - ); - } - } - - // TODO: Write a macro which expands to the following - #[test] - fn invalid_encryption_pop_blame() { - let (mut machines, 
commitment_msgs, _, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - // Mutate the PoP of the encrypted message from 1 to 2 - secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop(); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == TWO { - assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: ONE, blame: None })); - // Explicitly declare we have a blame object, which happens to be None since invalid PoP - // is self-explainable - blame = Some(None); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); - } - - #[test] - fn invalid_ecdh_blame() { - let (mut machines, commitment_msgs, _, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - // Mutate the share to trigger a blame event - // Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass - // While here, 2 is malicious, this is so 1 creates the blame proof - // We then malleate 1's blame proof, so 1 ends up malicious - // Doesn't simply invalidate the PoP as that won't have a blame statement - // By mutating the encrypted data, we do ensure a blame statement is created - secret_shares - .get_mut(&TWO) - .unwrap() - .get_mut(&ONE) - .unwrap() - .invalidate_msg(&mut OsRng, CONTEXT, TWO); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == ONE { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - blame.as_mut().unwrap().as_mut().unwrap().invalidate_key(); - test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); - } - - // This should be largely equivalent to the prior test - #[test] - fn invalid_dleq_blame() { - let (mut machines, commitment_msgs, _, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - secret_shares - .get_mut(&TWO) - .unwrap() - .get_mut(&ONE) - .unwrap() - .invalidate_msg(&mut OsRng, CONTEXT, TWO); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == ONE { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq(); - test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); - } - - #[test] - fn invalid_share_serialization_blame() { - let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization( - &mut OsRng, - CONTEXT, - ONE, - enc_keys[&TWO], - ); - - let mut blame = None; - 
let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == TWO { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); - } - - #[test] - fn invalid_share_value_blame() { - let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value( - &mut OsRng, - CONTEXT, - ONE, - enc_keys[&TWO], - ); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == TWO { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); - } -} diff --git a/crypto/dkg/src/tests/promote.rs b/crypto/dkg/src/tests/promote.rs deleted file mode 100644 index 242f085b..00000000 --- a/crypto/dkg/src/tests/promote.rs +++ /dev/null @@ -1,66 +0,0 @@ -use core::{marker::PhantomData, ops::Deref}; -use std::collections::HashMap; - -use rand_core::{RngCore, CryptoRng}; - -use zeroize::Zeroize; - -use ciphersuite::{group::Group, Ciphersuite}; - -use crate::{ - promote::{GeneratorPromotion, GeneratorProof}, - tests::{clone_without, key_gen, recover_key}, -}; - -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -struct AltGenerator { - _curve: PhantomData, -} - -impl Ciphersuite for AltGenerator { - type F = C::F; - type G = C::G; - type H = C::H; - - const ID: &'static [u8] = b"Alternate Ciphersuite"; - - fn generator() -> Self::G { - C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") - } - - fn reduce_512(scalar: [u8; 64]) -> Self::F { - ::reduce_512(scalar) - } - - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { - ::hash_to_F(dst, data) - } -} - -// Test promotion of threshold keys to another generator -pub(crate) fn test_generator_promotion(rng: &mut R) { - let keys = key_gen::<_, C>(&mut *rng); - - let mut promotions = HashMap::new(); - let mut proofs = HashMap::new(); - for (i, keys) in &keys { - let (promotion, proof) = - GeneratorPromotion::<_, AltGenerator>::promote(&mut *rng, keys.clone()); - promotions.insert(*i, promotion); - proofs.insert(*i, GeneratorProof::::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap()); - } - - let new_group_key = AltGenerator::::generator() * recover_key(&keys); - for (i, promoting) in promotions.drain() { - let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); - assert_eq!(keys[&i].params(), promoted.params()); - assert_eq!(keys[&i].secret_share(), promoted.secret_share()); - assert_eq!(new_group_key, promoted.group_key()); - for (l, verification_share) in promoted.verification_shares() { - assert_eq!( - AltGenerator::::generator() * keys[&l].secret_share().deref(), - verification_share - ); - } - } -} diff --git a/crypto/frost/Cargo.toml 
b/crypto/frost/Cargo.toml index 1d030621..0b2171d6 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -39,13 +39,13 @@ multiexp = { path = "../multiexp", version = "0.4", default-features = false, fe schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } -dkg = { path = "../dkg", version = "^0.5.1", default-features = false, features = ["std"] } +dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] } [dev-dependencies] hex = "0.4" serde_json = { version = "1", default-features = false, features = ["std"] } -dkg = { path = "../dkg", features = ["tests"] } +dkg = { path = "../dkg" } [features] ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] @@ -56,4 +56,4 @@ p256 = ["ciphersuite/p256"] ed448 = ["minimal-ed448", "ciphersuite/ed448"] -tests = ["hex", "rand_core/getrandom", "dkg/tests"] +tests = ["hex", "rand_core/getrandom"] diff --git a/tests/no-std/Cargo.toml b/tests/no-std/Cargo.toml index 36ba85f4..da21e0ff 100644 --- a/tests/no-std/Cargo.toml +++ b/tests/no-std/Cargo.toml @@ -30,6 +30,8 @@ dleq = { path = "../../crypto/dleq", default-features = false } schnorr-signatures = { path = "../../crypto/schnorr", default-features = false } dkg = { path = "../../crypto/dkg", default-features = false } +dkg-recovery = { path = "../../crypto/dkg/recovery", default-features = false } +dkg-musig = { path = "../../crypto/dkg/musig", default-features = false } # modular-frost = { path = "../../crypto/frost", default-features = false } # frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false } diff --git a/tests/no-std/src/lib.rs b/tests/no-std/src/lib.rs index 8339da2e..7b9c2cca 100644 --- a/tests/no-std/src/lib.rs +++ b/tests/no-std/src/lib.rs @@ -13,6 +13,8 @@ pub use dleq; pub use schnorr_signatures; pub use dkg; +pub use dkg_recovery; +pub use dkg_musig; /* pub use modular_frost; pub use frost_schnorrkel;
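For reference, a minimal sketch of how the new `dkg-recovery` crate is expected to be consumed, assuming a `Ristretto` ciphersuite and `ThresholdKeys` previously produced by one of the key-generation crates; the function name `check_recovery` and the choice of `Ristretto` are illustrative assumptions, not part of this patch:

use ciphersuite::{Ciphersuite, Ristretto};
use dkg::ThresholdKeys;
use dkg_recovery::recover_key;

// Recover the group's secret scalar from at least `t` key shares of the same session, then
// sanity-check it against the group key those shares report.
fn check_recovery(keys: &[ThresholdKeys<Ristretto>]) {
  // `recover_key` interpolates and sums the provided secret shares, erroring if too few,
  // duplicated, or mutually inconsistent keys are provided.
  let secret = recover_key(keys).expect("insufficient or inconsistent keys");
  assert_eq!(<Ristretto as Ciphersuite>::generator() * *secret, keys[0].group_key());
}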