Smash dkg into dkg, dkg-[recovery, promote, musig, pedpop]

promote and pedpop require dleq, which doesn't support no-std. All three should
be moved outside the Serai repository, per #597, as none are planned for use
nor worth covering under our BBP.
Luke Parker
2025-08-18 01:24:40 -04:00
parent 3919cf55ae
commit 9f84adf8b3
35 changed files with 1910 additions and 1362 deletions

View File

@@ -36,5 +36,9 @@ jobs:
-p schnorr-signatures \ -p schnorr-signatures \
-p dleq \ -p dleq \
-p dkg \ -p dkg \
-p dkg-recovery \
-p dkg-promote \
-p dkg-musig \
-p dkg-pedpop \
-p modular-frost \ -p modular-frost \
-p frost-schnorrkel -p frost-schnorrkel

Cargo.lock (generated; 54 lines changed)
View File

@@ -2211,17 +2211,65 @@ dependencies = [
[[package]] [[package]]
name = "dkg" name = "dkg"
version = "0.5.1" version = "0.6.0"
dependencies = [ dependencies = [
"borsh", "borsh",
"ciphersuite",
"std-shims",
"thiserror 2.0.14",
"zeroize",
]
[[package]]
name = "dkg-musig"
version = "0.6.0"
dependencies = [
"ciphersuite",
"dkg",
"dkg-recovery",
"multiexp",
"rand_core",
"std-shims",
"thiserror 2.0.14",
"zeroize",
]
[[package]]
name = "dkg-pedpop"
version = "0.6.0"
dependencies = [
"chacha20", "chacha20",
"ciphersuite", "ciphersuite",
"dkg",
"dleq", "dleq",
"flexible-transcript", "flexible-transcript",
"multiexp", "multiexp",
"rand_core", "rand_core",
"schnorr-signatures", "schnorr-signatures",
"std-shims", "thiserror 2.0.14",
"zeroize",
]
[[package]]
name = "dkg-promote"
version = "0.6.0"
dependencies = [
"ciphersuite",
"dkg",
"dkg-recovery",
"dleq",
"flexible-transcript",
"rand_core",
"thiserror 2.0.14",
"zeroize",
]
[[package]]
name = "dkg-recovery"
version = "0.6.0"
dependencies = [
"ciphersuite",
"dkg",
"thiserror 2.0.14", "thiserror 2.0.14",
"zeroize", "zeroize",
] ]
@@ -8324,6 +8372,8 @@ dependencies = [
"ciphersuite", "ciphersuite",
"dalek-ff-group", "dalek-ff-group",
"dkg", "dkg",
"dkg-musig",
"dkg-recovery",
"dleq", "dleq",
"flexible-transcript", "flexible-transcript",
"minimal-ed448", "minimal-ed448",

View File

@@ -34,6 +34,10 @@ members = [
"crypto/schnorr", "crypto/schnorr",
"crypto/dleq", "crypto/dleq",
"crypto/dkg", "crypto/dkg",
"crypto/dkg/recovery",
"crypto/dkg/promote",
"crypto/dkg/musig",
"crypto/dkg/pedpop",
"crypto/frost", "crypto/frost",
"crypto/schnorrkel", "crypto/schnorrkel",

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dkg" name = "dkg"
version = "0.5.1" version = "0.6.0"
description = "Distributed key generation over ff/group" description = "Distributed key generation over ff/group"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
@@ -17,50 +17,28 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true workspace = true
[dependencies] [dependencies]
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive", "alloc"] }
thiserror = { version = "2", default-features = false } thiserror = { version = "2", default-features = false }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
std-shims = { version = "0.1", path = "../../common/std-shims", default-features = false } std-shims = { version = "0.1", path = "../../common/std-shims", default-features = false }
borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true } borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true }
transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false, features = ["recommended"] } ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] }
chacha20 = { version = "0.9", default-features = false, features = ["zeroize"] }
ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false }
multiexp = { path = "../multiexp", version = "0.4", default-features = false }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false }
dleq = { path = "../dleq", version = "^0.4.1", default-features = false }
[dev-dependencies] [dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] } ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] }
[features] [features]
std = [ std = [
"thiserror/std", "thiserror/std",
"rand_core/std",
"std-shims/std", "std-shims/std",
"borsh?/std", "borsh?/std",
"transcript/std",
"chacha20/std",
"ciphersuite/std", "ciphersuite/std",
"multiexp/std",
"multiexp/batch",
"schnorr/std",
"dleq/std",
"dleq/serialize"
] ]
borsh = ["dep:borsh"] borsh = ["dep:borsh"]
tests = ["rand_core/getrandom"]
default = ["std"] default = ["std"]

View File

@@ -1,6 +1,6 @@
MIT License MIT License
Copyright (c) 2021-2023 Luke Parker Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,16 +1,14 @@
# Distributed Key Generation

-A collection of implementations of various distributed key generation protocols.
-
-All included protocols resolve into the provided `Threshold` types, intended to
-enable their modularity. Additional utilities around these types, such as
-promotion from one generator to another, are also provided.
-
-Currently, the only included protocol is the two-round protocol from the
-[FROST paper](https://eprint.iacr.org/2020/852).
-
-This library was
-[audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf),
-culminating in commit
-[669d2dbffc1dafb82a09d9419ea182667115df06](https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06).
-Any subsequent changes have not undergone auditing.
+A crate implementing a type for keys, presumably the result of a distributed key generation
+protocol, and utilities from there.
+
+This crate used to host implementations of distributed key generation protocols as well (hence the
+name). Those have been smashed into their own crates, such as
+[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop)
+
+Before being smashed, this crate was [audited by Cypher Stack in March 2023](
+https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
+), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
+https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
+). Any subsequent changes have not undergone auditing.
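
For orientation (not part of this commit), a minimal sketch of what the slimmed-down `dkg` crate still provides: constructing `ThresholdKeys` directly from existing shares. It assumes the Ristretto ciphersuite and a trivial 1-of-1 parameter set purely to show the constructor's shape.

use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::OsRng;
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use dkg::{Participant, ThresholdParams, Interpolation, ThresholdKeys};

fn main() {
  // Illustrative 1-of-1 parameters; real usage would come from an actual DKG
  let i = Participant::new(1).unwrap();
  let params = ThresholdParams::new(1, 1, i).unwrap();

  // The secret share and its matching verification share
  let secret = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
  let mut verification_shares = HashMap::new();
  verification_shares.insert(i, <Ristretto as Ciphersuite>::generator() * *secret);

  // Assemble the keys; constant interpolation of [1] makes the group key the sole share's key
  let keys = ThresholdKeys::new(
    params,
    Interpolation::Constant(vec![<Ristretto as Ciphersuite>::F::ONE]),
    secret,
    verification_shares,
  )
  .unwrap();
  assert_eq!(keys.group_key(), keys.original_verification_share(i));
}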

View File

@@ -0,0 +1,49 @@
[package]
name = "dkg-musig"
version = "0.6.0"
description = "The MuSig key aggregation protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/musig"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "2", default-features = false }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false }
multiexp = { path = "../../multiexp", version = "0.4", default-features = false }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false }
dkg = { path = "../", default-features = false }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] }
dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] }
[features]
std = [
"thiserror/std",
"rand_core/std",
"std-shims/std",
"multiexp/std",
"ciphersuite/std",
"dkg/std",
]
default = ["std"]

crypto/dkg/musig/LICENSE (new file; 21 lines)
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,12 @@
# Distributed Key Generation - MuSig
This implements the MuSig key aggregation protocol for the [`dkg`](https://docs.rs/dkg) crate's
types.
This crate was originally part of the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.
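
As a usage illustration (not from this commit), a sketch of aggregating two keys with the functions added in `src/lib.rs` below; the all-zero context and the Ristretto ciphersuite are purely illustrative.

use zeroize::Zeroizing;
use rand_core::OsRng;
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use dkg_musig::{musig, musig_key};

fn main() {
  // Illustrative context; a real protocol would bind this to its own domain
  let context = [0u8; 32];

  let a = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
  let b = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
  let pub_keys = [
    <Ristretto as Ciphersuite>::generator() * *a,
    <Ristretto as Ciphersuite>::generator() * *b,
  ];

  // Each signer derives their own ThresholdKeys non-interactively
  let keys_a = musig::<Ristretto>(context, a, &pub_keys).unwrap();
  let keys_b = musig::<Ristretto>(context, b, &pub_keys).unwrap();

  // Both agree on the aggregated group key, which matches musig_key
  assert_eq!(keys_a.group_key(), keys_b.group_key());
  assert_eq!(keys_a.group_key(), musig_key::<Ristretto>(context, &pub_keys).unwrap());
}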

crypto/dkg/musig/src/lib.rs (new file; 162 lines)
View File

@@ -0,0 +1,162 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
use core::ops::Deref;
use std_shims::{
vec,
vec::Vec,
collections::{HashSet, HashMap},
};
use zeroize::Zeroizing;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
pub use dkg::*;
#[cfg(test)]
mod tests;
/// Errors encountered when working with threshold keys.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum MusigError<C: Ciphersuite> {
/// No keys were provided.
#[error("no keys provided")]
NoKeysProvided,
/// Too many keys were provided.
#[error("too many keys (allowed {max}, provided {provided})")]
TooManyKeysProvided {
/// The maximum amount of keys allowed.
max: u16,
/// The amount of keys provided.
provided: usize,
},
/// A participant was duplicated.
#[error("a participant was duplicated")]
DuplicatedParticipant(C::G),
/// Participating, yet our public key wasn't found in the list of keys.
#[error("private key's public key wasn't present in the list of public keys")]
NotPresent,
/// An error propagated from the underlying `dkg` crate.
#[error("error from dkg ({0})")]
DkgError(DkgError),
}
fn check_keys<C: Ciphersuite>(keys: &[C::G]) -> Result<u16, MusigError<C>> {
if keys.is_empty() {
Err(MusigError::NoKeysProvided)?;
}
let keys_len = u16::try_from(keys.len())
.map_err(|_| MusigError::TooManyKeysProvided { max: u16::MAX, provided: keys.len() })?;
let mut set = HashSet::with_capacity(keys.len());
for key in keys {
let bytes = key.to_bytes().as_ref().to_vec();
if !set.insert(bytes) {
Err(MusigError::DuplicatedParticipant(*key))?;
}
}
Ok(keys_len)
}
fn binding_factor_transcript<C: Ciphersuite>(
context: [u8; 32],
keys_len: u16,
keys: &[C::G],
) -> Vec<u8> {
debug_assert_eq!(usize::from(keys_len), keys.len());
let mut transcript = vec![];
transcript.extend(&context);
transcript.extend(keys_len.to_le_bytes());
for key in keys {
transcript.extend(key.to_bytes().as_ref());
}
transcript
}
fn binding_factor<C: Ciphersuite>(mut transcript: Vec<u8>, i: u16) -> C::F {
transcript.extend(i.to_le_bytes());
C::hash_to_F(b"dkg-musig", &transcript)
}
#[allow(clippy::type_complexity)]
fn musig_key_multiexp<C: Ciphersuite>(
context: [u8; 32],
keys: &[C::G],
) -> Result<Vec<(C::F, C::G)>, MusigError<C>> {
let keys_len = check_keys::<C>(keys)?;
let transcript = binding_factor_transcript::<C>(context, keys_len, keys);
let mut multiexp = Vec::with_capacity(keys.len());
for i in 1 ..= keys_len {
multiexp.push((binding_factor::<C>(transcript.clone(), i), keys[usize::from(i - 1)]));
}
Ok(multiexp)
}
/// The group key resulting from using this library's MuSig key aggregation.
///
/// This function executes in variable time and MUST NOT be used with secret data.
pub fn musig_key_vartime<C: Ciphersuite>(
context: [u8; 32],
keys: &[C::G],
) -> Result<C::G, MusigError<C>> {
Ok(multiexp::multiexp_vartime(&musig_key_multiexp(context, keys)?))
}
/// The group key resulting from using this library's MuSig key aggregation.
pub fn musig_key<C: Ciphersuite>(context: [u8; 32], keys: &[C::G]) -> Result<C::G, MusigError<C>> {
Ok(multiexp::multiexp(&musig_key_multiexp(context, keys)?))
}
/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key.
pub fn musig<C: Ciphersuite>(
context: [u8; 32],
private_key: Zeroizing<C::F>,
keys: &[C::G],
) -> Result<ThresholdKeys<C>, MusigError<C>> {
let our_pub_key = C::generator() * private_key.deref();
let Some(our_i) = keys.iter().position(|key| *key == our_pub_key) else {
Err(MusigError::DkgError(DkgError::NotParticipating))?
};
let keys_len: u16 = check_keys::<C>(keys)?;
let params = ThresholdParams::new(
keys_len,
keys_len,
// The `+ 1` won't fail as `keys.len() <= u16::MAX`, so any index is `< u16::MAX`
Participant::new(
u16::try_from(our_i).expect("keys.len() <= u16::MAX yet index of keys > u16::MAX?") + 1,
)
.expect("i + 1 != 0"),
)
.map_err(MusigError::DkgError)?;
let transcript = binding_factor_transcript::<C>(context, keys_len, keys);
let mut binding_factors = Vec::with_capacity(keys.len());
let mut multiexp = Vec::with_capacity(keys.len());
let mut verification_shares = HashMap::with_capacity(keys.len());
for (i, key) in (1 ..= keys_len).zip(keys.iter().copied()) {
let binding_factor = binding_factor::<C>(transcript.clone(), i);
binding_factors.push(binding_factor);
multiexp.push((binding_factor, key));
let i = Participant::new(i).expect("non-zero u16 wasn't a valid Participant index?");
verification_shares.insert(i, key);
}
let group_key = multiexp::multiexp(&multiexp);
debug_assert_eq!(our_pub_key, verification_shares[&params.i()]);
debug_assert_eq!(musig_key_vartime::<C>(context, keys).unwrap(), group_key);
ThresholdKeys::new(
params,
Interpolation::Constant(binding_factors),
private_key,
verification_shares,
)
.map_err(MusigError::DkgError)
}

View File

@@ -0,0 +1,70 @@
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::OsRng;
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use dkg_recovery::recover_key;
use crate::*;
/// Tests MuSig key generation.
#[test]
pub fn test_musig() {
const PARTICIPANTS: u16 = 5;
let mut keys = vec![];
let mut pub_keys = vec![];
for _ in 0 .. PARTICIPANTS {
let key = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
pub_keys.push(<Ristretto as Ciphersuite>::generator() * *key);
keys.push(key);
}
const CONTEXT: [u8; 32] = *b"MuSig Test ";
// Empty signing set
musig::<Ristretto>(CONTEXT, Zeroizing::new(<Ristretto as Ciphersuite>::F::ZERO), &[])
.unwrap_err();
// Signing set we're not part of
musig::<Ristretto>(
CONTEXT,
Zeroizing::new(<Ristretto as Ciphersuite>::F::ZERO),
&[<Ristretto as Ciphersuite>::generator()],
)
.unwrap_err();
// Test with n keys
{
let mut created_keys = HashMap::new();
let mut verification_shares = HashMap::new();
let group_key = musig_key::<Ristretto>(CONTEXT, &pub_keys).unwrap();
for (i, key) in keys.iter().enumerate() {
let these_keys = musig::<Ristretto>(CONTEXT, key.clone(), &pub_keys).unwrap();
assert_eq!(these_keys.params().t(), PARTICIPANTS);
assert_eq!(these_keys.params().n(), PARTICIPANTS);
assert_eq!(usize::from(u16::from(these_keys.params().i())), i + 1);
verification_shares.insert(
these_keys.params().i(),
<Ristretto as Ciphersuite>::generator() * **these_keys.secret_share(),
);
assert_eq!(these_keys.group_key(), group_key);
created_keys.insert(these_keys.params().i(), these_keys);
}
for keys in created_keys.values() {
for (l, verification_share) in &verification_shares {
assert_eq!(keys.original_verification_share(*l), *verification_share);
}
}
assert_eq!(
<Ristretto as Ciphersuite>::generator() *
*recover_key(&created_keys.values().cloned().collect::<Vec<_>>()).unwrap(),
group_key
);
}
}

View File

@@ -0,0 +1,37 @@
[package]
name = "dkg-pedpop"
version = "0.6.0"
description = "The PedPoP distributed key generation protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/pedpop"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "2", default-features = false, features = ["std"] }
zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] }
chacha20 = { version = "0.9", default-features = false, features = ["std", "zeroize"] }
multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["std"] }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] }
schnorr = { package = "schnorr-signatures", path = "../../schnorr", version = "^0.5.1", default-features = false, features = ["std"] }
dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] }
dkg = { path = "../", default-features = false, features = ["std"] }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] }

crypto/dkg/pedpop/LICENSE (new file; 21 lines)
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,12 @@
# Distributed Key Generation - PedPoP
This implements the PedPoP distributed key generation protocol for the [`dkg`](https://docs.rs/dkg)
crate's types.
This crate was originally part of the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.
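
For reviewers, a sketch of the three-round flow the machines in `src/lib.rs` implement, with all network exchange replaced by in-memory maps; the 2-of-3 parameters and zeroed context are illustrative, and a real deployment must broadcast commitments and send shares over authenticated channels.

use std::collections::HashMap;
use rand_core::OsRng;
use ciphersuite::Ristretto;
use dkg_pedpop::{Participant, ThresholdParams, ThresholdKeys, KeyGenMachine};

fn main() {
  const CONTEXT: [u8; 32] = [0u8; 32]; // illustrative; agree on this out-of-band
  const T: u16 = 2;
  const N: u16 = 3;

  // Round 1: each participant generates coefficients and broadcasts commitments
  let mut machines = HashMap::new();
  let mut commitments = HashMap::new();
  for i in (1 ..= N).map(|i| Participant::new(i).unwrap()) {
    let params = ThresholdParams::new(T, N, i).unwrap();
    let (machine, msg) =
      KeyGenMachine::<Ristretto>::new(params, CONTEXT).generate_coefficients(&mut OsRng);
    machines.insert(i, machine);
    commitments.insert(i, msg);
  }

  // Round 2: each participant verifies the others' commitments and produces encrypted shares
  let mut machines_r2 = HashMap::new();
  let mut shares = HashMap::new();
  for (i, machine) in machines {
    let others =
      commitments.iter().filter(|(l, _)| **l != i).map(|(l, msg)| (*l, msg.clone())).collect();
    let (machine, these_shares) = machine.generate_secret_shares(&mut OsRng, others).unwrap();
    machines_r2.insert(i, machine);
    shares.insert(i, these_shares);
  }

  // Round 3: each participant sums the shares addressed to them and completes
  let mut keys: HashMap<Participant, ThresholdKeys<Ristretto>> = HashMap::new();
  for (i, machine) in machines_r2 {
    let ours = shares.iter().filter(|(l, _)| **l != i).map(|(l, m)| (*l, m[&i].clone())).collect();
    keys.insert(i, machine.calculate_share(&mut OsRng, ours).unwrap().complete());
  }

  // Everyone ends with the same group key
  let group_key = keys[&Participant::new(1).unwrap()].group_key();
  assert!(keys.values().all(|k| k.group_key() == group_key));
}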

View File

@@ -21,7 +21,7 @@ use multiexp::BatchVerifier;
use schnorr::SchnorrSignature; use schnorr::SchnorrSignature;
use dleq::DLEqProof; use dleq::DLEqProof;
use crate::{Participant, ThresholdParams}; use dkg::{Participant, ThresholdParams};
mod sealed { mod sealed {
use super::*; use super::*;
@@ -69,7 +69,7 @@ impl<C: Ciphersuite, M: Message> EncryptionKeyMessage<C, M> {
buf buf
} }
#[cfg(any(test, feature = "tests"))] #[cfg(test)]
pub(crate) fn enc_key(&self) -> C::G { pub(crate) fn enc_key(&self) -> C::G {
self.enc_key self.enc_key
} }

View File

@@ -1,15 +1,20 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
// This crate requires `dleq` which doesn't support no-std via std-shims
// #![cfg_attr(not(feature = "std"), no_std)]
use core::{marker::PhantomData, ops::Deref, fmt}; use core::{marker::PhantomData, ops::Deref, fmt};
use std::{ use std::{
io::{self, Read, Write}, io::{self, Read, Write},
collections::HashMap, collections::HashMap,
}; };
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use multiexp::{multiexp_vartime, BatchVerifier};
use ciphersuite::{ use ciphersuite::{
group::{ group::{
ff::{Field, PrimeField}, ff::{Field, PrimeField},
@@ -17,29 +22,75 @@ use ciphersuite::{
}, },
Ciphersuite, Ciphersuite,
}; };
use multiexp::{multiexp_vartime, BatchVerifier};
use schnorr::SchnorrSignature; use schnorr::SchnorrSignature;
use crate::{ pub use dkg::*;
Participant, DkgError, ThresholdParams, Interpolation, ThresholdCore, validate_map,
encryption::{
ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, Decryption, EncryptionKeyProof,
DecryptionError,
},
};
type FrostError<C> = DkgError<EncryptionKeyProof<C>>; mod encryption;
pub use encryption::*;
#[cfg(test)]
mod tests;
/// Errors possible during key generation.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum PedPoPError<C: Ciphersuite> {
/// An incorrect amount of participants was provided.
#[error("incorrect amount of participants (expected {expected}, found {found})")]
IncorrectAmountOfParticipants { expected: usize, found: usize },
/// An invalid proof of knowledge was provided.
#[error("invalid proof of knowledge (participant {0})")]
InvalidCommitments(Participant),
/// An invalid DKG share was provided.
#[error("invalid share (participant {participant}, blame {blame})")]
InvalidShare { participant: Participant, blame: Option<EncryptionKeyProof<C>> },
/// A participant was missing.
#[error("missing participant {0}")]
MissingParticipant(Participant),
/// An error propagated from the underlying `dkg` crate.
#[error("error from dkg ({0})")]
DkgError(DkgError),
}
// Validate a map of values to have the expected included participants
fn validate_map<T, C: Ciphersuite>(
map: &HashMap<Participant, T>,
included: &[Participant],
ours: Participant,
) -> Result<(), PedPoPError<C>> {
if (map.len() + 1) != included.len() {
Err(PedPoPError::IncorrectAmountOfParticipants {
expected: included.len(),
found: map.len() + 1,
})?;
}
for included in included {
if *included == ours {
if map.contains_key(included) {
Err(PedPoPError::DkgError(DkgError::DuplicatedParticipant(*included)))?;
}
continue;
}
if !map.contains_key(included) {
Err(PedPoPError::MissingParticipant(*included))?;
}
}
Ok(())
}
#[allow(non_snake_case)] #[allow(non_snake_case)]
fn challenge<C: Ciphersuite>(context: [u8; 32], l: Participant, R: &[u8], Am: &[u8]) -> C::F { fn challenge<C: Ciphersuite>(context: [u8; 32], l: Participant, R: &[u8], Am: &[u8]) -> C::F {
let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2"); let mut transcript = RecommendedTranscript::new(b"DKG PedPoP v0.2");
transcript.domain_separate(b"schnorr_proof_of_knowledge"); transcript.domain_separate(b"schnorr_proof_of_knowledge");
transcript.append_message(b"context", context); transcript.append_message(b"context", context);
transcript.append_message(b"participant", l.to_bytes()); transcript.append_message(b"participant", l.to_bytes());
transcript.append_message(b"nonce", R); transcript.append_message(b"nonce", R);
transcript.append_message(b"commitments", Am); transcript.append_message(b"commitments", Am);
C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr")) C::hash_to_F(b"DKG-PedPoP-proof_of_knowledge-0", &transcript.challenge(b"schnorr"))
} }
/// The commitments message, intended to be broadcast to all other parties. /// The commitments message, intended to be broadcast to all other parties.
@@ -98,7 +149,7 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
KeyGenMachine { params, context, _curve: PhantomData } KeyGenMachine { params, context, _curve: PhantomData }
} }
/// Start generating a key according to the FROST DKG spec. /// Start generating a key according to the PedPoP DKG specification present in the FROST paper.
/// ///
/// Returns a commitments message to be sent to all parties over an authenticated channel. If any /// Returns a commitments message to be sent to all parties over an authenticated channel. If any
/// party submits multiple sets of commitments, they MUST be treated as malicious. /// party submits multiple sets of commitments, they MUST be treated as malicious.
@@ -106,7 +157,7 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
self, self,
rng: &mut R, rng: &mut R,
) -> (SecretShareMachine<C>, EncryptionKeyMessage<C, Commitments<C>>) { ) -> (SecretShareMachine<C>, EncryptionKeyMessage<C, Commitments<C>>) {
let t = usize::from(self.params.t); let t = usize::from(self.params.t());
let mut coefficients = Vec::with_capacity(t); let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t); let mut commitments = Vec::with_capacity(t);
let mut cached_msg = vec![]; let mut cached_msg = vec![];
@@ -133,7 +184,7 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
); );
// Additionally create an encryption mechanism to protect the secret shares // Additionally create an encryption mechanism to protect the secret shares
let encryption = Encryption::new(self.context, self.params.i, rng); let encryption = Encryption::new(self.context, self.params.i(), rng);
// Step 4: Broadcast // Step 4: Broadcast
let msg = let msg =
@@ -250,21 +301,21 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
&mut self, &mut self,
rng: &mut R, rng: &mut R,
mut commitment_msgs: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>, mut commitment_msgs: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
) -> Result<HashMap<Participant, Vec<C::G>>, FrostError<C>> { ) -> Result<HashMap<Participant, Vec<C::G>>, PedPoPError<C>> {
validate_map( validate_map(
&commitment_msgs, &commitment_msgs,
&(1 ..= self.params.n()).map(Participant).collect::<Vec<_>>(), &self.params.all_participant_indexes().collect::<Vec<_>>(),
self.params.i(), self.params.i(),
)?; )?;
let mut batch = BatchVerifier::<Participant, C::G>::new(commitment_msgs.len()); let mut batch = BatchVerifier::<Participant, C::G>::new(commitment_msgs.len());
let mut commitments = HashMap::new(); let mut commitments = HashMap::new();
for l in (1 ..= self.params.n()).map(Participant) { for l in self.params.all_participant_indexes() {
let Some(msg) = commitment_msgs.remove(&l) else { continue }; let Some(msg) = commitment_msgs.remove(&l) else { continue };
let mut msg = self.encryption.register(l, msg); let mut msg = self.encryption.register(l, msg);
if msg.commitments.len() != self.params.t().into() { if msg.commitments.len() != self.params.t().into() {
Err(FrostError::InvalidCommitments(l))?; Err(PedPoPError::InvalidCommitments(l))?;
} }
// Step 5: Validate each proof of knowledge // Step 5: Validate each proof of knowledge
@@ -280,9 +331,9 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
commitments.insert(l, msg.commitments.drain(..).collect::<Vec<_>>()); commitments.insert(l, msg.commitments.drain(..).collect::<Vec<_>>());
} }
batch.verify_vartime_with_vartime_blame().map_err(FrostError::InvalidCommitments)?; batch.verify_vartime_with_vartime_blame().map_err(PedPoPError::InvalidCommitments)?;
commitments.insert(self.params.i, self.our_commitments.drain(..).collect()); commitments.insert(self.params.i(), self.our_commitments.drain(..).collect());
Ok(commitments) Ok(commitments)
} }
@@ -299,13 +350,13 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
commitments: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>, commitments: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
) -> Result< ) -> Result<
(KeyMachine<C>, HashMap<Participant, EncryptedMessage<C, SecretShare<C::F>>>), (KeyMachine<C>, HashMap<Participant, EncryptedMessage<C, SecretShare<C::F>>>),
FrostError<C>, PedPoPError<C>,
> { > {
let commitments = self.verify_r1(&mut *rng, commitments)?; let commitments = self.verify_r1(&mut *rng, commitments)?;
// Step 1: Generate secret shares for all other parties // Step 1: Generate secret shares for all other parties
let mut res = HashMap::new(); let mut res = HashMap::new();
for l in (1 ..= self.params.n()).map(Participant) { for l in self.params.all_participant_indexes() {
// Don't insert our own shares to the byte buffer which is meant to be sent around // Don't insert our own shares to the byte buffer which is meant to be sent around
// An app developer could accidentally send it. Best to keep this black boxed // An app developer could accidentally send it. Best to keep this black boxed
if l == self.params.i() { if l == self.params.i() {
@@ -413,10 +464,10 @@ impl<C: Ciphersuite> KeyMachine<C> {
mut self, mut self,
rng: &mut R, rng: &mut R,
mut shares: HashMap<Participant, EncryptedMessage<C, SecretShare<C::F>>>, mut shares: HashMap<Participant, EncryptedMessage<C, SecretShare<C::F>>>,
) -> Result<BlameMachine<C>, FrostError<C>> { ) -> Result<BlameMachine<C>, PedPoPError<C>> {
validate_map( validate_map(
&shares, &shares,
&(1 ..= self.params.n()).map(Participant).collect::<Vec<_>>(), &self.params.all_participant_indexes().collect::<Vec<_>>(),
self.params.i(), self.params.i(),
)?; )?;
@@ -427,7 +478,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes); self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes);
let share = let share =
Zeroizing::new(Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| { Zeroizing::new(Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| {
FrostError::InvalidShare { participant: l, blame: Some(blame.clone()) } PedPoPError::InvalidShare { participant: l, blame: Some(blame.clone()) }
})?); })?);
share_bytes.zeroize(); share_bytes.zeroize();
*self.secret += share.deref(); *self.secret += share.deref();
@@ -444,7 +495,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
BatchId::Decryption(l) => (l, None), BatchId::Decryption(l) => (l, None),
BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())), BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())),
}; };
FrostError::InvalidShare { participant: l, blame } PedPoPError::InvalidShare { participant: l, blame }
})?; })?;
// Stripe commitments per t and sum them in advance. Calculating verification shares relies on // Stripe commitments per t and sum them in advance. Calculating verification shares relies on
@@ -458,7 +509,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
// Calculate each user's verification share // Calculate each user's verification share
let mut verification_shares = HashMap::new(); let mut verification_shares = HashMap::new();
for i in (1 ..= self.params.n()).map(Participant) { for i in self.params.all_participant_indexes() {
verification_shares.insert( verification_shares.insert(
i, i,
if i == self.params.i() { if i == self.params.i() {
@@ -473,13 +524,10 @@ impl<C: Ciphersuite> KeyMachine<C> {
Ok(BlameMachine { Ok(BlameMachine {
commitments, commitments,
encryption: encryption.into_decryption(), encryption: encryption.into_decryption(),
result: Some(ThresholdCore { result: Some(
params, ThresholdKeys::new(params, Interpolation::Lagrange, secret, verification_shares)
interpolation: Interpolation::Lagrange, .map_err(PedPoPError::DkgError)?,
secret_share: secret, ),
group_key: stripes[0],
verification_shares,
}),
}) })
} }
} }
@@ -488,7 +536,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
pub struct BlameMachine<C: Ciphersuite> { pub struct BlameMachine<C: Ciphersuite> {
commitments: HashMap<Participant, Vec<C::G>>, commitments: HashMap<Participant, Vec<C::G>>,
encryption: Decryption<C>, encryption: Decryption<C>,
result: Option<ThresholdCore<C>>, result: Option<ThresholdKeys<C>>,
} }
impl<C: Ciphersuite> fmt::Debug for BlameMachine<C> { impl<C: Ciphersuite> fmt::Debug for BlameMachine<C> {
@@ -520,7 +568,7 @@ impl<C: Ciphersuite> BlameMachine<C> {
/// territory of consensus protocols. This library does not handle that nor does it provide any /// territory of consensus protocols. This library does not handle that nor does it provide any
/// tooling to do so. This function is solely intended to force users to acknowledge they're /// tooling to do so. This function is solely intended to force users to acknowledge they're
/// completing the protocol, not processing any blame. /// completing the protocol, not processing any blame.
pub fn complete(self) -> ThresholdCore<C> { pub fn complete(self) -> ThresholdKeys<C> {
self.result.unwrap() self.result.unwrap()
} }
@@ -602,12 +650,12 @@ impl<C: Ciphersuite> AdditionalBlameMachine<C> {
context: [u8; 32], context: [u8; 32],
n: u16, n: u16,
mut commitment_msgs: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>, mut commitment_msgs: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
) -> Result<Self, FrostError<C>> { ) -> Result<Self, PedPoPError<C>> {
let mut commitments = HashMap::new(); let mut commitments = HashMap::new();
let mut encryption = Decryption::new(context); let mut encryption = Decryption::new(context);
for i in 1 ..= n { for i in 1 ..= n {
let i = Participant::new(i).unwrap(); let i = Participant::new(i).unwrap();
let Some(msg) = commitment_msgs.remove(&i) else { Err(DkgError::MissingParticipant(i))? }; let Some(msg) = commitment_msgs.remove(&i) else { Err(PedPoPError::MissingParticipant(i))? };
commitments.insert(i, encryption.register(i, msg).commitments); commitments.insert(i, encryption.register(i, msg).commitments);
} }
Ok(AdditionalBlameMachine(BlameMachine { commitments, encryption, result: None })) Ok(AdditionalBlameMachine(BlameMachine { commitments, encryption, result: None }))

View File

@@ -0,0 +1,345 @@
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng, OsRng};
use ciphersuite::{Ciphersuite, Ristretto};
use crate::*;
const THRESHOLD: u16 = 3;
const PARTICIPANTS: u16 = 5;
/// Clone a map without a specific value.
fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
type PedPoPEncryptedMessage<C> = EncryptedMessage<C, SecretShare<<C as Ciphersuite>::F>>;
type PedPoPSecretShares<C> = HashMap<Participant, PedPoPEncryptedMessage<C>>;
const CONTEXT: [u8; 32] = *b"DKG Test Key Generation ";
// Commit, then return commitment messages, enc keys, and shares
#[allow(clippy::type_complexity)]
fn commit_enc_keys_and_shares<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> (
HashMap<Participant, KeyMachine<C>>,
HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
HashMap<Participant, C::G>,
HashMap<Participant, PedPoPSecretShares<C>>,
) {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
let mut enc_keys = HashMap::new();
for i in (1 ..= PARTICIPANTS).map(|i| Participant::new(i).unwrap()) {
let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap();
let machine = KeyGenMachine::<C>::new(params, CONTEXT);
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
commitments.insert(
i,
EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params)
.unwrap(),
);
enc_keys.insert(i, commitments[&i].enc_key());
}
let mut secret_shares = HashMap::new();
let machines = machines
.drain()
.map(|(l, machine)| {
let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
(
l,
EncryptedMessage::read::<&[u8]>(
&mut share.serialize().as_ref(),
// Only t/n actually matters, so hardcode i to 1 here
ThresholdParams::new(THRESHOLD, PARTICIPANTS, Participant::new(1).unwrap()).unwrap(),
)
.unwrap(),
)
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares);
(l, machine)
})
.collect::<HashMap<_, _>>();
(machines, commitments, enc_keys, secret_shares)
}
fn generate_secret_shares<C: Ciphersuite>(
shares: &HashMap<Participant, PedPoPSecretShares<C>>,
recipient: Participant,
) -> PedPoPSecretShares<C> {
let mut our_secret_shares = HashMap::new();
for (i, shares) in shares {
if recipient == *i {
continue;
}
our_secret_shares.insert(*i, shares[&recipient].clone());
}
our_secret_shares
}
/// Fully perform the PedPoP key generation algorithm.
fn pedpop_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdKeys<C>> {
let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng);
let mut verification_shares = None;
let mut group_key = None;
machines
.drain()
.map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete();
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(
these_keys
.params()
.all_participant_indexes()
.map(|i| (i, these_keys.original_verification_share(i)))
.collect::<HashMap<_, _>>(),
);
}
assert_eq!(
verification_shares.as_ref().unwrap(),
&these_keys
.params()
.all_participant_indexes()
.map(|i| (i, these_keys.original_verification_share(i)))
.collect::<HashMap<_, _>>()
);
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, these_keys)
})
.collect::<HashMap<_, _>>()
}
const ONE: Participant = Participant::new(1).unwrap();
const TWO: Participant = Participant::new(2).unwrap();
#[test]
fn test_pedpop() {
let _ = core::hint::black_box(pedpop_gen::<_, Ristretto>(&mut OsRng));
}
fn test_blame(
commitment_msgs: &HashMap<Participant, EncryptionKeyMessage<Ristretto, Commitments<Ristretto>>>,
machines: Vec<BlameMachine<Ristretto>>,
msg: &PedPoPEncryptedMessage<Ristretto>,
blame: &Option<EncryptionKeyProof<Ristretto>>,
) {
for machine in machines {
let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone());
assert_eq!(blamed, ONE);
// Verify additional blame also works
assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE);
// Verify machines constructed with AdditionalBlameMachine::new work
assert_eq!(
AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame(
ONE,
TWO,
msg.clone(),
blame.clone()
),
ONE,
);
}
}
// TODO: Write a macro which expands to the following
#[test]
fn invalid_encryption_pop_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the PoP of the encrypted message from 1 to 2
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop();
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
assert_eq!(
machine.err(),
Some(PedPoPError::InvalidShare { participant: ONE, blame: None })
);
// Explicitly declare we have a blame object, which happens to be None since invalid PoP
// is self-explainable
blame = Some(None);
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_ecdh_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the share to trigger a blame event
// Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass
// While here, 2 is malicious, this is so 1 creates the blame proof
// We then malleate 1's blame proof, so 1 ends up malicious
// Doesn't simply invalidate the PoP as that won't have a blame statement
// By mutating the encrypted data, we do ensure a blame statement is created
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_key();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
// This should be largely equivalent to the prior test
#[test]
fn invalid_dleq_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_serialization_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_value_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}

View File

@@ -0,0 +1,34 @@
[package]
name = "dkg-promote"
version = "0.6.0"
description = "Promotions for keys from the dkg crate"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/promote"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "2", default-features = false, features = ["std"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] }
dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] }
dkg = { path = "../", default-features = false, features = ["std"] }
[dev-dependencies]
zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] }
dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] }

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,12 @@
# Distributed Key Generation - Promote
This crate implements 'promotions' for keys from the [`dkg`](https://docs.rs/dkg) crate. A promotion
takes a set of keys and maps it to a different `Ciphersuite`.
This crate was originally part of the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.
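
A compiling sketch (not from this commit) of the promotion flow for one participant; the `exchange` closure is a hypothetical stand-in for whatever authenticated broadcast the application uses to swap `GeneratorProof`s with the other participants.

use std::collections::HashMap;
use rand_core::OsRng;
use ciphersuite::Ciphersuite;
use dkg_promote::{Participant, ThresholdKeys, GeneratorPromotion, GeneratorProof, PromotionError};

/// Promote one participant's keys from ciphersuite C1 to C2 (same field and group, different
/// generator), given some way to exchange proofs with every other participant.
fn promote_keys<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>>(
  keys: ThresholdKeys<C1>,
  exchange: impl FnOnce(GeneratorProof<C1>) -> HashMap<Participant, GeneratorProof<C1>>,
) -> Result<ThresholdKeys<C2>, PromotionError> {
  // Produce a DLEq proof that our share was correctly mapped to the new generator
  let (promotion, our_proof) = GeneratorPromotion::<C1, C2>::promote(&mut OsRng, keys);
  // Collect the proofs of every other participant (application-defined transport)
  let proofs_from_others = exchange(our_proof);
  // Verify each proof and assemble the promoted ThresholdKeys
  promotion.complete(&proofs_from_others)
}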

View File

@@ -1,25 +1,52 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
// This crate requires `dleq` which doesn't support no-std via std-shims
// #![cfg_attr(not(feature = "std"), no_std)]
use core::{marker::PhantomData, ops::Deref}; use core::{marker::PhantomData, ops::Deref};
use std::{ use std::{
io::{self, Read, Write}, io::{self, Read, Write},
sync::Arc,
collections::HashMap, collections::HashMap,
}; };
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use ciphersuite::{ use ciphersuite::{group::GroupEncoding, Ciphersuite};
group::{ff::Field, GroupEncoding},
Ciphersuite,
};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use dleq::DLEqProof; use dleq::DLEqProof;
use crate::{Participant, DkgError, ThresholdCore, ThresholdKeys, validate_map}; pub use dkg::*;
/// Promote a set of keys to another Ciphersuite definition. #[cfg(test)]
pub trait CiphersuitePromote<C2: Ciphersuite> { mod tests;
fn promote(self) -> ThresholdKeys<C2>;
/// Errors encountered when promoting keys.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum PromotionError {
/// Invalid participant identifier.
#[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")]
InvalidParticipant {
/// The total amount of participants.
n: u16,
/// The specified participant.
participant: Participant,
},
/// An incorrect amount of participants was specified.
#[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")]
IncorrectAmountOfParticipants {
/// The threshold required.
t: u16,
/// The total amount of participants.
n: u16,
/// The amount of participants specified.
amount: usize,
},
/// Participant provided an invalid proof.
#[error("invalid proof {0}")]
InvalidProof(Participant),
} }
fn transcript<G: GroupEncoding>(key: &G, i: Participant) -> RecommendedTranscript { fn transcript<G: GroupEncoding>(key: &G, i: Participant) -> RecommendedTranscript {
@@ -68,8 +95,9 @@ pub struct GeneratorPromotion<C1: Ciphersuite, C2: Ciphersuite> {
} }
impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<C1, C2> { impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<C1, C2> {
/// Begin promoting keys from one generator to another. Returns a proof this share was properly /// Begin promoting keys from one generator to another.
/// promoted. ///
/// Returns a proof this share was properly promoted.
pub fn promote<R: RngCore + CryptoRng>( pub fn promote<R: RngCore + CryptoRng>(
rng: &mut R, rng: &mut R,
base: ThresholdKeys<C1>, base: ThresholdKeys<C1>,
@@ -79,7 +107,7 @@ impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<
share: C2::generator() * base.secret_share().deref(), share: C2::generator() * base.secret_share().deref(),
proof: DLEqProof::prove( proof: DLEqProof::prove(
rng, rng,
&mut transcript(&base.core.group_key(), base.params().i), &mut transcript(&base.original_group_key(), base.params().i()),
&[C1::generator(), C2::generator()], &[C1::generator(), C2::generator()],
base.secret_share(), base.secret_share(),
), ),
@@ -92,36 +120,49 @@ impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<
pub fn complete( pub fn complete(
self, self,
proofs: &HashMap<Participant, GeneratorProof<C1>>, proofs: &HashMap<Participant, GeneratorProof<C1>>,
) -> Result<ThresholdKeys<C2>, DkgError<()>> { ) -> Result<ThresholdKeys<C2>, PromotionError> {
let params = self.base.params(); let params = self.base.params();
validate_map(proofs, &(1 ..= params.n).map(Participant).collect::<Vec<_>>(), params.i)?; if proofs.len() != (usize::from(params.n()) - 1) {
Err(PromotionError::IncorrectAmountOfParticipants {
let original_shares = self.base.verification_shares(); t: params.n(),
n: params.n(),
amount: proofs.len() + 1,
})?;
}
for i in proofs.keys().copied() {
if u16::from(i) > params.n() {
Err(PromotionError::InvalidParticipant { n: params.n(), participant: i })?;
}
}
let mut verification_shares = HashMap::new(); let mut verification_shares = HashMap::new();
verification_shares.insert(params.i, self.proof.share); verification_shares.insert(params.i(), self.proof.share);
for (i, proof) in proofs { for i in 1 ..= params.n() {
let i = *i; let i = Participant::new(i).unwrap();
if i == params.i() {
continue;
}
let proof = proofs.get(&i).unwrap();
proof proof
.proof .proof
.verify( .verify(
&mut transcript(&self.base.core.group_key(), i), &mut transcript(&self.base.original_group_key(), i),
&[C1::generator(), C2::generator()], &[C1::generator(), C2::generator()],
&[original_shares[&i], proof.share], &[self.base.original_verification_share(i), proof.share],
) )
.map_err(|_| DkgError::InvalidCommitments(i))?; .map_err(|_| PromotionError::InvalidProof(i))?;
verification_shares.insert(i, proof.share); verification_shares.insert(i, proof.share);
} }
Ok(ThresholdKeys { Ok(
core: Arc::new(ThresholdCore::new( ThresholdKeys::new(
params, params,
self.base.core.interpolation.clone(), self.base.interpolation().clone(),
self.base.secret_share().clone(), self.base.secret_share().clone(),
verification_shares, verification_shares,
)), )
scalar: C2::F::ONE, .unwrap(),
offset: C2::F::ZERO, )
})
} }
} }

View File

@@ -0,0 +1,113 @@
use core::marker::PhantomData;
use std::collections::HashMap;
use zeroize::{Zeroize, Zeroizing};
use rand_core::OsRng;
use ciphersuite::{
group::{ff::Field, Group},
Ciphersuite, Ristretto,
};
use dkg::*;
use dkg_recovery::recover_key;
use crate::{GeneratorPromotion, GeneratorProof};
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltGenerator<C: Ciphersuite> {
_curve: PhantomData<C>,
}
impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {
type F = C::F;
type G = C::G;
type H = C::H;
const ID: &'static [u8] = b"Alternate Ciphersuite";
fn generator() -> Self::G {
C::G::generator() * <C as Ciphersuite>::hash_to_F(b"DKG Promotion Test", b"generator")
}
fn reduce_512(scalar: [u8; 64]) -> Self::F {
<C as Ciphersuite>::reduce_512(scalar)
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
<C as Ciphersuite>::hash_to_F(dst, data)
}
}
/// Clone a map without a specific value.
pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
// Test promotion of threshold keys to another generator
#[test]
fn test_generator_promotion() {
// Generate a set of `ThresholdKeys`
const PARTICIPANTS: u16 = 5;
let keys: [ThresholdKeys<_>; PARTICIPANTS as usize] = {
let shares: [<Ristretto as Ciphersuite>::F; PARTICIPANTS as usize] =
core::array::from_fn(|_| <Ristretto as Ciphersuite>::F::random(&mut OsRng));
let verification_shares = (0 .. PARTICIPANTS)
.map(|i| {
(
Participant::new(i + 1).unwrap(),
<Ristretto as Ciphersuite>::generator() * shares[usize::from(i)],
)
})
.collect::<HashMap<_, _>>();
core::array::from_fn(|i| {
ThresholdKeys::new(
ThresholdParams::new(
PARTICIPANTS,
PARTICIPANTS,
Participant::new(u16::try_from(i + 1).unwrap()).unwrap(),
)
.unwrap(),
Interpolation::Constant(vec![<Ristretto as Ciphersuite>::F::ONE; PARTICIPANTS as usize]),
Zeroizing::new(shares[i]),
verification_shares.clone(),
)
.unwrap()
})
};
// Perform the promotion
let mut promotions = HashMap::new();
let mut proofs = HashMap::new();
for keys in &keys {
let i = keys.params().i();
let (promotion, proof) =
GeneratorPromotion::<_, AltGenerator<Ristretto>>::promote(&mut OsRng, keys.clone());
promotions.insert(i, promotion);
proofs.insert(
i,
GeneratorProof::<Ristretto>::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap(),
);
}
// Complete the promotion, and verify it worked
let new_group_key = AltGenerator::<Ristretto>::generator() * *recover_key(&keys).unwrap();
for (i, promoting) in promotions.drain() {
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
assert_eq!(keys[usize::from(u16::from(i) - 1)].params(), promoted.params());
assert_eq!(keys[usize::from(u16::from(i) - 1)].secret_share(), promoted.secret_share());
assert_eq!(new_group_key, promoted.group_key());
for l in 0 .. PARTICIPANTS {
let verification_share =
promoted.original_verification_share(Participant::new(l + 1).unwrap());
assert_eq!(
AltGenerator::<Ristretto>::generator() * **keys[usize::from(l)].secret_share(),
verification_share
);
}
}
}

View File

@@ -0,0 +1,34 @@
[package]
name = "dkg-recovery"
version = "0.6.0"
description = "Recover a secret-shared key from a collection of dkg::ThresholdKeys"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recovery"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
zeroize = { version = "^1.5", default-features = false }
thiserror = { version = "2", default-features = false }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] }
dkg = { path = "../", default-features = false }
[features]
std = [
"zeroize/std",
"thiserror/std",
"ciphersuite/std",
"dkg/std",
]
default = ["std"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,14 @@
# Distributed Key Generation
A crate implementing a type for keys, presumably the result of a distributed key generation
protocol, along with utilities for working with them.
This crate used to host implementations of distributed key generation protocols as well (hence the
name). Those have been smashed into their own crates, such as
[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop).
Before being smashed, this crate was [audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.
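As a hedged sketch (not part of this commit) of constructing the key type this crate now centers on, mirroring the promotion test above; `Ristretto` comes from `ciphersuite`, and everything else shown is illustrative:

// A minimal sketch, assuming a 3-of-3 setup whose shares simply sum to the key,
// so constant interpolation with coefficients of one applies (t == n is required).
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::OsRng;
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use dkg::{Participant, ThresholdParams, Interpolation, ThresholdKeys};

fn direct_keys() -> Vec<ThresholdKeys<Ristretto>> {
  const N: u16 = 3;
  // Each participant's additive secret share
  let shares: Vec<_> =
    (0 .. N).map(|_| <Ristretto as Ciphersuite>::F::random(&mut OsRng)).collect();
  // Commitments to every share, keyed by participant index
  let verification_shares = (0 .. N)
    .map(|i| {
      (
        Participant::new(i + 1).unwrap(),
        <Ristretto as Ciphersuite>::generator() * shares[usize::from(i)],
      )
    })
    .collect::<HashMap<_, _>>();
  (0 .. N)
    .map(|i| {
      ThresholdKeys::new(
        ThresholdParams::new(N, N, Participant::new(i + 1).unwrap()).unwrap(),
        Interpolation::Constant(vec![<Ristretto as Ciphersuite>::F::ONE; usize::from(N)]),
        Zeroizing::new(shares[usize::from(i)]),
        verification_shares.clone(),
      )
      .unwrap()
    })
    .collect()
}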

View File

@@ -0,0 +1,85 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![no_std]
use core::ops::{Deref, DerefMut};
extern crate alloc;
use alloc::vec::Vec;
use zeroize::Zeroizing;
use ciphersuite::Ciphersuite;
pub use dkg::*;
/// Errors encountered when recovering a secret-shared key from a collection of
/// `dkg::ThresholdKeys`.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum RecoveryError {
/// No keys were provided.
#[error("no keys provided")]
NoKeysProvided,
  /// Not enough keys were provided.
  #[error("not enough keys provided (threshold required {required}, provided {provided})")]
  NotEnoughKeysProvided {
    /// The amount of keys required (the threshold).
    required: u16,
    /// The amount of keys provided.
    provided: usize,
  },
/// The keys had inconsistent parameters.
#[error("keys had inconsistent parameters")]
InconsistentParameters,
/// The keys are from distinct secret-sharing sessions or otherwise corrupt.
#[error("recovery failed")]
Failure,
/// An error propagated from the underlying `dkg` crate.
#[error("error from dkg ({0})")]
DkgError(DkgError),
}
/// Recover a shared secret from a collection of `dkg::ThresholdKeys`.
pub fn recover_key<C: Ciphersuite>(
keys: &[ThresholdKeys<C>],
) -> Result<Zeroizing<C::F>, RecoveryError> {
let included = keys.iter().map(|keys| keys.params().i()).collect::<Vec<_>>();
let keys_len = keys.len();
let mut keys = keys.iter();
let first_keys = keys.next().ok_or(RecoveryError::NoKeysProvided)?;
{
let t = first_keys.params().t();
if keys_len < usize::from(t) {
Err(RecoveryError::NotEnoughKeysProvided { required: t, provided: keys_len })?;
}
}
{
let first_params = (
first_keys.params().t(),
first_keys.params().n(),
first_keys.group_key(),
first_keys.current_scalar(),
first_keys.current_offset(),
);
for keys in keys.clone() {
let params = (
keys.params().t(),
keys.params().n(),
keys.group_key(),
keys.current_scalar(),
keys.current_offset(),
);
if params != first_params {
Err(RecoveryError::InconsistentParameters)?;
}
}
}
let mut res: Zeroizing<_> =
first_keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().clone();
for keys in keys {
*res.deref_mut() +=
keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().deref();
}
if (C::generator() * res.deref()) != first_keys.group_key() {
Err(RecoveryError::Failure)?;
}
Ok(res)
}
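A hedged usage sketch of the function above, following the same pattern as the promotion test earlier in this commit; the assertion merely restates the check `recover_key` already performs:

use ciphersuite::{Ciphersuite, Ristretto};
use dkg_recovery::{recover_key, RecoveryError, ThresholdKeys};

// `keys` must contain at least `t` mutually consistent key shares.
fn check_recovery(keys: &[ThresholdKeys<Ristretto>]) -> Result<(), RecoveryError> {
  let secret = recover_key::<Ristretto>(keys)?;
  assert_eq!(<Ristretto as Ciphersuite>::generator() * *secret, keys[0].group_key());
  Ok(())
}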

View File

@@ -2,39 +2,29 @@
#![doc = include_str!("../README.md")] #![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::{self, Debug}; use core::{
ops::Deref,
fmt::{self, Debug},
};
use std_shims::{sync::Arc, vec, vec::Vec, collections::HashMap, io};
use thiserror::Error; use zeroize::{Zeroize, Zeroizing};
use zeroize::Zeroize; use ciphersuite::{
group::{
/// MuSig-style key aggregation. ff::{Field, PrimeField},
pub mod musig; GroupEncoding,
},
/// Encryption types and utilities used to secure DKG messages. Ciphersuite,
#[cfg(feature = "std")] };
pub mod encryption;
/// The PedPoP distributed key generation protocol described in the
/// [FROST paper](https://eprint.iacr.org/2020/852), augmented to be verifiable.
#[cfg(feature = "std")]
pub mod pedpop;
/// Promote keys between ciphersuites.
#[cfg(feature = "std")]
pub mod promote;
/// Tests for application-provided curves and algorithms.
#[cfg(any(test, feature = "tests"))]
pub mod tests;
/// The ID of a participant, defined as a non-zero u16. /// The ID of a participant, defined as a non-zero u16.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))]
pub struct Participant(pub(crate) u16); pub struct Participant(u16);
impl Participant { impl Participant {
/// Create a new Participant identifier from a u16. /// Create a new Participant identifier from a u16.
pub fn new(i: u16) -> Option<Participant> { pub const fn new(i: u16) -> Option<Participant> {
if i == 0 { if i == 0 {
None None
} else { } else {
@@ -44,7 +34,7 @@ impl Participant {
/// Convert a Participant identifier to bytes. /// Convert a Participant identifier to bytes.
#[allow(clippy::wrong_self_convention)] #[allow(clippy::wrong_self_convention)]
pub fn to_bytes(&self) -> [u8; 2] { pub const fn to_bytes(&self) -> [u8; 2] {
self.0.to_le_bytes() self.0.to_le_bytes()
} }
} }
@@ -61,156 +51,177 @@ impl fmt::Display for Participant {
} }
} }
/// Various errors possible during key generation. /// Errors encountered when working with threshold keys.
#[derive(Clone, PartialEq, Eq, Debug, Error)] #[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum DkgError<B: Clone + PartialEq + Eq + Debug> { pub enum DkgError {
/// A parameter was zero. /// A parameter was zero.
#[cfg_attr(feature = "std", error("a parameter was 0 (threshold {0}, participants {1})"))] #[error("a parameter was 0 (threshold {t}, participants {n})")]
ZeroParameter(u16, u16), ZeroParameter {
/// The specified threshold.
t: u16,
/// The specified total amount of participants.
n: u16,
},
/// The threshold exceeded the amount of participants. /// The threshold exceeded the amount of participants.
#[cfg_attr(feature = "std", error("invalid threshold (max {1}, got {0})"))] #[error("invalid threshold (max {n}, got {t})")]
InvalidThreshold(u16, u16), InvalidThreshold {
/// The specified threshold.
t: u16,
/// The specified total amount of participants.
n: u16,
},
/// Invalid participant identifier. /// Invalid participant identifier.
#[cfg_attr( #[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")]
feature = "std", InvalidParticipant {
error("invalid participant (0 < participant <= {0}, yet participant is {1})") /// The total amount of participants.
)] n: u16,
InvalidParticipant(u16, Participant), /// The specified participant.
participant: Participant,
},
  /// An incorrect amount of verification shares was provided.
#[error("incorrect amount of verification shares (n = {n} yet {shares} provided)")]
IncorrectAmountOfVerificationShares {
/// The amount of participants.
n: u16,
/// The amount of shares provided.
shares: usize,
},
/// An inapplicable method of interpolation was specified.
#[error("inapplicable method of interpolation ({0})")]
InapplicableInterpolation(&'static str),
/// An incorrect amount of participants was specified.
#[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")]
IncorrectAmountOfParticipants {
/// The threshold required.
t: u16,
/// The total amount of participants.
n: u16,
/// The amount of participants specified.
amount: usize,
},
/// Invalid signing set.
#[cfg_attr(feature = "std", error("invalid signing set"))]
InvalidSigningSet,
/// Invalid amount of participants.
#[cfg_attr(feature = "std", error("invalid participant quantity (expected {0}, got {1})"))]
InvalidParticipantQuantity(usize, usize),
/// A participant was duplicated. /// A participant was duplicated.
#[cfg_attr(feature = "std", error("duplicated participant ({0})"))] #[error("a participant ({0}) was duplicated")]
DuplicatedParticipant(Participant), DuplicatedParticipant(Participant),
/// A participant was missing.
#[cfg_attr(feature = "std", error("missing participant {0}"))]
MissingParticipant(Participant),
/// An invalid proof of knowledge was provided. /// Not participating in declared signing set.
#[cfg_attr(feature = "std", error("invalid proof of knowledge (participant {0})"))] #[error("not participating in declared signing set")]
InvalidCommitments(Participant), NotParticipating,
/// An invalid DKG share was provided.
#[cfg_attr(feature = "std", error("invalid share (participant {participant}, blame {blame})"))]
InvalidShare { participant: Participant, blame: Option<B> },
} }
#[cfg(feature = "std")] // Manually implements BorshDeserialize so we can enforce it's a valid index
mod lib { #[cfg(feature = "borsh")]
pub use super::*; impl borsh::BorshDeserialize for Participant {
use core::ops::Deref;
use std::{io, sync::Arc, collections::HashMap};
use zeroize::Zeroizing;
use ciphersuite::{
group::{
ff::{Field, PrimeField},
GroupEncoding,
},
Ciphersuite,
};
#[cfg(feature = "borsh")]
impl borsh::BorshDeserialize for Participant {
fn deserialize_reader<R: io::Read>(reader: &mut R) -> io::Result<Self> { fn deserialize_reader<R: io::Read>(reader: &mut R) -> io::Result<Self> {
Participant::new(u16::deserialize_reader(reader)?) Participant::new(u16::deserialize_reader(reader)?)
.ok_or_else(|| io::Error::other("invalid participant")) .ok_or_else(|| io::Error::other("invalid participant"))
} }
} }
// Validate a map of values to have the expected included participants /// Parameters for a multisig.
pub(crate) fn validate_map<T, B: Clone + PartialEq + Eq + Debug>( #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
map: &HashMap<Participant, T>, #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))]
included: &[Participant], pub struct ThresholdParams {
ours: Participant,
) -> Result<(), DkgError<B>> {
if (map.len() + 1) != included.len() {
Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;
}
for included in included {
if *included == ours {
if map.contains_key(included) {
Err(DkgError::DuplicatedParticipant(*included))?;
}
continue;
}
if !map.contains_key(included) {
Err(DkgError::MissingParticipant(*included))?;
}
}
Ok(())
}
/// Parameters for a multisig.
// These fields should not be made public as they should be static
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))]
pub struct ThresholdParams {
/// Participants needed to sign on behalf of the group. /// Participants needed to sign on behalf of the group.
pub(crate) t: u16, t: u16,
/// Amount of participants. /// Amount of participants.
pub(crate) n: u16, n: u16,
/// Index of the participant being acted for. /// Index of the participant being acted for.
pub(crate) i: Participant, i: Participant,
}
/// An iterator over all participant indexes.
struct AllParticipantIndexes {
i: u16,
n: u16,
}
impl Iterator for AllParticipantIndexes {
type Item = Participant;
fn next(&mut self) -> Option<Participant> {
if self.i > self.n {
None?;
}
let res = Participant::new(self.i).unwrap();
// If i == n == u16::MAX, we cause `i > n` by setting `n` to `0` so the iterator becomes empty
if self.i == u16::MAX {
self.n = 0;
} else {
self.i += 1;
} }
impl ThresholdParams { Some(res)
}
}
impl ThresholdParams {
/// Create a new set of parameters. /// Create a new set of parameters.
pub fn new(t: u16, n: u16, i: Participant) -> Result<ThresholdParams, DkgError<()>> { pub const fn new(t: u16, n: u16, i: Participant) -> Result<ThresholdParams, DkgError> {
if (t == 0) || (n == 0) { if (t == 0) || (n == 0) {
Err(DkgError::ZeroParameter(t, n))?; return Err(DkgError::ZeroParameter { t, n });
} }
if t > n { if t > n {
Err(DkgError::InvalidThreshold(t, n))?; return Err(DkgError::InvalidThreshold { t, n });
} }
if u16::from(i) > n { if i.0 > n {
Err(DkgError::InvalidParticipant(n, i))?; return Err(DkgError::InvalidParticipant { n, participant: i });
} }
Ok(ThresholdParams { t, n, i }) Ok(ThresholdParams { t, n, i })
} }
/// Return the threshold for a multisig with these parameters. /// The threshold for a multisig with these parameters.
pub fn t(&self) -> u16 { pub const fn t(&self) -> u16 {
self.t self.t
} }
/// Return the amount of participants for a multisig with these parameters. /// The amount of participants for a multisig with these parameters.
pub fn n(&self) -> u16 { pub const fn n(&self) -> u16 {
self.n self.n
} }
/// Return the participant index of the share with these parameters. /// The participant index of the share with these parameters.
pub fn i(&self) -> Participant { pub const fn i(&self) -> Participant {
self.i self.i
} }
}
#[cfg(feature = "borsh")] /// An iterator over all participant indexes.
impl borsh::BorshDeserialize for ThresholdParams { pub fn all_participant_indexes(&self) -> impl Iterator<Item = Participant> {
AllParticipantIndexes { i: 1, n: self.n }
}
}
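A small hedged sketch of the parameter API above, including the new `all_participant_indexes` iterator; the values are illustrative:

use dkg::{Participant, ThresholdParams};

fn params_example() {
  // 3-of-5 multisig, acting as participant 2
  let params = ThresholdParams::new(3, 5, Participant::new(2).unwrap()).unwrap();
  assert_eq!((params.t(), params.n()), (3, 5));
  // Iterates Participant 1 ..= Participant n
  assert_eq!(params.all_participant_indexes().count(), 5);
}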
#[cfg(feature = "borsh")]
impl borsh::BorshDeserialize for ThresholdParams {
fn deserialize_reader<R: io::Read>(reader: &mut R) -> io::Result<Self> { fn deserialize_reader<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let t = u16::deserialize_reader(reader)?; let t = u16::deserialize_reader(reader)?;
let n = u16::deserialize_reader(reader)?; let n = u16::deserialize_reader(reader)?;
let i = Participant::deserialize_reader(reader)?; let i = Participant::deserialize_reader(reader)?;
ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}"))) ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}")))
} }
} }
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] /// A method of interpolation.
pub(crate) enum Interpolation<F: Zeroize + PrimeField> { #[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub enum Interpolation<F: Zeroize + PrimeField> {
/// A list of constant coefficients, one for each of the secret key shares.
/*
There's no benefit to using a full linear combination here, as the additive term would have
an entirely known evaluation with a fixed, public coefficient of `1`. Accordingly, the entire
key can simply be offset with the additive term to achieve the same effect.
*/
Constant(Vec<F>), Constant(Vec<F>),
/// Lagrange interpolation.
Lagrange, Lagrange,
} }
impl<F: Zeroize + PrimeField> Interpolation<F> { impl<F: Zeroize + PrimeField> Interpolation<F> {
pub(crate) fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F { /// The interpolation factor for this participant, within this signing set.
fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F {
match self { match self {
Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)], Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)],
Interpolation::Lagrange => { Interpolation::Lagrange => {
@@ -234,89 +245,301 @@ mod lib {
} }
} }
} }
} }
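For reference, the `Lagrange` branch (elided by this hunk) evaluates the standard Lagrange coefficient at zero; as a sketch, for participant i within signing set S:

% Standard Lagrange interpolation factor, evaluated at x = 0:
\lambda_{i,S} = \prod_{j \in S,\ j \neq i} \frac{j}{j - i}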
/// Keys and verification shares generated by a DKG. /// A key share for a thresholdized secret key.
/// Called core as they're expected to be wrapped into an Arc before usage in various operations. ///
#[derive(Clone, PartialEq, Eq)] /// This is the 'core' structure containing all relevant data, expected to be wrapped into an
pub struct ThresholdCore<C: Ciphersuite> { /// heap-allocated pointer to minimize copies on the stack (`ThresholdKeys`, the publicly exposed
/// Threshold Parameters. /// type).
pub(crate) params: ThresholdParams, #[derive(Clone, PartialEq, Eq)]
/// The interpolation method used. struct ThresholdCore<C: Ciphersuite> {
pub(crate) interpolation: Interpolation<C::F>, params: ThresholdParams,
group_key: C::G,
verification_shares: HashMap<Participant, C::G>,
interpolation: Interpolation<C::F>,
secret_share: Zeroizing<C::F>,
}
/// Secret share key. impl<C: Ciphersuite> fmt::Debug for ThresholdCore<C> {
pub(crate) secret_share: Zeroizing<C::F>,
/// Group key.
pub(crate) group_key: C::G,
/// Verification shares.
pub(crate) verification_shares: HashMap<Participant, C::G>,
}
impl<C: Ciphersuite> fmt::Debug for ThresholdCore<C> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt fmt
.debug_struct("ThresholdCore") .debug_struct("ThresholdCore")
.field("params", &self.params) .field("params", &self.params)
.field("interpolation", &self.interpolation)
.field("group_key", &self.group_key) .field("group_key", &self.group_key)
.field("verification_shares", &self.verification_shares) .field("verification_shares", &self.verification_shares)
.field("interpolation", &self.interpolation)
.finish_non_exhaustive() .finish_non_exhaustive()
} }
} }
impl<C: Ciphersuite> Zeroize for ThresholdCore<C> { impl<C: Ciphersuite> Zeroize for ThresholdCore<C> {
fn zeroize(&mut self) { fn zeroize(&mut self) {
self.params.zeroize(); self.params.zeroize();
self.interpolation.zeroize();
self.secret_share.zeroize();
self.group_key.zeroize(); self.group_key.zeroize();
for share in self.verification_shares.values_mut() { for share in self.verification_shares.values_mut() {
share.zeroize(); share.zeroize();
} }
self.interpolation.zeroize();
self.secret_share.zeroize();
} }
} }
impl<C: Ciphersuite> ThresholdCore<C> { /// Threshold keys usable for signing.
pub(crate) fn new( #[derive(Clone, Debug, Zeroize)]
pub struct ThresholdKeys<C: Ciphersuite> {
// Core keys.
#[zeroize(skip)]
core: Arc<Zeroizing<ThresholdCore<C>>>,
// Scalar applied to these keys.
scalar: C::F,
// Offset applied to these keys.
offset: C::F,
}
/// View of keys, interpolated and with the expected linear combination taken for usage.
#[derive(Clone)]
pub struct ThresholdView<C: Ciphersuite> {
interpolation: Interpolation<C::F>,
scalar: C::F,
offset: C::F,
group_key: C::G,
included: Vec<Participant>,
secret_share: Zeroizing<C::F>,
original_verification_shares: HashMap<Participant, C::G>,
verification_shares: HashMap<Participant, C::G>,
}
impl<C: Ciphersuite> fmt::Debug for ThresholdView<C> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt
.debug_struct("ThresholdView")
.field("interpolation", &self.interpolation)
.field("scalar", &self.scalar)
.field("offset", &self.offset)
.field("group_key", &self.group_key)
.field("included", &self.included)
.field("original_verification_shares", &self.original_verification_shares)
.field("verification_shares", &self.verification_shares)
.finish_non_exhaustive()
}
}
impl<C: Ciphersuite> Zeroize for ThresholdView<C> {
fn zeroize(&mut self) {
self.scalar.zeroize();
self.offset.zeroize();
self.group_key.zeroize();
self.included.zeroize();
self.secret_share.zeroize();
for share in self.original_verification_shares.values_mut() {
share.zeroize();
}
for share in self.verification_shares.values_mut() {
share.zeroize();
}
}
}
impl<C: Ciphersuite> ThresholdKeys<C> {
/// Create a new set of ThresholdKeys.
pub fn new(
params: ThresholdParams, params: ThresholdParams,
interpolation: Interpolation<C::F>, interpolation: Interpolation<C::F>,
secret_share: Zeroizing<C::F>, secret_share: Zeroizing<C::F>,
verification_shares: HashMap<Participant, C::G>, verification_shares: HashMap<Participant, C::G>,
) -> ThresholdCore<C> { ) -> Result<ThresholdKeys<C>, DkgError> {
if verification_shares.len() != usize::from(params.n()) {
Err(DkgError::IncorrectAmountOfVerificationShares {
n: params.n(),
shares: verification_shares.len(),
})?;
}
for participant in verification_shares.keys().copied() {
if u16::from(participant) > params.n() {
Err(DkgError::InvalidParticipant { n: params.n(), participant })?;
}
}
match &interpolation {
Interpolation::Constant(_) => {
if params.t() != params.n() {
Err(DkgError::InapplicableInterpolation("constant interpolation for keys where t != n"))?;
}
}
Interpolation::Lagrange => {}
}
let t = (1 ..= params.t()).map(Participant).collect::<Vec<_>>(); let t = (1 ..= params.t()).map(Participant).collect::<Vec<_>>();
let group_key = let group_key =
t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum(); t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum();
ThresholdCore { params, interpolation, secret_share, group_key, verification_shares }
Ok(ThresholdKeys {
core: Arc::new(Zeroizing::new(ThresholdCore {
params,
interpolation,
secret_share,
group_key,
verification_shares,
})),
scalar: C::F::ONE,
offset: C::F::ZERO,
})
} }
/// Parameters for these keys. /// Scale the keys by a given scalar to allow for various account and privacy schemes.
///
/// This scalar is ephemeral and will not be included when these keys are serialized. The
/// scalar is applied on top of any already-existing scalar/offset.
///
/// Returns `None` if the scalar is equal to `0`.
#[must_use]
pub fn scale(mut self, scalar: C::F) -> Option<ThresholdKeys<C>> {
if bool::from(scalar.is_zero()) {
None?;
}
self.scalar *= scalar;
self.offset *= scalar;
Some(self)
}
/// Offset the keys by a given scalar to allow for various account and privacy schemes.
///
/// This offset is ephemeral and will not be included when these keys are serialized. The
/// offset is applied on top of any already-existing scalar/offset.
#[must_use]
pub fn offset(mut self, offset: C::F) -> ThresholdKeys<C> {
self.offset += offset;
self
}
/// Return the current scalar in-use for these keys.
pub fn current_scalar(&self) -> C::F {
self.scalar
}
/// Return the current offset in-use for these keys.
pub fn current_offset(&self) -> C::F {
self.offset
}
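A hedged sketch of the ephemeral tweaks described above; `tweak` is an illustrative scalar, and the asserted identity follows directly from `group_key()` below:

use ciphersuite::{Ciphersuite, Ristretto};
use dkg::ThresholdKeys;

// Offsetting adds `tweak * G` to the group key without touching the serialized keys.
fn tweak_keys(
  keys: ThresholdKeys<Ristretto>,
  tweak: <Ristretto as Ciphersuite>::F,
) -> ThresholdKeys<Ristretto> {
  let original = keys.group_key();
  let keys = keys.offset(tweak);
  debug_assert_eq!(
    keys.group_key(),
    original + (<Ristretto as Ciphersuite>::generator() * tweak),
  );
  keys
}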
/// Return the parameters for these keys.
pub fn params(&self) -> ThresholdParams { pub fn params(&self) -> ThresholdParams {
self.params self.core.params
} }
/// Secret share for these keys. /// Return the original group key, without any tweaks applied.
pub fn secret_share(&self) -> &Zeroizing<C::F> { pub fn original_group_key(&self) -> C::G {
&self.secret_share self.core.group_key
} }
/// Group key for these keys. /// Return the interpolation method for these keys.
pub fn interpolation(&self) -> &Interpolation<C::F> {
&self.core.interpolation
}
/// Return the group key, with the expected linear combination taken.
pub fn group_key(&self) -> C::G { pub fn group_key(&self) -> C::G {
self.group_key (self.core.group_key * self.scalar) + (C::generator() * self.offset)
} }
pub(crate) fn verification_shares(&self) -> HashMap<Participant, C::G> { /// Return the secret share for these keys.
self.verification_shares.clone() pub fn secret_share(&self) -> &Zeroizing<C::F> {
&self.core.secret_share
} }
/// Write these keys to a type satisfying std::io::Write. /// Return the original (untweaked) verification share for the specified participant.
///
/// This will panic if the participant index is invalid for these keys.
pub fn original_verification_share(&self, l: Participant) -> C::G {
self.core.verification_shares[&l]
}
/// Obtain a view of these keys, interpolated for the specified signing set, with the specified
/// linear combination taken.
pub fn view(&self, mut included: Vec<Participant>) -> Result<ThresholdView<C>, DkgError> {
if (included.len() < self.params().t.into()) ||
(usize::from(self.params().n()) < included.len())
{
Err(DkgError::IncorrectAmountOfParticipants {
t: self.params().t,
n: self.params().n,
amount: included.len(),
})?;
}
included.sort();
{
let mut found = included[0] == self.params().i();
for i in 1 .. included.len() {
if included[i - 1] == included[i] {
Err(DkgError::DuplicatedParticipant(included[i]))?;
}
found |= included[i] == self.params().i();
}
if !found {
Err(DkgError::NotParticipating)?;
}
}
{
let last = *included.last().unwrap();
if u16::from(last) > self.params().n() {
Err(DkgError::InvalidParticipant { n: self.params().n(), participant: last })?;
}
}
// The interpolation occurs multiplicatively, letting us scale by the scalar now
let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref());
let mut secret_share = Zeroizing::new(
self.core.interpolation.interpolation_factor(self.params().i(), &included) *
secret_share_scaled.deref(),
);
let mut verification_shares = HashMap::with_capacity(included.len());
for i in &included {
let verification_share = self.core.verification_shares[i];
let verification_share = verification_share *
self.scalar *
self.core.interpolation.interpolation_factor(*i, &included);
verification_shares.insert(*i, verification_share);
}
/*
The offset is included by adding it to the participant with the lowest ID.
This is done after interpolating to ensure, regardless of the method of interpolation, that
the method of interpolation does not scale the offset. For Lagrange interpolation, we could
add the offset to every key share before interpolating, yet for Constant interpolation, we
_have_ to add it as we do here (which also works even when we intend to perform Lagrange
interpolation).
*/
if included[0] == self.params().i() {
*secret_share += self.offset;
}
*verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset;
Ok(ThresholdView {
interpolation: self.core.interpolation.clone(),
scalar: self.scalar,
offset: self.offset,
group_key: self.group_key(),
secret_share,
original_verification_shares: self.core.verification_shares.clone(),
verification_shares,
included,
})
}
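For clarity, the relationship `view` establishes (a sketch in the notation of the comment above: λ the interpolation factor, α the scalar, o the offset, s the group secret, K the untweaked group key):

% Each included signer i holds s'_i = α · λ_{i,S} · s_i, with o added to the lowest-indexed
% signer only, so the interpolated shares sum to the tweaked key:
\sum_{i \in S} s'_i = \alpha \sum_{i \in S} \lambda_{i,S} s_i + o = \alpha s + o,
\qquad \Bigl(\sum_{i \in S} s'_i\Bigr) G = \alpha K + o G = \texttt{group\_key()}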
/// Write these keys to a type satisfying `std::io::Write`.
///
/// This will not include the ephemeral scalar/offset.
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> { pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?; writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?;
writer.write_all(C::ID)?; writer.write_all(C::ID)?;
writer.write_all(&self.params.t.to_le_bytes())?; writer.write_all(&self.core.params.t.to_le_bytes())?;
writer.write_all(&self.params.n.to_le_bytes())?; writer.write_all(&self.core.params.n.to_le_bytes())?;
writer.write_all(&self.params.i.to_bytes())?; writer.write_all(&self.core.params.i.to_bytes())?;
match &self.interpolation { match &self.core.interpolation {
Interpolation::Constant(c) => { Interpolation::Constant(c) => {
writer.write_all(&[0])?; writer.write_all(&[0])?;
for c in c { for c in c {
@@ -325,27 +548,30 @@ mod lib {
} }
Interpolation::Lagrange => writer.write_all(&[1])?, Interpolation::Lagrange => writer.write_all(&[1])?,
}; };
let mut share_bytes = self.secret_share.to_repr(); let mut share_bytes = self.core.secret_share.to_repr();
writer.write_all(share_bytes.as_ref())?; writer.write_all(share_bytes.as_ref())?;
share_bytes.as_mut().zeroize(); share_bytes.as_mut().zeroize();
for l in 1 ..= self.params.n { for l in 1 ..= self.core.params.n {
writer writer.write_all(
.write_all(self.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref())?; self.core.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref(),
)?;
} }
Ok(()) Ok(())
} }
/// Serialize these keys to a `Vec<u8>`. /// Serialize these keys to a `Vec<u8>`.
///
/// This will not include the ephemeral scalar/offset.
pub fn serialize(&self) -> Zeroizing<Vec<u8>> { pub fn serialize(&self) -> Zeroizing<Vec<u8>> {
let mut serialized = Zeroizing::new(vec![]); let mut serialized = Zeroizing::new(vec![]);
self.write::<Vec<u8>>(serialized.as_mut()).unwrap(); self.write::<Vec<u8>>(serialized.as_mut()).unwrap();
serialized serialized
} }
/// Read keys from a type satisfying std::io::Read. /// Read keys from a type satisfying `std::io::Read`.
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<ThresholdCore<C>> { pub fn read<R: io::Read>(reader: &mut R) -> io::Result<ThresholdKeys<C>> {
{ {
let different = || io::Error::other("deserializing ThresholdCore for another curve"); let different = || io::Error::other("deserializing ThresholdKeys for another curve");
let mut id_len = [0; 4]; let mut id_len = [0; 4];
reader.read_exact(&mut id_len)?; reader.read_exact(&mut id_len)?;
@@ -394,197 +620,17 @@ mod lib {
verification_shares.insert(l, <C as Ciphersuite>::read_G(reader)?); verification_shares.insert(l, <C as Ciphersuite>::read_G(reader)?);
} }
Ok(ThresholdCore::new( ThresholdKeys::new(
ThresholdParams::new(t, n, i).map_err(|_| io::Error::other("invalid parameters"))?, ThresholdParams::new(t, n, i).map_err(io::Error::other)?,
interpolation, interpolation,
secret_share, secret_share,
verification_shares, verification_shares,
)) )
} .map_err(io::Error::other)
} }
}
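A hedged round-trip sketch for the serialization above; since the ephemeral scalar/offset are intentionally excluded, reading back yields the untweaked keys:

use std::io;
use ciphersuite::Ristretto;
use dkg::ThresholdKeys;

fn round_trip(keys: &ThresholdKeys<Ristretto>) -> io::Result<ThresholdKeys<Ristretto>> {
  let bytes = keys.serialize();
  ThresholdKeys::<Ristretto>::read::<&[u8]>(&mut bytes.as_ref())
}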
/// Threshold keys usable for signing. impl<C: Ciphersuite> ThresholdView<C> {
#[derive(Clone, Debug, Zeroize)]
pub struct ThresholdKeys<C: Ciphersuite> {
// Core keys.
// If this is the last reference, the underlying keys will be dropped. When that happens, the
// private key present within it will be zeroed out (as it's within Zeroizing).
#[zeroize(skip)]
pub(crate) core: Arc<ThresholdCore<C>>,
// Scalar applied to these keys.
pub(crate) scalar: C::F,
// Offset applied to these keys.
pub(crate) offset: C::F,
}
/// View of keys, interpolated and with the expected linear combination taken for usage.
#[derive(Clone)]
pub struct ThresholdView<C: Ciphersuite> {
interpolation: Interpolation<C::F>,
scalar: C::F,
offset: C::F,
group_key: C::G,
included: Vec<Participant>,
secret_share: Zeroizing<C::F>,
original_verification_shares: HashMap<Participant, C::G>,
verification_shares: HashMap<Participant, C::G>,
}
impl<C: Ciphersuite> fmt::Debug for ThresholdView<C> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt
.debug_struct("ThresholdView")
.field("interpolation", &self.interpolation)
.field("scalar", &self.scalar)
.field("offset", &self.offset)
.field("group_key", &self.group_key)
.field("included", &self.included)
.field("original_verification_shares", &self.original_verification_shares)
.field("verification_shares", &self.verification_shares)
.finish_non_exhaustive()
}
}
impl<C: Ciphersuite> Zeroize for ThresholdView<C> {
fn zeroize(&mut self) {
self.scalar.zeroize();
self.offset.zeroize();
self.group_key.zeroize();
self.included.zeroize();
self.secret_share.zeroize();
for share in self.original_verification_shares.values_mut() {
share.zeroize();
}
for share in self.verification_shares.values_mut() {
share.zeroize();
}
}
}
impl<C: Ciphersuite> ThresholdKeys<C> {
/// Create a new set of ThresholdKeys from a ThresholdCore.
pub fn new(core: ThresholdCore<C>) -> ThresholdKeys<C> {
ThresholdKeys { core: Arc::new(core), scalar: C::F::ONE, offset: C::F::ZERO }
}
/// Scale the keys by a given scalar to allow for various account and privacy schemes.
///
/// This scalar is ephemeral and will not be included when these keys are serialized. The
/// scalar is applied on top of any already-existing scalar/offset.
///
/// Returns `None` if the scalar is equal to `0`.
#[must_use]
pub fn scale(mut self, scalar: C::F) -> Option<ThresholdKeys<C>> {
if bool::from(scalar.is_zero()) {
None?;
}
self.scalar *= scalar;
self.offset *= scalar;
Some(self)
}
/// Offset the keys by a given scalar to allow for various account and privacy schemes.
///
/// This offset is ephemeral and will not be included when these keys are serialized. The
/// offset is applied on top of any already-existing scalar/offset.
#[must_use]
pub fn offset(mut self, offset: C::F) -> ThresholdKeys<C> {
self.offset += offset;
self
}
/// Return the current scalar in-use for these keys.
pub fn current_scalar(&self) -> C::F {
self.scalar
}
/// Return the current offset in-use for these keys.
pub fn current_offset(&self) -> C::F {
self.offset
}
/// Return the parameters for these keys.
pub fn params(&self) -> ThresholdParams {
self.core.params
}
/// Return the secret share for these keys.
pub fn secret_share(&self) -> &Zeroizing<C::F> {
&self.core.secret_share
}
/// Return the group key, with the expected linear combination taken.
pub fn group_key(&self) -> C::G {
(self.core.group_key * self.scalar) + (C::generator() * self.offset)
}
/// Return all participants' verification shares without any offsetting.
pub(crate) fn verification_shares(&self) -> HashMap<Participant, C::G> {
self.core.verification_shares()
}
/// Serialize these keys to a `Vec<u8>`.
pub fn serialize(&self) -> Zeroizing<Vec<u8>> {
self.core.serialize()
}
/// Obtain a view of these keys, interpolated for the specified signing set, with the specified
/// linear combination taken.
pub fn view(&self, mut included: Vec<Participant>) -> Result<ThresholdView<C>, DkgError<()>> {
if (included.len() < self.params().t.into()) ||
(usize::from(self.params().n()) < included.len())
{
Err(DkgError::InvalidSigningSet)?;
}
included.sort();
// The interpolation occurs multiplicatively, letting us scale by the scalar now
let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref());
let mut secret_share = Zeroizing::new(
self.core.interpolation.interpolation_factor(self.params().i(), &included) *
secret_share_scaled.deref(),
);
let mut verification_shares = self.verification_shares();
for (i, share) in &mut verification_shares {
*share *= self.scalar * self.core.interpolation.interpolation_factor(*i, &included);
}
/*
The offset is included by adding it to the participant with the lowest ID.
This is done after interpolating to ensure, regardless of the method of interpolation, that
the method of interpolation does not scale the offset. For Lagrange interpolation, we could
add the offset to every key share before interpolating, yet for Constant interpolation, we
_have_ to add it as we do here (which also works even when we intend to perform Lagrange
interpolation).
*/
if included[0] == self.params().i() {
*secret_share += self.offset;
}
*verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset;
Ok(ThresholdView {
interpolation: self.core.interpolation.clone(),
scalar: self.scalar,
offset: self.offset,
group_key: self.group_key(),
secret_share,
original_verification_shares: self.verification_shares(),
verification_shares,
included,
})
}
}
impl<C: Ciphersuite> From<ThresholdCore<C>> for ThresholdKeys<C> {
fn from(keys: ThresholdCore<C>) -> ThresholdKeys<C> {
ThresholdKeys::new(keys)
}
}
impl<C: Ciphersuite> ThresholdView<C> {
/// Return the scalar applied to this view. /// Return the scalar applied to this view.
pub fn scalar(&self) -> C::F { pub fn scalar(&self) -> C::F {
self.scalar self.scalar
@@ -618,17 +664,18 @@ mod lib {
&self.secret_share &self.secret_share
} }
/// Return the original verification share for the specified participant. /// Return the original (untweaked) verification share for the specified participant.
///
/// This will panic if the participant index is invalid for these keys.
pub fn original_verification_share(&self, l: Participant) -> C::G { pub fn original_verification_share(&self, l: Participant) -> C::G {
self.original_verification_shares[&l] self.original_verification_shares[&l]
} }
/// Return the interpolated verification share, with the expected linear combination taken, /// Return the interpolated verification share, with the expected linear combination taken,
/// for the specified participant. /// for the specified participant.
///
/// This will panic if the participant was not included in the signing set.
pub fn verification_share(&self, l: Participant) -> C::G { pub fn verification_share(&self, l: Participant) -> C::G {
self.verification_shares[&l] self.verification_shares[&l]
} }
}
} }
#[cfg(feature = "std")]
pub use lib::*;

View File

@@ -1,129 +0,0 @@
#[cfg(feature = "std")]
use core::ops::Deref;
use std_shims::{vec, vec::Vec, collections::HashSet};
#[cfg(feature = "std")]
use std_shims::collections::HashMap;
#[cfg(feature = "std")]
use zeroize::Zeroizing;
use ciphersuite::{
group::{Group, GroupEncoding},
Ciphersuite,
};
use crate::DkgError;
#[cfg(feature = "std")]
use crate::{Participant, ThresholdParams, Interpolation, ThresholdCore};
fn check_keys<C: Ciphersuite>(keys: &[C::G]) -> Result<u16, DkgError<()>> {
if keys.is_empty() {
Err(DkgError::InvalidSigningSet)?;
}
// Too many signers
let keys_len = u16::try_from(keys.len()).map_err(|_| DkgError::InvalidSigningSet)?;
// Duplicated public keys
if keys.iter().map(|key| key.to_bytes().as_ref().to_vec()).collect::<HashSet<_>>().len() !=
keys.len()
{
Err(DkgError::InvalidSigningSet)?;
}
Ok(keys_len)
}
// This function panics if called with keys whose length exceed 2**16.
// This is fine since it's internal and all calls occur after calling check_keys, which does check
// the keys' length.
fn binding_factor_transcript<C: Ciphersuite>(
context: &[u8],
keys: &[C::G],
) -> Result<Vec<u8>, DkgError<()>> {
let mut transcript = vec![];
transcript.push(u8::try_from(context.len()).map_err(|_| DkgError::InvalidSigningSet)?);
transcript.extend(context);
transcript.extend(u16::try_from(keys.len()).unwrap().to_le_bytes());
for key in keys {
transcript.extend(key.to_bytes().as_ref());
}
Ok(transcript)
}
fn binding_factor<C: Ciphersuite>(mut transcript: Vec<u8>, i: u16) -> C::F {
transcript.extend(i.to_le_bytes());
C::hash_to_F(b"musig", &transcript)
}
/// The group key resulting from using this library's MuSig key gen.
///
/// This function will return an error if the context is longer than 255 bytes.
///
/// Creating an aggregate key with a list containing duplicated public keys will return an error.
pub fn musig_key<C: Ciphersuite>(context: &[u8], keys: &[C::G]) -> Result<C::G, DkgError<()>> {
let keys_len = check_keys::<C>(keys)?;
let transcript = binding_factor_transcript::<C>(context, keys)?;
let mut res = C::G::identity();
for i in 1 ..= keys_len {
// TODO: Calculate this with a multiexp
res += keys[usize::from(i - 1)] * binding_factor::<C>(transcript.clone(), i);
}
Ok(res)
}
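As a sketch of the aggregation the removed `musig_key` performed (this logic now lives in `dkg-musig`): each public key is weighted by a binding factor derived from the shared transcript.

% Binding factor per key, hashed with the "musig" domain separator over the transcript:
b_i = \mathrm{hash\_to\_F}(\text{"musig"},\ \text{context} \parallel n \parallel K_1 \parallel \dots \parallel K_n \parallel i),
\qquad K_{\mathrm{agg}} = \sum_{i=1}^{n} b_i K_i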
/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key.
///
/// Creating an aggregate key with a list containing duplicated public keys returns an error.
#[cfg(feature = "std")]
pub fn musig<C: Ciphersuite>(
context: &[u8],
private_key: &Zeroizing<C::F>,
keys: &[C::G],
) -> Result<ThresholdCore<C>, DkgError<()>> {
let keys_len = check_keys::<C>(keys)?;
let our_pub_key = C::generator() * private_key.deref();
let Some(pos) = keys.iter().position(|key| *key == our_pub_key) else {
// Not present in signing set
Err(DkgError::InvalidSigningSet)?
};
let params = ThresholdParams::new(
keys_len,
keys_len,
// These errors shouldn't be possible, as pos is bounded to len - 1
// Since len is prior guaranteed to be within u16::MAX, pos + 1 must also be
Participant::new((pos + 1).try_into().map_err(|_| DkgError::InvalidSigningSet)?)
.ok_or(DkgError::InvalidSigningSet)?,
)?;
// Calculate the binding factor per-key
let transcript = binding_factor_transcript::<C>(context, keys)?;
let mut binding = Vec::with_capacity(keys.len());
for i in 1 ..= keys_len {
binding.push(binding_factor::<C>(transcript.clone(), i));
}
// Our secret share is our private key
let secret_share = private_key.clone();
// Calculate verification shares
let mut verification_shares = HashMap::new();
let mut group_key = C::G::identity();
for l in 1 ..= keys_len {
let key = keys[usize::from(l) - 1];
// TODO: Use a multiexp for this
group_key += key * binding[usize::from(l - 1)];
// These errors also shouldn't be possible, for the same reasons as documented above
verification_shares.insert(Participant::new(l).ok_or(DkgError::InvalidSigningSet)?, key);
}
debug_assert_eq!(C::generator() * secret_share.deref(), verification_shares[&params.i()]);
debug_assert_eq!(musig_key::<C>(context, keys).unwrap(), group_key);
Ok(ThresholdCore::new(
params,
Interpolation::Constant(binding),
secret_share,
verification_shares,
))
}

View File

@@ -1,102 +0,0 @@
use core::ops::Deref;
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use ciphersuite::{group::ff::Field, Ciphersuite};
use crate::{Participant, ThresholdCore, ThresholdKeys, musig::musig as musig_fn};
mod musig;
pub use musig::test_musig;
/// FROST key generation testing utility.
pub mod pedpop;
use pedpop::pedpop_gen;
// Promotion test.
mod promote;
use promote::test_generator_promotion;
/// Constant amount of participants to use when testing.
pub const PARTICIPANTS: u16 = 5;
/// Constant threshold of participants to use when testing.
pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
/// Clone a map without a specific value.
pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
/// Recover the secret from a collection of keys.
///
/// This will panic if no keys, an insufficient amount of keys, or the wrong keys are provided.
pub fn recover_key<C: Ciphersuite>(keys: &HashMap<Participant, ThresholdKeys<C>>) -> C::F {
let first = keys.values().next().expect("no keys provided");
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
let included = keys.keys().copied().collect::<Vec<_>>();
let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| {
accum +
(first.core.interpolation.interpolation_factor(*i, &included) * keys.secret_share().deref())
});
assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys");
group_private
}
/// Generate threshold keys for tests.
pub fn key_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdKeys<C>> {
let res = pedpop_gen(rng)
.drain()
.map(|(i, core)| {
assert_eq!(
&ThresholdCore::<C>::read::<&[u8]>(&mut core.serialize().as_ref()).unwrap(),
&core
);
(i, ThresholdKeys::new(core))
})
.collect();
assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key());
res
}
/// Generate MuSig keys for tests.
pub fn musig_key_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdKeys<C>> {
let mut keys = vec![];
let mut pub_keys = vec![];
for _ in 0 .. PARTICIPANTS {
let key = Zeroizing::new(C::F::random(&mut *rng));
pub_keys.push(C::generator() * *key);
keys.push(key);
}
let mut res = HashMap::new();
for key in keys {
let these_keys = musig_fn::<C>(b"Test MuSig Key Gen", &key, &pub_keys).unwrap();
res.insert(these_keys.params().i(), ThresholdKeys::new(these_keys));
}
assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key());
res
}
/// Run the test suite on a ciphersuite.
pub fn test_ciphersuite<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
key_gen::<_, C>(rng);
test_generator_promotion::<_, C>(rng);
}
#[test]
fn test_with_ristretto() {
test_ciphersuite::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng);
}

View File

@@ -1,61 +0,0 @@
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use ciphersuite::{group::ff::Field, Ciphersuite};
use crate::{
ThresholdKeys,
musig::{musig_key, musig},
tests::{PARTICIPANTS, recover_key},
};
/// Tests MuSig key generation.
pub fn test_musig<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
let mut keys = vec![];
let mut pub_keys = vec![];
for _ in 0 .. PARTICIPANTS {
let key = Zeroizing::new(C::F::random(&mut *rng));
pub_keys.push(C::generator() * *key);
keys.push(key);
}
const CONTEXT: &[u8] = b"MuSig Test";
// Empty signing set
musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).unwrap_err();
// Signing set we're not part of
musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).unwrap_err();
// Test with n keys
{
let mut created_keys = HashMap::new();
let mut verification_shares = HashMap::new();
let group_key = musig_key::<C>(CONTEXT, &pub_keys).unwrap();
for (i, key) in keys.iter().enumerate() {
let these_keys = musig::<C>(CONTEXT, key, &pub_keys).unwrap();
assert_eq!(these_keys.params().t(), PARTICIPANTS);
assert_eq!(these_keys.params().n(), PARTICIPANTS);
assert_eq!(usize::from(these_keys.params().i().0), i + 1);
verification_shares
.insert(these_keys.params().i(), C::generator() * **these_keys.secret_share());
assert_eq!(these_keys.group_key(), group_key);
created_keys.insert(these_keys.params().i(), ThresholdKeys::new(these_keys));
}
for keys in created_keys.values() {
assert_eq!(keys.verification_shares(), verification_shares);
}
assert_eq!(C::generator() * recover_key(&created_keys), group_key);
}
}
#[test]
fn musig_literal() {
test_musig::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng)
}

View File

@@ -1,331 +0,0 @@
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
use ciphersuite::Ciphersuite;
use crate::{
Participant, ThresholdParams, ThresholdCore,
pedpop::{Commitments, KeyGenMachine, SecretShare, KeyMachine},
encryption::{EncryptionKeyMessage, EncryptedMessage},
tests::{THRESHOLD, PARTICIPANTS, clone_without},
};
type PedPoPEncryptedMessage<C> = EncryptedMessage<C, SecretShare<<C as Ciphersuite>::F>>;
type PedPoPSecretShares<C> = HashMap<Participant, PedPoPEncryptedMessage<C>>;
const CONTEXT: [u8; 32] = *b"DKG Test Key Generation ";
// Commit, then return commitment messages, enc keys, and shares
#[allow(clippy::type_complexity)]
fn commit_enc_keys_and_shares<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> (
HashMap<Participant, KeyMachine<C>>,
HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
HashMap<Participant, C::G>,
HashMap<Participant, PedPoPSecretShares<C>>,
) {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
let mut enc_keys = HashMap::new();
for i in (1 ..= PARTICIPANTS).map(Participant) {
let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap();
let machine = KeyGenMachine::<C>::new(params, CONTEXT);
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
commitments.insert(
i,
EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params)
.unwrap(),
);
enc_keys.insert(i, commitments[&i].enc_key());
}
let mut secret_shares = HashMap::new();
let machines = machines
.drain()
.map(|(l, machine)| {
let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
(
l,
EncryptedMessage::read::<&[u8]>(
&mut share.serialize().as_ref(),
// Only t/n actually matters, so hardcode i to 1 here
ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: Participant(1) },
)
.unwrap(),
)
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares);
(l, machine)
})
.collect::<HashMap<_, _>>();
(machines, commitments, enc_keys, secret_shares)
}
fn generate_secret_shares<C: Ciphersuite>(
shares: &HashMap<Participant, PedPoPSecretShares<C>>,
recipient: Participant,
) -> PedPoPSecretShares<C> {
let mut our_secret_shares = HashMap::new();
for (i, shares) in shares {
if recipient == *i {
continue;
}
our_secret_shares.insert(*i, shares[&recipient].clone());
}
our_secret_shares
}
/// Fully perform the PedPoP key generation algorithm.
pub fn pedpop_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdCore<C>> {
let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng);
let mut verification_shares = None;
let mut group_key = None;
machines
.drain()
.map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete();
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(these_keys.verification_shares());
}
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, these_keys)
})
.collect::<HashMap<_, _>>()
}
#[cfg(test)]
mod literal {
use rand_core::OsRng;
use ciphersuite::Ristretto;
use crate::{
DkgError,
encryption::EncryptionKeyProof,
pedpop::{BlameMachine, AdditionalBlameMachine},
};
use super::*;
const ONE: Participant = Participant(1);
const TWO: Participant = Participant(2);
fn test_blame(
commitment_msgs: &HashMap<Participant, EncryptionKeyMessage<Ristretto, Commitments<Ristretto>>>,
machines: Vec<BlameMachine<Ristretto>>,
msg: &PedPoPEncryptedMessage<Ristretto>,
blame: &Option<EncryptionKeyProof<Ristretto>>,
) {
for machine in machines {
let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone());
assert_eq!(blamed, ONE);
// Verify additional blame also works
assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE);
// Verify machines constructed with AdditionalBlameMachine::new work
assert_eq!(
AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame(
ONE,
TWO,
msg.clone(),
blame.clone()
),
ONE,
);
}
}
// TODO: Write a macro which expands to the following
#[test]
fn invalid_encryption_pop_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the PoP of the encrypted message from 1 to 2
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop();
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: ONE, blame: None }));
// Explicitly declare we have a blame object, which happens to be None since invalid PoP
// is self-explainable
blame = Some(None);
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_ecdh_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the share to trigger a blame event
// Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass
// While here, 2 is malicious, this is so 1 creates the blame proof
// We then malleate 1's blame proof, so 1 ends up malicious
// Doesn't simply invalidate the PoP as that won't have a blame statement
// By mutating the encrypted data, we do ensure a blame statement is created
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_key();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
// This should be largely equivalent to the prior test
#[test]
fn invalid_dleq_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_serialization_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_value_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
}

View File

@@ -1,66 +0,0 @@
use core::{marker::PhantomData, ops::Deref};
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use ciphersuite::{group::Group, Ciphersuite};
use crate::{
promote::{GeneratorPromotion, GeneratorProof},
tests::{clone_without, key_gen, recover_key},
};
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltGenerator<C: Ciphersuite> {
_curve: PhantomData<C>,
}
impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {
type F = C::F;
type G = C::G;
type H = C::H;
const ID: &'static [u8] = b"Alternate Ciphersuite";
fn generator() -> Self::G {
C::G::generator() * <C as Ciphersuite>::hash_to_F(b"DKG Promotion Test", b"generator")
}
fn reduce_512(scalar: [u8; 64]) -> Self::F {
<C as Ciphersuite>::reduce_512(scalar)
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
<C as Ciphersuite>::hash_to_F(dst, data)
}
}
// Test promotion of threshold keys to another generator
pub(crate) fn test_generator_promotion<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
let keys = key_gen::<_, C>(&mut *rng);
let mut promotions = HashMap::new();
let mut proofs = HashMap::new();
for (i, keys) in &keys {
let (promotion, proof) =
GeneratorPromotion::<_, AltGenerator<C>>::promote(&mut *rng, keys.clone());
promotions.insert(*i, promotion);
proofs.insert(*i, GeneratorProof::<C>::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap());
}
let new_group_key = AltGenerator::<C>::generator() * recover_key(&keys);
for (i, promoting) in promotions.drain() {
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
assert_eq!(keys[&i].params(), promoted.params());
assert_eq!(keys[&i].secret_share(), promoted.secret_share());
assert_eq!(new_group_key, promoted.group_key());
for (l, verification_share) in promoted.verification_shares() {
assert_eq!(
AltGenerator::<C>::generator() * keys[&l].secret_share().deref(),
verification_share
);
}
}
}

View File

@@ -39,13 +39,13 @@ multiexp = { path = "../multiexp", version = "0.4", default-features = false, fe
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] }
dkg = { path = "../dkg", version = "^0.5.1", default-features = false, features = ["std"] } dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] }
[dev-dependencies] [dev-dependencies]
hex = "0.4" hex = "0.4"
serde_json = { version = "1", default-features = false, features = ["std"] } serde_json = { version = "1", default-features = false, features = ["std"] }
dkg = { path = "../dkg", features = ["tests"] } dkg = { path = "../dkg" }
[features] [features]
ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"]
@@ -56,4 +56,4 @@ p256 = ["ciphersuite/p256"]
ed448 = ["minimal-ed448", "ciphersuite/ed448"] ed448 = ["minimal-ed448", "ciphersuite/ed448"]
tests = ["hex", "rand_core/getrandom", "dkg/tests"] tests = ["hex", "rand_core/getrandom"]

View File

@@ -30,6 +30,8 @@ dleq = { path = "../../crypto/dleq", default-features = false }
schnorr-signatures = { path = "../../crypto/schnorr", default-features = false } schnorr-signatures = { path = "../../crypto/schnorr", default-features = false }
dkg = { path = "../../crypto/dkg", default-features = false } dkg = { path = "../../crypto/dkg", default-features = false }
dkg-recovery = { path = "../../crypto/dkg/recovery", default-features = false }
dkg-musig = { path = "../../crypto/dkg/musig", default-features = false }
# modular-frost = { path = "../../crypto/frost", default-features = false } # modular-frost = { path = "../../crypto/frost", default-features = false }
# frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false } # frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false }

View File

@@ -13,6 +13,8 @@ pub use dleq;
pub use schnorr_signatures; pub use schnorr_signatures;
pub use dkg; pub use dkg;
pub use dkg_recovery;
pub use dkg_musig;
/* /*
pub use modular_frost; pub use modular_frost;
pub use frost_schnorrkel; pub use frost_schnorrkel;