Create a dedicated crate for the DKG (#141)

* Add dkg crate

* Remove F_len and G_len

They're generally no longer used.

* Replace hash_to_vec with a provided method around associated type H: Digest

Part of minimizing this trait so it can be moved elsewhere. Returning a Vec,
which isn't available in core, may have been a blocker for that.
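
As a point of reference, a minimal sketch of the shape this gives the trait: the hash becomes an associated Digest type and hash_to_vec a provided method over it, mirroring the provided method this diff adds to Curve (the trait name below is illustrative, not from the codebase).

use digest::Digest;

// Illustrative trait: with the hash as an associated type, helpers such as
// hash_to_vec can be provided methods rather than per-curve implementations.
pub trait HashToVec {
  type H: Digest;
  const CONTEXT: &'static [u8];

  // Matches the provided method the diff adds to Curve.
  fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
    Self::H::digest(&[Self::CONTEXT, dst, data].concat()).as_ref().to_vec()
  }
}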

* Encrypt secret shares within the FROST library

Reduces what callers must do in order to use the library correctly.

* Update usage of Zeroize within FROST

* Inline functions in key_gen

There was no reason to keep them separated as they were. The same likely 
applies to sign, yet that isn't the focus right now.

* Add a ciphersuite package which provides hash_to_F
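
As a quick illustration, hash_to_F now lives on the Ciphersuite trait, and FROST's Curve trait merely prefixes its context string before delegating. A hedged usage sketch, assuming the ristretto feature of the new crate; the dst and message below are arbitrary.

use ciphersuite::{Ciphersuite, Ristretto};

// Hash an arbitrary message to a scalar of the Ristretto ciphersuite.
// Per this diff, FROST's Curve::hash_to_F simply calls this with
// [Self::CONTEXT, dst].concat() as the domain-separation tag.
fn challenge_scalar(msg: &[u8]) -> <Ristretto as Ciphersuite>::F {
  Ristretto::hash_to_F(b"example-dst", msg)
}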

* Set the Ciphersuite version to something valid

* Have ed448 export Scalar/FieldElement/Point at the top level

* Move FROST over to Ciphersuite

* Correct usage of ff in ciphersuite

* Correct documentation handling

* Move Schnorr signatures to their own crate
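
A hedged usage sketch of the split-out crate, based on the call sites in this diff (sign over a private key, single-use nonce, and challenge; verify against the public key). The exact trait bounds and helper locations in the new crate are assumptions.

use rand_core::OsRng;
use ciphersuite::{Ciphersuite, Ristretto};
use schnorr::SchnorrSignature;

fn sign_and_verify() {
  // random_nonzero_F and generator() are assumed to be reachable via
  // Ciphersuite, as the call sites in this diff suggest.
  let key = Ristretto::random_nonzero_F(&mut OsRng);
  let nonce = Ristretto::random_nonzero_F(&mut OsRng);
  let challenge = Ristretto::hash_to_F(b"chal", b"message");

  let sig = SchnorrSignature::<Ristretto>::sign(key, nonce, challenge);
  assert!(sig.verify(Ristretto::generator() * key, challenge));
}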

* Remove unused feature from schnorr

* Fix Schnorr tests

* Split DKG into a separate crate
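
For orientation, a rough single-participant sketch of the three-round machine flow being moved into the dkg crate, condensed from the key_gen code removed from FROST in this diff. It is written against the pre-split names (FrostParams, frost::key_gen paths) purely for illustration; the new crate renames FrostParams to ThresholdParams and may change message types (e.g. shares now being encrypted), so treat this as the shape of the protocol, not its exact API.

use std::collections::HashMap;
use rand_core::OsRng;
use frost::{
  FrostParams, FrostCore, FrostError,
  curve::Curve,
  key_gen::{KeyGenMachine, Commitments, SecretShare},
};

// One participant's view of the DKG. In a real run, the two HashMap arguments
// are the messages received from every other participant in rounds 1 and 2.
fn run_party<C: Curve>(
  t: u16,
  n: u16,
  i: u16,
  their_commitments: HashMap<u16, Commitments<C>>,
  their_shares: HashMap<u16, SecretShare<C>>,
) -> Result<FrostCore<C>, FrostError> {
  let machine = KeyGenMachine::<C>::new(
    FrostParams::new(t, n, i)?,
    "example context, unique per multisig".to_string(),
  );

  // Round 1: sample coefficients, broadcast commitments plus a proof of knowledge.
  let (machine, _our_commitments) = machine.generate_coefficients(&mut OsRng);

  // Round 2: verify everyone's proofs and derive a secret share for each peer,
  // to be sent to them over a secure channel.
  let (machine, _our_shares) = machine.generate_secret_shares(&mut OsRng, their_commitments)?;

  // Round 3: verify the shares received against the commitments and output the keys.
  machine.complete(&mut OsRng, their_shares)
}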

* Add serialize to Commitments and SecretShare

A helper wrapping the common pattern of writing into a fresh buffer and 
returning it (let mut buf = vec![]; x.write(&mut buf).unwrap(); buf).
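
The helper follows the Writable pattern this diff adds to FROST's sign module: write into a fresh Vec and return it. A sketch of that provided method (Commitments and SecretShare gain an equivalent inherent serialize):

use std::io::{self, Write};

pub trait Writable {
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;

  // Provided helper: collect the write() output into a Vec<u8>.
  fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    self.write(&mut buf).unwrap();
    buf
  }
}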

* Move FROST over to the new dkg crate

* Update Monero lib to latest FROST

* Correct ethereum's usage of features

* Add serialize to GeneratorProof

* Add serialize helper function to FROST

* Rename AddendumSerialize to WriteAddendum

* Update processor

* Slight fix to processor
This commit is contained in:
Luke Parker, 2022-10-29 03:54:42 -05:00 (committed via GitHub)
parent cbceaff678
commit 2379855b31
50 changed files with 2076 additions and 1601 deletions

View File

@@ -3,27 +3,25 @@ use std::io::{self, Read, Write};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use transcript::Transcript;
use crate::{Curve, FrostError, FrostView, schnorr};
use crate::{Curve, FrostError, ThresholdView};
pub use schnorr::SchnorrSignature;
/// Serialize an addendum to a writer.
pub trait AddendumSerialize {
/// Write an addendum to a writer.
pub trait WriteAddendum {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
}
impl AddendumSerialize for () {
impl WriteAddendum for () {
fn write<W: Write>(&self, _: &mut W) -> io::Result<()> {
Ok(())
}
}
/// Trait alias for the requirements to be used as an addendum.
pub trait Addendum: Clone + PartialEq + Debug + Zeroize + AddendumSerialize {}
impl<A: Clone + PartialEq + Debug + Zeroize + AddendumSerialize> Addendum for A {}
pub trait Addendum: Clone + PartialEq + Debug + WriteAddendum {}
impl<A: Clone + PartialEq + Debug + WriteAddendum> Addendum for A {}
/// Algorithm trait usable by the FROST signing machine to produce signatures.
pub trait Algorithm<C: Curve>: Clone {
@@ -46,7 +44,7 @@ pub trait Algorithm<C: Curve>: Clone {
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
params: &FrostView<C>,
params: &ThresholdView<C>,
) -> Self::Addendum;
/// Read an addendum from a reader.
@@ -55,7 +53,7 @@ pub trait Algorithm<C: Curve>: Clone {
/// Process the addendum for the specified participant. Guaranteed to be called in order.
fn process_addendum(
&mut self,
params: &FrostView<C>,
params: &ThresholdView<C>,
l: u16,
reader: Self::Addendum,
) -> Result<(), FrostError>;
@@ -66,7 +64,7 @@ pub trait Algorithm<C: Curve>: Clone {
/// The nonce will already have been processed into the combined form d + (e * p).
fn sign_share(
&mut self,
params: &FrostView<C>,
params: &ThresholdView<C>,
nonce_sums: &[Vec<C::G>],
nonces: &[C::F],
msg: &[u8],
@@ -149,44 +147,36 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
vec![vec![C::generator()]]
}
fn preprocess_addendum<R: RngCore + CryptoRng>(&mut self, _: &mut R, _: &FrostView<C>) {}
fn preprocess_addendum<R: RngCore + CryptoRng>(&mut self, _: &mut R, _: &ThresholdView<C>) {}
fn read_addendum<R: Read>(&self, _: &mut R) -> io::Result<Self::Addendum> {
Ok(())
}
fn process_addendum(&mut self, _: &FrostView<C>, _: u16, _: ()) -> Result<(), FrostError> {
fn process_addendum(&mut self, _: &ThresholdView<C>, _: u16, _: ()) -> Result<(), FrostError> {
Ok(())
}
fn sign_share(
&mut self,
params: &FrostView<C>,
params: &ThresholdView<C>,
nonce_sums: &[Vec<C::G>],
nonces: &[C::F],
msg: &[u8],
) -> C::F {
let c = H::hram(&nonce_sums[0][0], &params.group_key(), msg);
self.c = Some(c);
schnorr::sign::<C>(params.secret_share(), nonces[0], c).s
SchnorrSignature::<C>::sign(params.secret_share(), nonces[0], c).s
}
#[must_use]
fn verify(&self, group_key: C::G, nonces: &[Vec<C::G>], sum: C::F) -> Option<Self::Signature> {
let sig = SchnorrSignature { R: nonces[0][0], s: sum };
if schnorr::verify::<C>(group_key, self.c.unwrap(), &sig) {
Some(sig)
} else {
None
}
Some(sig).filter(|sig| sig.verify(group_key, self.c.unwrap()))
}
#[must_use]
fn verify_share(&self, verification_share: C::G, nonces: &[Vec<C::G>], share: C::F) -> bool {
schnorr::verify::<C>(
verification_share,
self.c.unwrap(),
&SchnorrSignature { R: nonces[0][0], s: share },
)
SchnorrSignature::<C> { R: nonces[0][0], s: share }.verify(verification_share, self.c.unwrap())
}
}

View File

@@ -1,10 +1,9 @@
use zeroize::Zeroize;
use digest::Digest;
use sha2::{Digest, Sha512};
use group::Group;
use dalek_ff_group::Scalar;
use ciphersuite::Ciphersuite;
use crate::{curve::Curve, algorithm::Hram};
macro_rules! dalek_curve {
@@ -13,49 +12,22 @@ macro_rules! dalek_curve {
$Curve: ident,
$Hram: ident,
$Point: ident,
$ID: literal,
$CONTEXT: literal,
$chal: literal,
$chal: literal
) => {
use dalek_ff_group::$Point;
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Curve;
impl $Curve {
fn hash(dst: &[u8], data: &[u8]) -> Sha512 {
Sha512::new().chain_update(&[$CONTEXT.as_ref(), dst, data].concat())
}
}
pub use ciphersuite::$Curve;
impl Curve for $Curve {
type F = Scalar;
type G = $Point;
const ID: &'static [u8] = $ID;
fn generator() -> Self::G {
$Point::generator()
}
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
Self::hash(dst, data).finalize().to_vec()
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
Scalar::from_hash(Self::hash(dst, data))
}
const CONTEXT: &'static [u8] = $CONTEXT;
}
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
#[derive(Copy, Clone)]
pub struct $Hram;
impl Hram<$Curve> for $Hram {
#[allow(non_snake_case)]
fn hram(R: &$Point, A: &$Point, m: &[u8]) -> Scalar {
let mut hash = Sha512::new();
fn hram(R: &<$Curve as Ciphersuite>::G, A: &<$Curve as Ciphersuite>::G, m: &[u8]) -> Scalar {
let mut hash = <$Curve as Ciphersuite>::H::new();
if $chal.len() != 0 {
hash.update(&[$CONTEXT.as_ref(), $chal].concat());
}
@@ -67,24 +39,8 @@ macro_rules! dalek_curve {
};
}
#[cfg(any(test, feature = "ristretto"))]
dalek_curve!(
"ristretto",
Ristretto,
IetfRistrettoHram,
RistrettoPoint,
b"ristretto",
b"FROST-RISTRETTO255-SHA512-v11",
b"chal",
);
#[cfg(feature = "ristretto")]
dalek_curve!("ristretto", Ristretto, IetfRistrettoHram, b"FROST-RISTRETTO255-SHA512-v11", b"chal");
#[cfg(feature = "ed25519")]
dalek_curve!(
"ed25519",
Ed25519,
IetfEd25519Hram,
EdwardsPoint,
b"edwards25519",
b"FROST-ED25519-SHA512-v11",
b"",
);
dalek_curve!("ed25519", Ed25519, IetfEd25519Hram, b"FROST-ED25519-SHA512-v11", b"");

View File

@@ -1,41 +1,17 @@
use zeroize::Zeroize;
use digest::Digest;
use sha3::{digest::ExtendableOutput, Shake256};
use group::GroupEncoding;
use group::{Group, GroupEncoding};
use minimal_ed448::{scalar::Scalar, point::Point};
use minimal_ed448::{Scalar, Point};
pub use ciphersuite::{Shake256_114, Ed448};
use crate::{curve::Curve, algorithm::Hram};
const CONTEXT: &[u8] = b"FROST-ED448-SHAKE256-v11";
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Ed448;
impl Ed448 {
fn hash(prefix: &[u8], context: &[u8], dst: &[u8], data: &[u8]) -> [u8; 114] {
let mut res = [0; 114];
Shake256::digest_xof(&[prefix, context, dst, data].concat(), &mut res);
res
}
}
impl Curve for Ed448 {
type F = Scalar;
type G = Point;
const ID: &'static [u8] = b"ed448";
fn generator() -> Self::G {
Point::generator()
}
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
Self::hash(b"", CONTEXT, dst, data).as_ref().to_vec()
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
Scalar::wide_reduce(Self::hash(b"", CONTEXT, dst, data))
}
const CONTEXT: &'static [u8] = CONTEXT;
}
#[derive(Copy, Clone)]
@@ -43,12 +19,19 @@ pub struct Ietf8032Ed448Hram;
impl Ietf8032Ed448Hram {
#[allow(non_snake_case)]
pub fn hram(context: &[u8], R: &Point, A: &Point, m: &[u8]) -> Scalar {
Scalar::wide_reduce(Ed448::hash(
&[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(),
context,
b"",
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
))
Scalar::wide_reduce(
Shake256_114::digest(
&[
&[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(),
context,
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
]
.concat(),
)
.as_ref()
.try_into()
.unwrap(),
)
}
}

View File

@@ -1,17 +1,6 @@
use zeroize::Zeroize;
use group::GroupEncoding;
use sha2::{Digest, Sha256};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
use elliptic_curve::{
generic_array::GenericArray,
bigint::{Encoding, U384},
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
};
use ciphersuite::Ciphersuite;
use crate::{curve::Curve, algorithm::Hram};
@@ -19,87 +8,37 @@ macro_rules! kp_curve {
(
$feature: literal,
$lib: ident,
$Curve: ident,
$Hram: ident,
$ID: literal,
$CONTEXT: literal
) => {
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Curve;
impl $Curve {
fn hash(dst: &[u8], data: &[u8]) -> Sha256 {
Sha256::new().chain_update(&[$CONTEXT.as_ref(), dst, data].concat())
}
}
pub use ciphersuite::$Curve;
impl Curve for $Curve {
type F = $lib::Scalar;
type G = $lib::ProjectivePoint;
const ID: &'static [u8] = $ID;
fn generator() -> Self::G {
$lib::ProjectivePoint::GENERATOR
}
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
Self::hash(dst, data).finalize().to_vec()
}
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
let mut dst = &[$CONTEXT, dst].concat();
let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat()).to_vec();
if dst.len() > 255 {
dst = &oversize;
}
// While one of these two libraries does support directly hashing to the Scalar field, the
// other doesn't. While that's probably an oversight, this is a universally working method
let mut modulus = vec![0; 16];
modulus.extend((Self::F::zero() - Self::F::one()).to_bytes());
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
let mut unreduced = U384::from_be_bytes({
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
bytes
})
.reduce(&modulus)
.unwrap()
.to_be_bytes();
let mut array = *GenericArray::from_slice(&unreduced[16 ..]);
let res = $lib::Scalar::from_repr(array).unwrap();
unreduced.zeroize();
array.zeroize();
res
}
const CONTEXT: &'static [u8] = $CONTEXT;
}
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
#[derive(Clone)]
pub struct $Hram;
impl Hram<$Curve> for $Hram {
#[allow(non_snake_case)]
fn hram(R: &$lib::ProjectivePoint, A: &$lib::ProjectivePoint, m: &[u8]) -> $lib::Scalar {
$Curve::hash_to_F(b"chal", &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat())
fn hram(
R: &<$Curve as Ciphersuite>::G,
A: &<$Curve as Ciphersuite>::G,
m: &[u8],
) -> <$Curve as Ciphersuite>::F {
<$Curve as Curve>::hash_to_F(
b"chal",
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
)
}
}
};
}
#[cfg(feature = "p256")]
kp_curve!("p256", p256, P256, IetfP256Hram, b"P-256", b"FROST-P256-SHA256-v11");
kp_curve!("p256", P256, IetfP256Hram, b"FROST-P256-SHA256-v11");
#[cfg(feature = "secp256k1")]
kp_curve!(
"secp256k1",
k256,
Secp256k1,
IetfSecp256k1Hram,
b"secp256k1",
b"FROST-secp256k1-SHA256-v11"
);
kp_curve!("secp256k1", Secp256k1, IetfSecp256k1Hram, b"FROST-secp256k1-SHA256-v11");

View File

@@ -1,4 +1,3 @@
use core::fmt::Debug;
use std::io::{self, Read};
use rand_core::{RngCore, CryptoRng};
@@ -6,17 +5,23 @@ use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use subtle::ConstantTimeEq;
use ff::{Field, PrimeField, PrimeFieldBits};
use group::{Group, GroupOps, GroupEncoding, prime::PrimeGroup};
use digest::Digest;
#[cfg(any(test, feature = "dalek"))]
use group::{
ff::{Field, PrimeField},
Group,
};
pub use ciphersuite::Ciphersuite;
#[cfg(any(feature = "ristretto", feature = "ed25519"))]
mod dalek;
#[cfg(any(test, feature = "ristretto"))]
#[cfg(feature = "ristretto")]
pub use dalek::{Ristretto, IetfRistrettoHram};
#[cfg(feature = "ed25519")]
pub use dalek::{Ed25519, IetfEd25519Hram};
#[cfg(feature = "kp256")]
#[cfg(any(feature = "secp256k1", feature = "p256"))]
mod kp256;
#[cfg(feature = "secp256k1")]
pub use kp256::{Secp256k1, IetfSecp256k1Hram};
@@ -28,33 +33,23 @@ mod ed448;
#[cfg(feature = "ed448")]
pub use ed448::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram};
/// Unified trait to manage an elliptic curve.
// This should be moved into its own crate if the need for generic cryptography over ff/group
// continues, which is the exact reason ff/group exists (to provide a generic interface)
// elliptic-curve exists, yet it doesn't really serve the same role, nor does it use &[u8]/Vec<u8>
// It uses GenericArray which will hopefully be deprecated as Rust evolves and doesn't offer enough
// advantages in the modern day to be worth the hassle -- Kayaba
pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
/// Scalar field element type.
// This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses
type F: PrimeField + PrimeFieldBits + Zeroize;
/// Group element type.
type G: Group<Scalar = Self::F> + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq;
/// ID for this curve.
const ID: &'static [u8];
/// Generator for the group.
// While group does provide this in its API, privacy coins may want to use a custom basepoint
fn generator() -> Self::G;
/// FROST Ciphersuite, except for the signing algorithm specific H2, making this solely the curve,
/// its associated hash function, and the functions derived from it.
pub trait Curve: Ciphersuite {
/// Context string for this curve.
const CONTEXT: &'static [u8];
/// Hash the given dst and data to a byte vector. Used to instantiate H4 and H5.
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8>;
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
Self::H::digest(&[Self::CONTEXT, dst, data].concat()).as_ref().to_vec()
}
/// Field element from hash. Used during key gen and by other crates under Serai as a general
/// utility. Used to instantiate H1 and H3.
#[allow(non_snake_case)]
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F;
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
<Self as Ciphersuite>::hash_to_F(&[Self::CONTEXT, dst].concat(), msg)
}
/// Hash the message for the binding factor. H4 from the IETF draft.
fn hash_msg(msg: &[u8]) -> Vec<u8> {
@@ -68,17 +63,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
/// Hash the commitments and message to calculate the binding factor. H1 from the IETF draft.
fn hash_binding_factor(binding: &[u8]) -> Self::F {
Self::hash_to_F(b"rho", binding)
}
#[allow(non_snake_case)]
fn random_F<R: RngCore + CryptoRng>(rng: &mut R) -> Self::F {
let mut res;
while {
res = Self::F::random(&mut *rng);
res.ct_eq(&Self::F::zero()).into()
} {}
res
<Self as Curve>::hash_to_F(b"rho", binding)
}
/// Securely generate a random nonce. H3 from the IETF draft.
@@ -92,7 +77,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
let mut res;
while {
seed.extend(repr.as_ref());
res = Self::hash_to_F(b"nonce", &seed);
res = <Self as Curve>::hash_to_F(b"nonce", &seed);
res.ct_eq(&Self::F::zero()).into()
} {
rng.fill_bytes(&mut seed);
@@ -106,40 +91,11 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
}
#[allow(non_snake_case)]
fn F_len() -> usize {
<Self::F as PrimeField>::Repr::default().as_ref().len()
}
#[allow(non_snake_case)]
fn G_len() -> usize {
<Self::G as GroupEncoding>::Repr::default().as_ref().len()
}
#[allow(non_snake_case)]
fn read_F<R: Read>(r: &mut R) -> io::Result<Self::F> {
let mut encoding = <Self::F as PrimeField>::Repr::default();
r.read_exact(encoding.as_mut())?;
// ff mandates this is canonical
let res = Option::<Self::F>::from(Self::F::from_repr(encoding))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar"));
for b in encoding.as_mut() {
b.zeroize();
fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {
let res = <Self as Ciphersuite>::read_G(reader)?;
if res.is_identity().into() {
Err(io::Error::new(io::ErrorKind::Other, "identity point"))?;
}
res
}
#[allow(non_snake_case)]
fn read_G<R: Read>(r: &mut R) -> io::Result<Self::G> {
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
r.read_exact(encoding.as_mut())?;
let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
// Ban the identity, per the FROST spec, and non-canonical points
if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical or identity point"))?;
}
Ok(point)
Ok(res)
}
}

View File

@@ -1,363 +0,0 @@
use std::{
marker::PhantomData,
io::{self, Read, Write},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
use multiexp::{multiexp_vartime, BatchVerifier};
use crate::{
curve::Curve,
FrostError, FrostParams, FrostCore,
schnorr::{self, SchnorrSignature},
validate_map,
};
#[allow(non_snake_case)]
fn challenge<C: Curve>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
const DST: &[u8] = b"FROST Schnorr Proof of Knowledge";
// Uses hash_msg to get a fixed size value out of the context string
let mut transcript = C::hash_msg(context.as_bytes());
transcript.extend(l.to_be_bytes());
transcript.extend(R);
transcript.extend(Am);
C::hash_to_F(DST, &transcript)
}
/// Commitments message to be broadcast to all other parties.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Commitments<C: Curve>(Vec<C::G>, Vec<u8>, SchnorrSignature<C>);
impl<C: Curve> Commitments<C> {
pub fn read<R: Read>(reader: &mut R, params: FrostParams) -> io::Result<Self> {
let mut commitments = Vec::with_capacity(params.t().into());
let mut serialized = Vec::with_capacity(usize::from(params.t()) * C::G_len());
for _ in 0 .. params.t() {
let mut buf = <C::G as GroupEncoding>::Repr::default();
reader.read_exact(buf.as_mut())?;
commitments.push(C::read_G(&mut buf.as_ref())?);
serialized.extend(buf.as_ref());
}
Ok(Commitments(commitments, serialized, SchnorrSignature::read(reader)?))
}
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&self.1)?;
self.2.write(writer)
}
}
// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
// the commitments to be broadcasted over an authenticated channel to all parties
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &FrostParams,
context: &str,
) -> (Vec<C::F>, Vec<C::G>, Commitments<C>) {
let t = usize::from(params.t);
let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t);
let mut serialized = Vec::with_capacity(t * C::G_len());
for i in 0 .. t {
// Step 1: Generate t random values to form a polynomial with
coefficients.push(C::random_F(&mut *rng));
// Step 3: Generate public commitments
commitments.push(C::generator() * coefficients[i]);
serialized.extend(commitments[i].to_bytes().as_ref());
}
// Step 2: Provide a proof of knowledge
let mut r = C::random_F(rng);
let sig = schnorr::sign::<C>(
coefficients[0],
// This could be deterministic as the PoK is a singleton never opened up to cooperative
// discussion
// There's no reason to spend the time and effort to make this deterministic besides a
// general obsession with canonicity and determinism though
r,
challenge::<C>(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized),
);
r.zeroize();
// Step 4: Broadcast
(coefficients, commitments.clone(), Commitments(commitments, serialized, sig))
}
// Verify the received data from the first round of key generation
fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &FrostParams,
context: &str,
our_commitments: Vec<C::G>,
mut msgs: HashMap<u16, Commitments<C>>,
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
validate_map(&msgs, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
let mut signatures = Vec::with_capacity(usize::from(params.n() - 1));
let mut commitments = msgs
.drain()
.map(|(l, msg)| {
// Step 5: Validate each proof of knowledge
// This is solely the prep step for the latter batch verification
signatures.push((
l,
msg.0[0],
challenge::<C>(context, l, msg.2.R.to_bytes().as_ref(), &msg.1),
msg.2,
));
(l, msg.0)
})
.collect::<HashMap<_, _>>();
schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?;
commitments.insert(params.i, our_commitments);
Ok(commitments)
}
fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
let l = F::from(u64::from(l));
let mut share = F::zero();
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
share += coefficient;
if idx != (coefficients.len() - 1) {
share *= l;
}
}
share
}
/// Secret share, to be sent only to the party it's intended for, over an encrypted and
/// authenticated channel.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct SecretShare<C: Curve>(C::F);
impl<C: Curve> SecretShare<C> {
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
Ok(SecretShare(C::read_F(reader)?))
}
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.0.to_repr().as_ref())
}
}
impl<C: Curve> Drop for SecretShare<C> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<C: Curve> ZeroizeOnDrop for SecretShare<C> {}
// Calls round 1, step 5 and implements round 2, step 1 of FROST key generation
// Returns our secret share part, commitments for the next step, and a vector for each
// counterparty to receive
fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &FrostParams,
context: &str,
coefficients: &mut Vec<C::F>,
our_commitments: Vec<C::G>,
msgs: HashMap<u16, Commitments<C>>,
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, SecretShare<C>>), FrostError> {
let commitments = verify_r1::<_, C>(rng, params, context, our_commitments, msgs)?;
// Step 1: Generate secret shares for all other parties
let mut res = HashMap::new();
for l in 1 ..= params.n() {
// Don't insert our own shares to the byte buffer which is meant to be sent around
// An app developer could accidentally send it. Best to keep this black boxed
if l == params.i() {
continue;
}
res.insert(l, SecretShare(polynomial(coefficients, l)));
}
// Calculate our own share
let share = polynomial(coefficients, params.i());
coefficients.zeroize();
Ok((share, commitments, res))
}
// Finishes round 2 and returns the keys.
// This key MUST NOT be considered usable until all parties confirm they have completed the
// protocol without issue.
fn complete_r2<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: FrostParams,
mut secret_share: C::F,
commitments: &mut HashMap<u16, Vec<C::G>>,
mut shares: HashMap<u16, SecretShare<C>>,
) -> Result<FrostCore<C>, FrostError> {
validate_map(&shares, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
// Calculate the exponent for a given participant and apply it to a series of commitments
// Initially used with the actual commitments to verify the secret share, later used with stripes
// to generate the verification shares
let exponential = |i: u16, values: &[_]| {
let i = C::F::from(i.into());
let mut res = Vec::with_capacity(params.t().into());
(0 .. usize::from(params.t())).into_iter().fold(C::F::one(), |exp, l| {
res.push((exp, values[l]));
exp * i
});
res
};
let mut batch = BatchVerifier::new(shares.len());
for (l, mut share) in shares.drain() {
secret_share += share.0;
// This can be insecurely linearized from n * t to just n using the below sums for a given
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
// ensure that malleability isn't present is to use this n * t algorithm, which runs
// per sender and not as an aggregate of all senders, which also enables blame
let mut values = exponential(params.i, &commitments[&l]);
values.push((-share.0, C::generator()));
share.zeroize();
batch.queue(rng, l, values);
}
batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?;
// Stripe commitments per t and sum them in advance. Calculating verification shares relies on
// these sums so preprocessing them is a massive speedup
// If these weren't just sums, yet the tables used in multiexp, this would be further optimized
// As of right now, each multiexp will regenerate them
let mut stripes = Vec::with_capacity(usize::from(params.t()));
for t in 0 .. usize::from(params.t()) {
stripes.push(commitments.values().map(|commitments| commitments[t]).sum());
}
// Calculate each user's verification share
let mut verification_shares = HashMap::new();
for i in 1 ..= params.n() {
verification_shares.insert(i, multiexp_vartime(&exponential(i, &stripes)));
}
// Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t)
debug_assert_eq!(C::generator() * secret_share, verification_shares[&params.i()]);
Ok(FrostCore { params, secret_share, group_key: stripes[0], verification_shares })
}
/// State machine to begin the key generation protocol.
pub struct KeyGenMachine<C: Curve> {
params: FrostParams,
context: String,
_curve: PhantomData<C>,
}
/// Advancement of the key generation state machine.
#[derive(Zeroize)]
pub struct SecretShareMachine<C: Curve> {
#[zeroize(skip)]
params: FrostParams,
context: String,
coefficients: Vec<C::F>,
#[zeroize(skip)]
our_commitments: Vec<C::G>,
}
impl<C: Curve> Drop for SecretShareMachine<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> ZeroizeOnDrop for SecretShareMachine<C> {}
/// Final step of the key generation protocol.
#[derive(Zeroize)]
pub struct KeyMachine<C: Curve> {
#[zeroize(skip)]
params: FrostParams,
secret: C::F,
#[zeroize(skip)]
commitments: HashMap<u16, Vec<C::G>>,
}
impl<C: Curve> Drop for KeyMachine<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> ZeroizeOnDrop for KeyMachine<C> {}
impl<C: Curve> KeyGenMachine<C> {
/// Creates a new machine to generate a key for the specified curve in the specified multisig.
// The context string should be unique among multisigs.
pub fn new(params: FrostParams, context: String) -> KeyGenMachine<C> {
KeyGenMachine { params, context, _curve: PhantomData }
}
/// Start generating a key according to the FROST DKG spec.
/// Returns a commitments message to be sent to all parties over an authenticated
/// channel. If any party submits multiple sets of commitments, they MUST be treated as
/// malicious.
pub fn generate_coefficients<R: RngCore + CryptoRng>(
self,
rng: &mut R,
) -> (SecretShareMachine<C>, Commitments<C>) {
let (coefficients, our_commitments, commitments) =
generate_key_r1::<_, C>(rng, &self.params, &self.context);
(
SecretShareMachine {
params: self.params,
context: self.context,
coefficients,
our_commitments,
},
commitments,
)
}
}
impl<C: Curve> SecretShareMachine<C> {
/// Continue generating a key.
/// Takes in everyone else's commitments. Returns a HashMap of secret shares.
/// These MUST be encrypted and only then sent to their respective participants.
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
mut self,
rng: &mut R,
commitments: HashMap<u16, Commitments<C>>,
) -> Result<(KeyMachine<C>, HashMap<u16, SecretShare<C>>), FrostError> {
let (secret, commitments, shares) = generate_key_r2::<_, C>(
rng,
&self.params,
&self.context,
&mut self.coefficients,
self.our_commitments.clone(),
commitments,
)?;
Ok((KeyMachine { params: self.params, secret, commitments }, shares))
}
}
impl<C: Curve> KeyMachine<C> {
/// Complete key generation.
/// Takes in everyone else's shares submitted to us. Returns a FrostCore object representing the
/// generated keys. Successful protocol completion MUST be confirmed by all parties before these
/// keys may be safely used.
pub fn complete<R: RngCore + CryptoRng>(
mut self,
rng: &mut R,
shares: HashMap<u16, SecretShare<C>>,
) -> Result<FrostCore<C>, FrostError> {
complete_r2(rng, self.params, self.secret, &mut self.commitments, shares)
}
}

View File

@@ -11,31 +11,20 @@
//!
//! This library offers ciphersuites compatible with the
//! [IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version
//! 10 is supported.
//! 11 is supported.
use core::fmt::{self, Debug};
use std::{io::Read, sync::Arc, collections::HashMap};
use core::fmt::Debug;
use std::collections::HashMap;
use thiserror::Error;
use zeroize::{Zeroize, ZeroizeOnDrop};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
mod schnorr;
/// Distributed key generation protocol.
pub use dkg::{self, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView};
/// Curve trait and provided curves/HRAMs, forming various ciphersuites.
pub mod curve;
use curve::Curve;
/// Distributed key generation protocol.
pub mod key_gen;
/// Promote keys between curves.
pub mod promote;
/// Algorithm for the signing process.
pub mod algorithm;
mod nonce;
@@ -72,59 +61,11 @@ pub(crate) fn validate_map<T>(
Ok(())
}
/// Parameters for a multisig.
// These fields can not be made public as they should be static
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct FrostParams {
/// Participants needed to sign on behalf of the group.
t: u16,
/// Amount of participants.
n: u16,
/// Index of the participant being acted for.
i: u16,
}
impl FrostParams {
pub fn new(t: u16, n: u16, i: u16) -> Result<FrostParams, FrostError> {
if (t == 0) || (n == 0) {
Err(FrostError::ZeroParameter(t, n))?;
}
// When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason),
// but it's not invalid to do so
if t > n {
Err(FrostError::InvalidRequiredQuantity(t, n))?;
}
if (i == 0) || (i > n) {
Err(FrostError::InvalidParticipantIndex(n, i))?;
}
Ok(FrostParams { t, n, i })
}
pub fn t(&self) -> u16 {
self.t
}
pub fn n(&self) -> u16 {
self.n
}
pub fn i(&self) -> u16 {
self.i
}
}
/// Various errors possible during key generation/signing.
/// Various errors possible during signing.
#[derive(Copy, Clone, Error, Debug)]
pub enum FrostError {
#[error("a parameter was 0 (required {0}, participants {1})")]
ZeroParameter(u16, u16),
#[error("too many participants (max {1}, got {0})")]
TooManyParticipants(usize, u16),
#[error("invalid amount of required participants (max {1}, got {0})")]
InvalidRequiredQuantity(u16, u16),
#[error("invalid participant index (0 < index <= {0}, yet index is {1})")]
InvalidParticipantIndex(u16, u16),
#[error("invalid signing set ({0})")]
InvalidSigningSet(&'static str),
#[error("invalid participant quantity (expected {0}, got {1})")]
@@ -133,10 +74,7 @@ pub enum FrostError {
DuplicatedIndex(u16),
#[error("missing participant {0}")]
MissingParticipant(u16),
#[error("invalid commitment (participant {0})")]
InvalidCommitment(u16),
#[error("invalid proof of knowledge (participant {0})")]
InvalidProofOfKnowledge(u16),
#[error("invalid preprocess (participant {0})")]
InvalidPreprocess(u16),
#[error("invalid share (participant {0})")]
@@ -145,280 +83,3 @@ pub enum FrostError {
#[error("internal error ({0})")]
InternalError(&'static str),
}
/// Calculate the lagrange coefficient for a signing set.
pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
let mut num = F::one();
let mut denom = F::one();
for l in included {
if i == *l {
continue;
}
let share = F::from(u64::try_from(*l).unwrap());
num *= share;
denom *= share - F::from(u64::try_from(i).unwrap());
}
// Safe as this will only be 0 if we're part of the above loop
// (which we have an if case to avoid)
num * denom.invert().unwrap()
}
/// Core keys generated by performing a FROST keygen protocol.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct FrostCore<C: Curve> {
/// FROST Parameters.
#[zeroize(skip)]
params: FrostParams,
/// Secret share key.
secret_share: C::F,
/// Group key.
group_key: C::G,
/// Verification shares.
#[zeroize(skip)]
verification_shares: HashMap<u16, C::G>,
}
impl<C: Curve> Drop for FrostCore<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> ZeroizeOnDrop for FrostCore<C> {}
impl<C: Curve> Debug for FrostCore<C> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FrostCore")
.field("params", &self.params)
.field("group_key", &self.group_key)
.field("verification_shares", &self.verification_shares)
.finish()
}
}
impl<C: Curve> FrostCore<C> {
pub(crate) fn new(
params: FrostParams,
secret_share: C::F,
verification_shares: HashMap<u16, C::G>,
) -> FrostCore<C> {
#[cfg(debug_assertions)]
validate_map(&verification_shares, &(0 ..= params.n).collect::<Vec<_>>(), 0).unwrap();
let t = (1 ..= params.t).collect::<Vec<_>>();
FrostCore {
params,
secret_share,
group_key: t.iter().map(|i| verification_shares[i] * lagrange::<C::F>(*i, &t)).sum(),
verification_shares,
}
}
pub fn params(&self) -> FrostParams {
self.params
}
#[cfg(any(test, feature = "tests"))]
pub(crate) fn secret_share(&self) -> C::F {
self.secret_share
}
pub fn group_key(&self) -> C::G {
self.group_key
}
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
self.verification_shares.clone()
}
pub fn serialized_len(n: u16) -> usize {
8 + C::ID.len() + (3 * 2) + C::F_len() + C::G_len() + (usize::from(n) * C::G_len())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(FrostCore::<C>::serialized_len(self.params.n));
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID);
serialized.extend(self.params.t.to_be_bytes());
serialized.extend(self.params.n.to_be_bytes());
serialized.extend(self.params.i.to_be_bytes());
serialized.extend(self.secret_share.to_repr().as_ref());
for l in 1 ..= self.params.n {
serialized.extend(self.verification_shares[&l].to_bytes().as_ref());
}
serialized
}
pub fn deserialize<R: Read>(cursor: &mut R) -> Result<FrostCore<C>, FrostError> {
{
let missing = FrostError::InternalError("FrostCore serialization is missing its curve");
let different = FrostError::InternalError("deserializing FrostCore for another curve");
let mut id_len = [0; 4];
cursor.read_exact(&mut id_len).map_err(|_| missing)?;
if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len {
Err(different)?;
}
let mut id = vec![0; C::ID.len()];
cursor.read_exact(&mut id).map_err(|_| missing)?;
if id != C::ID {
Err(different)?;
}
}
let (t, n, i) = {
let mut read_u16 = || {
let mut value = [0; 2];
cursor
.read_exact(&mut value)
.map_err(|_| FrostError::InternalError("missing participant quantities"))?;
Ok(u16::from_be_bytes(value))
};
(read_u16()?, read_u16()?, read_u16()?)
};
let secret_share =
C::read_F(cursor).map_err(|_| FrostError::InternalError("invalid secret share"))?;
let mut verification_shares = HashMap::new();
for l in 1 ..= n {
verification_shares.insert(
l,
C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid verification share"))?,
);
}
Ok(FrostCore::new(
FrostParams::new(t, n, i).map_err(|_| FrostError::InternalError("invalid parameters"))?,
secret_share,
verification_shares,
))
}
}
/// FROST keys usable for signing.
#[derive(Clone, Debug, Zeroize)]
pub struct FrostKeys<C: Curve> {
/// Core keys.
#[zeroize(skip)]
core: Arc<FrostCore<C>>,
/// Offset applied to these keys.
pub(crate) offset: Option<C::F>,
}
// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786
impl<C: Curve> Drop for FrostKeys<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> ZeroizeOnDrop for FrostKeys<C> {}
/// View of keys passed to algorithm implementations.
#[derive(Clone, Zeroize)]
pub struct FrostView<C: Curve> {
group_key: C::G,
#[zeroize(skip)]
included: Vec<u16>,
secret_share: C::F,
#[zeroize(skip)]
verification_shares: HashMap<u16, C::G>,
}
impl<C: Curve> Drop for FrostView<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> ZeroizeOnDrop for FrostView<C> {}
impl<C: Curve> FrostKeys<C> {
pub fn new(core: FrostCore<C>) -> FrostKeys<C> {
FrostKeys { core: Arc::new(core), offset: None }
}
/// Offset the keys by a given scalar to allow for account and privacy schemes.
/// This offset is ephemeral and will not be included when these keys are serialized.
/// Keys offset multiple times will form a new offset of their sum.
/// Not IETF compliant.
pub fn offset(&self, offset: C::F) -> FrostKeys<C> {
let mut res = self.clone();
// Carry any existing offset
// Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
// one-time-key offset
res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero));
res
}
pub fn params(&self) -> FrostParams {
self.core.params
}
pub(crate) fn secret_share(&self) -> C::F {
self.core.secret_share
}
/// Returns the group key with any offset applied.
pub fn group_key(&self) -> C::G {
self.core.group_key + (C::generator() * self.offset.unwrap_or_else(C::F::zero))
}
/// Returns all participants' verification shares without any offsetting.
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
self.core.verification_shares()
}
pub fn serialized_len(n: u16) -> usize {
FrostCore::<C>::serialized_len(n)
}
pub fn serialize(&self) -> Vec<u8> {
self.core.serialize()
}
pub fn view(&self, included: &[u16]) -> Result<FrostView<C>, FrostError> {
if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len())
{
Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?;
}
let offset_share = self.offset.unwrap_or_else(C::F::zero) *
C::F::from(included.len().try_into().unwrap()).invert().unwrap();
let offset_verification_share = C::generator() * offset_share;
Ok(FrostView {
group_key: self.group_key(),
secret_share: (self.secret_share() * lagrange::<C::F>(self.params().i, included)) +
offset_share,
verification_shares: self
.verification_shares()
.iter()
.map(|(l, share)| {
(*l, (*share * lagrange::<C::F>(*l, included)) + offset_verification_share)
})
.collect(),
included: included.to_vec(),
})
}
}
impl<C: Curve> FrostView<C> {
pub fn group_key(&self) -> C::G {
self.group_key
}
pub fn included(&self) -> Vec<u16> {
self.included.clone()
}
pub fn secret_share(&self) -> C::F {
self.secret_share
}
pub fn verification_share(&self, l: u16) -> C::G {
self.verification_shares[&l]
}
}

View File

@@ -15,7 +15,7 @@ use std::{
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};
use transcript::Transcript;
@@ -34,13 +34,19 @@ fn dleq_transcript<T: Transcript>() -> T {
// This is considered a single nonce as r = d + be
#[derive(Clone, Zeroize)]
pub(crate) struct Nonce<C: Curve>(pub(crate) [C::F; 2]);
impl<C: Curve> Drop for Nonce<C> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<C: Curve> ZeroizeOnDrop for Nonce<C> {}
// Commitments to a specific generator for this nonce
#[derive(Copy, Clone, PartialEq, Eq, Zeroize)]
#[derive(Copy, Clone, PartialEq, Eq)]
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
impl<C: Curve> GeneratorCommitments<C> {
fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
Ok(GeneratorCommitments([C::read_G(reader)?, C::read_G(reader)?]))
Ok(GeneratorCommitments([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
}
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@@ -50,7 +56,7 @@ impl<C: Curve> GeneratorCommitments<C> {
}
// A single nonce's commitments and relevant proofs
#[derive(Clone, PartialEq, Eq, Zeroize)]
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct NonceCommitments<C: Curve> {
// Called generators as these commitments are indexed by generator
pub(crate) generators: Vec<GeneratorCommitments<C>>,
@@ -130,7 +136,7 @@ impl<C: Curve> NonceCommitments<C> {
}
}
#[derive(Clone, PartialEq, Eq, Zeroize)]
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct Commitments<C: Curve> {
// Called nonces as these commitments are indexed by nonce
pub(crate) nonces: Vec<NonceCommitments<C>>,
@@ -165,7 +171,7 @@ impl<C: Curve> Commitments<C> {
// committed to as their entire series per-nonce, not as isolates
if let Some(dleqs) = &nonce.dleqs {
let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
let mut buf = Vec::with_capacity(C::G_len() + C::F_len());
let mut buf = vec![];
dleq.serialize(&mut buf).unwrap();
t.append_message(label, &buf);
};
@@ -194,7 +200,6 @@ impl<C: Curve> Commitments<C> {
}
}
#[derive(Zeroize)]
pub(crate) struct IndividualBinding<C: Curve> {
commitments: Commitments<C>,
binding_factors: Option<Vec<C::F>>,
@@ -202,15 +207,6 @@ pub(crate) struct IndividualBinding<C: Curve> {
pub(crate) struct BindingFactor<C: Curve>(pub(crate) HashMap<u16, IndividualBinding<C>>);
impl<C: Curve> Zeroize for BindingFactor<C> {
fn zeroize(&mut self) {
for (mut validator, mut binding) in self.0.drain() {
validator.zeroize();
binding.zeroize();
}
}
}
impl<C: Curve> BindingFactor<C> {
pub(crate) fn insert(&mut self, i: u16, commitments: Commitments<C>) {
self.0.insert(i, IndividualBinding { commitments, binding_factors: None });

View File

@@ -1,141 +0,0 @@
use std::{
marker::PhantomData,
io::{self, Read, Write},
sync::Arc,
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use group::GroupEncoding;
use transcript::{Transcript, RecommendedTranscript};
use dleq::DLEqProof;
use crate::{curve::Curve, FrostError, FrostCore, FrostKeys, validate_map};
/// Promote a set of keys to another Curve definition.
pub trait CurvePromote<C2: Curve> {
#[doc(hidden)]
#[allow(non_snake_case)]
fn _bound_C2(_c2: C2) {
panic!()
}
fn promote(self) -> FrostKeys<C2>;
}
// Implement promotion to different ciphersuites, panicking if the generators are different
// Commented due to lack of practical benefit. While it'd have interoperability benefits, those
// would have their own DKG process which isn't compatible anyways. This becomes unsafe code
// that'll never be used but we're bound to support
/*
impl<C1: Curve, C2: Curve> CurvePromote<C2> for FrostKeys<C1>
where
C2: Curve<F = C1::F, G = C1::G>,
{
fn promote(self) -> FrostKeys<C2> {
assert_eq!(C::GENERATOR, C2::GENERATOR);
FrostKeys {
core: Arc::new(FrostCore {
params: self.core.params,
secret_share: self.core.secret_share,
group_key: self.core.group_key,
verification_shares: self.core.verification_shares(),
}),
offset: None,
}
}
}
*/
fn transcript<G: GroupEncoding>(key: G, i: u16) -> RecommendedTranscript {
let mut transcript = RecommendedTranscript::new(b"FROST Generator Update");
transcript.append_message(b"group_key", key.to_bytes().as_ref());
transcript.append_message(b"participant", &i.to_be_bytes());
transcript
}
/// Proof of valid promotion to another generator.
#[derive(Clone, Copy)]
pub struct GeneratorProof<C: Curve> {
share: C::G,
proof: DLEqProof<C::G>,
}
impl<C: Curve> GeneratorProof<C> {
pub fn serialize<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.share.to_bytes().as_ref())?;
self.proof.serialize(writer)
}
pub fn deserialize<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
Ok(GeneratorProof { share: C::read_G(reader)?, proof: DLEqProof::deserialize(reader)? })
}
}
/// Promote a set of keys from one curve to another, where the elliptic curve is the same.
/// Since the Curve trait additionally specifies a generator, this provides an O(n) way to update
/// the generator used with keys. The key generation protocol itself is exponential.
pub struct GeneratorPromotion<C1: Curve, C2: Curve> {
base: FrostKeys<C1>,
proof: GeneratorProof<C1>,
_c2: PhantomData<C2>,
}
impl<C1: Curve, C2: Curve> GeneratorPromotion<C1, C2>
where
C2: Curve<F = C1::F, G = C1::G>,
{
/// Begin promoting keys from one curve to another. Returns a proof this share was properly
/// promoted.
pub fn promote<R: RngCore + CryptoRng>(
rng: &mut R,
base: FrostKeys<C1>,
) -> (GeneratorPromotion<C1, C2>, GeneratorProof<C1>) {
// Do a DLEqProof for the new generator
let proof = GeneratorProof {
share: C2::generator() * base.secret_share(),
proof: DLEqProof::prove(
rng,
&mut transcript(base.core.group_key(), base.params().i),
&[C1::generator(), C2::generator()],
base.secret_share(),
),
};
(GeneratorPromotion { base, proof, _c2: PhantomData::<C2> }, proof)
}
/// Complete promotion by taking in the proofs from all other participants.
pub fn complete(
self,
proofs: &HashMap<u16, GeneratorProof<C1>>,
) -> Result<FrostKeys<C2>, FrostError> {
let params = self.base.params();
validate_map(proofs, &(1 ..= params.n).collect::<Vec<_>>(), params.i)?;
let original_shares = self.base.verification_shares();
let mut verification_shares = HashMap::new();
verification_shares.insert(params.i, self.proof.share);
for (i, proof) in proofs {
let i = *i;
proof
.proof
.verify(
&mut transcript(self.base.core.group_key(), i),
&[C1::generator(), C2::generator()],
&[original_shares[&i], proof.share],
)
.map_err(|_| FrostError::InvalidProofOfKnowledge(i))?;
verification_shares.insert(i, proof.share);
}
Ok(FrostKeys {
core: Arc::new(FrostCore::new(params, self.base.secret_share(), verification_shares)),
offset: None,
})
}
}

View File

@@ -1,77 +0,0 @@
use std::io::{self, Read, Write};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
use multiexp::BatchVerifier;
use crate::curve::Curve;
/// A Schnorr signature of the form (R, s) where s = r + cx.
#[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct SchnorrSignature<C: Curve> {
pub R: C::G,
pub s: C::F,
}
impl<C: Curve> SchnorrSignature<C> {
pub(crate) fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
Ok(SchnorrSignature { R: C::read_G(reader)?, s: C::read_F(reader)? })
}
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.R.to_bytes().as_ref())?;
writer.write_all(self.s.to_repr().as_ref())
}
}
pub(crate) fn sign<C: Curve>(
mut private_key: C::F,
mut nonce: C::F,
challenge: C::F,
) -> SchnorrSignature<C> {
let res = SchnorrSignature { R: C::generator() * nonce, s: nonce + (private_key * challenge) };
private_key.zeroize();
nonce.zeroize();
res
}
#[must_use]
pub(crate) fn verify<C: Curve>(
public_key: C::G,
challenge: C::F,
signature: &SchnorrSignature<C>,
) -> bool {
(C::generator() * signature.s) == (signature.R + (public_key * challenge))
}
pub(crate) fn batch_verify<C: Curve, R: RngCore + CryptoRng>(
rng: &mut R,
triplets: &[(u16, C::G, C::F, SchnorrSignature<C>)],
) -> Result<(), u16> {
let mut values = [(C::F::one(), C::generator()); 3];
let mut batch = BatchVerifier::new(triplets.len());
for triple in triplets {
// s = r + ca
// sG == R + cA
// R + cA - sG == 0
// R
values[0].1 = triple.3.R;
// cA
values[1] = (triple.2, triple.1);
// -sG
values[2].0 = -triple.3.s;
batch.queue(rng, triple.0, values);
}
batch.verify_vartime_with_vartime_blame()
}

View File

@@ -14,8 +14,8 @@ use group::{ff::PrimeField, GroupEncoding};
use crate::{
curve::Curve,
FrostError, FrostParams, FrostKeys, FrostView,
algorithm::{AddendumSerialize, Addendum, Algorithm},
FrostError, ThresholdParams, ThresholdKeys, ThresholdView,
algorithm::{WriteAddendum, Addendum, Algorithm},
validate_map,
};
@@ -24,6 +24,12 @@ pub(crate) use crate::nonce::*;
/// Trait enabling writing preprocesses and signature shares.
pub trait Writable {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
fn serialize(&self) -> Vec<u8> {
let mut buf = vec![];
self.write(&mut buf).unwrap();
buf
}
}
impl<T: Writable> Writable for Vec<T> {
@@ -35,18 +41,18 @@ impl<T: Writable> Writable for Vec<T> {
}
}
/// Pairing of an Algorithm with a FrostKeys instance and this specific signing set.
/// Pairing of an Algorithm with a ThresholdKeys instance and this specific signing set.
#[derive(Clone)]
pub struct Params<C: Curve, A: Algorithm<C>> {
algorithm: A,
keys: FrostKeys<C>,
view: FrostView<C>,
keys: ThresholdKeys<C>,
view: ThresholdView<C>,
}
impl<C: Curve, A: Algorithm<C>> Params<C, A> {
pub fn new(
algorithm: A,
keys: FrostKeys<C>,
keys: ThresholdKeys<C>,
included: &[u16],
) -> Result<Params<C, A>, FrostError> {
let params = keys.params();
@@ -55,16 +61,16 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
included.sort_unstable();
// Included < threshold
if included.len() < usize::from(params.t) {
if included.len() < usize::from(params.t()) {
Err(FrostError::InvalidSigningSet("not enough signers"))?;
}
// Invalid index
if included[0] == 0 {
Err(FrostError::InvalidParticipantIndex(included[0], params.n))?;
Err(FrostError::InvalidParticipantIndex(included[0], params.n()))?;
}
// OOB index
if included[included.len() - 1] > params.n {
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n))?;
if included[included.len() - 1] > params.n() {
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n()))?;
}
// Same signer included multiple times
for i in 0 .. (included.len() - 1) {
@@ -73,7 +79,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
}
}
// Not included
if !included.contains(&params.i) {
if !included.contains(&params.i()) {
Err(FrostError::InvalidSigningSet("signing despite not being included"))?;
}
@@ -81,17 +87,17 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
Ok(Params { algorithm, view: keys.view(&included).unwrap(), keys })
}
pub fn multisig_params(&self) -> FrostParams {
pub fn multisig_params(&self) -> ThresholdParams {
self.keys.params()
}
pub fn view(&self) -> FrostView<C> {
pub fn view(&self) -> ThresholdView<C> {
self.view.clone()
}
}
/// Preprocess for an instance of the FROST signing protocol.
#[derive(Clone, PartialEq, Eq, Zeroize)]
#[derive(Clone, PartialEq, Eq)]
pub struct Preprocess<C: Curve, A: Addendum> {
pub(crate) commitments: Commitments<C>,
pub addendum: A,
@@ -107,6 +113,7 @@ impl<C: Curve, A: Addendum> Writable for Preprocess<C, A> {
#[derive(Zeroize)]
pub(crate) struct PreprocessData<C: Curve, A: Addendum> {
pub(crate) nonces: Vec<Nonce<C>>,
#[zeroize(skip)]
pub(crate) preprocess: Preprocess<C, A>,
}
@@ -140,7 +147,7 @@ struct SignData<C: Curve> {
}
/// Share of a signature produced via FROST.
#[derive(Clone, PartialEq, Eq, Zeroize)]
#[derive(Clone, PartialEq, Eq)]
pub struct SignatureShare<C: Curve>(C::F);
impl<C: Curve> Writable for SignatureShare<C> {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@@ -158,7 +165,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
msg: &[u8],
) -> Result<(SignData<C>, SignatureShare<C>), FrostError> {
let multisig_params = params.multisig_params();
validate_map(&preprocesses, &params.view.included, multisig_params.i)?;
validate_map(&preprocesses, &params.view.included(), multisig_params.i())?;
{
// Domain separate FROST
@@ -167,10 +174,10 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
let nonces = params.algorithm.nonces();
#[allow(non_snake_case)]
let mut B = BindingFactor(HashMap::<u16, _>::with_capacity(params.view.included.len()));
let mut B = BindingFactor(HashMap::<u16, _>::with_capacity(params.view.included().len()));
{
// Parse the preprocesses
for l in &params.view.included {
for l in &params.view.included() {
{
params
.algorithm
@@ -178,7 +185,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
}
if *l == params.keys.params().i {
if *l == params.keys.params().i() {
let commitments = our_preprocess.preprocess.commitments.clone();
commitments.transcript(params.algorithm.transcript());
@@ -216,7 +223,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
// Include the offset, if one exists
// While this isn't part of the FROST-expected rho transcript, the offset being here coincides
// with another specification (despite the transcript format being distinct)
if let Some(offset) = params.keys.offset {
if let Some(offset) = params.keys.current_offset() {
// Transcript as a point
// Under a coordinated model, the coordinator can be the only party to know the discrete log
// of the offset. This removes the ability for any signer to provide the discrete log,
@@ -262,7 +269,7 @@ fn complete<C: Curve, A: Algorithm<C>>(
mut shares: HashMap<u16, SignatureShare<C>>,
) -> Result<A::Signature, FrostError> {
let params = sign_params.multisig_params();
validate_map(&shares, &sign_params.view.included, params.i)?;
validate_map(&shares, &sign_params.view.included(), params.i())?;
let mut responses = HashMap::new();
responses.insert(params.i(), sign.share);
@@ -275,13 +282,14 @@ fn complete<C: Curve, A: Algorithm<C>>(
// Perform signature validation instead of individual share validation
// For the success route, which should be much more frequent, this should be faster
// It also acts as an integrity check of this library's signing function
if let Some(sig) = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum) {
if let Some(sig) = sign_params.algorithm.verify(sign_params.view.group_key(), &sign.Rs, sum) {
return Ok(sig);
}
// Find out who misbehaved. It may be beneficial to randomly sort this to have detection be
// within n / 2 on average, and not gameable to n, though that should be minor
for l in &sign_params.view.included {
// TODO
for l in &sign_params.view.included() {
if !sign_params.algorithm.verify_share(
sign_params.view.verification_share(*l),
&sign.B.bound(*l),
@@ -367,7 +375,7 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
/// Creates a new machine to generate a signature with the specified keys.
pub fn new(
algorithm: A,
keys: FrostKeys<C>,
keys: ThresholdKeys<C>,
included: &[u16],
) -> Result<AlgorithmMachine<C, A>, FrostError> {
Ok(AlgorithmMachine { params: Params::new(algorithm, keys, included)? })

View File

@@ -2,23 +2,7 @@ use rand_core::{RngCore, CryptoRng};
use group::Group;
use crate::{Curve, FrostCore, tests::core_gen};
// Test generation of FROST keys
fn key_generation<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// This alone verifies the verification shares and group key are agreed upon as expected
core_gen::<_, C>(rng);
}
// Test serialization of generated keys
fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
for (_, keys) in core_gen::<_, C>(rng) {
assert_eq!(
&FrostCore::<C>::deserialize::<&[u8]>(&mut keys.serialize().as_ref()).unwrap(),
&keys
);
}
}
use crate::Curve;
// Test successful multiexp, with enough pairs to trigger its variety of algorithms
// Multiexp has its own tests, yet only against k256 and Ed25519 (which should be sufficient
@@ -28,7 +12,7 @@ pub fn test_multiexp<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let mut sum = C::G::identity();
for _ in 0 .. 10 {
for _ in 0 .. 100 {
pairs.push((C::random_F(&mut *rng), C::generator() * C::random_F(&mut *rng)));
pairs.push((C::random_nonzero_F(&mut *rng), C::generator() * C::random_nonzero_F(&mut *rng)));
sum += pairs[pairs.len() - 1].1 * pairs[pairs.len() - 1].0;
}
assert_eq!(multiexp::multiexp(&pairs), sum);
@@ -40,8 +24,4 @@ pub fn test_curve<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// TODO: Test the Curve functions themselves
test_multiexp::<_, C>(rng);
// Test FROST key generation and serialization of FrostCore works as expected
key_generation::<_, C>(rng);
keys_serialization::<_, C>(rng);
}

View File

@@ -5,7 +5,7 @@ use crate::{
tests::vectors::{Vectors, test_with_vectors},
};
#[cfg(any(test, feature = "ristretto"))]
#[cfg(feature = "ristretto")]
#[test]
fn ristretto_vectors() {
test_with_vectors::<_, curve::Ristretto, curve::IetfRistrettoHram>(

View File

@@ -1,8 +1,11 @@
use rand_core::OsRng;
use ciphersuite::Ciphersuite;
use schnorr::SchnorrSignature;
use crate::{
curve::{Curve, Ed448, Ietf8032Ed448Hram, IetfEd448Hram},
schnorr::{SchnorrSignature, verify},
curve::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram},
tests::vectors::{Vectors, test_with_vectors},
};
@@ -37,11 +40,9 @@ fn ed448_8032_vector() {
let R = Ed448::read_G::<&[u8]>(&mut sig.as_ref()).unwrap();
let s = Ed448::read_F::<&[u8]>(&mut &sig[57 ..]).unwrap();
assert!(verify(
A,
Ietf8032Ed448Hram::hram(&context, &R, &A, &msg),
&SchnorrSignature::<Ed448> { R, s }
));
assert!(
SchnorrSignature::<Ed448> { R, s }.verify(A, Ietf8032Ed448Hram::hram(&context, &R, &A, &msg))
);
}
#[test]

View File

@@ -1,6 +1,6 @@
#[cfg(any(test, feature = "dalek"))]
#[cfg(any(feature = "ristretto", feature = "ed25519"))]
mod dalek;
#[cfg(feature = "kp256")]
#[cfg(any(feature = "secp256k1", feature = "p256"))]
mod kp256;
#[cfg(feature = "ed448")]
mod ed448;

View File

@@ -2,21 +2,16 @@ use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
use group::ff::Field;
pub use dkg::tests::{key_gen, recover_key};
use crate::{
Curve, FrostParams, FrostCore, FrostKeys, lagrange,
key_gen::{SecretShare, Commitments as KGCommitments, KeyGenMachine},
Curve, ThresholdKeys,
algorithm::Algorithm,
sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
};
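A hedged sketch of how these re-exported dkg test helpers compose, inferred from their use later in this commit; OsRng and the Ristretto curve are illustrative.
// Illustrative: generate test keys via the dkg crate, then confirm the secret
// recovered by interpolation matches the group key.
let keys = key_gen::<_, Ristretto>(&mut OsRng);
let secret = recover_key(&keys);
assert_eq!(Ristretto::generator() * secret, keys[&1].group_key());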
/// Curve tests.
pub mod curve;
/// Schnorr signature tests.
pub mod schnorr;
/// Promotion tests.
pub mod promote;
/// Vectorized test suite to ensure consistency.
pub mod vectors;
@@ -39,102 +34,11 @@ pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
res
}
/// Generate FROST keys (as FrostCore objects) for tests.
pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, FrostCore<C>> {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= PARTICIPANTS {
let machine = KeyGenMachine::<C>::new(
FrostParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(),
"FROST Test key_gen".to_string(),
);
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
commitments.insert(i, {
let mut buf = vec![];
these_commitments.write(&mut buf).unwrap();
KGCommitments::read::<&[u8]>(
&mut buf.as_ref(),
FrostParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 },
)
.unwrap()
});
}
let mut secret_shares = HashMap::new();
let mut machines = machines
.drain()
.map(|(l, machine)| {
let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
let mut buf = vec![];
share.write(&mut buf).unwrap();
(l, SecretShare::<C>::read::<&[u8]>(&mut buf.as_ref()).unwrap())
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares);
(l, machine)
})
.collect::<HashMap<_, _>>();
let mut verification_shares = None;
let mut group_key = None;
machines
.drain()
.map(|(i, machine)| {
let mut our_secret_shares = HashMap::new();
for (l, shares) in &secret_shares {
if i == *l {
continue;
}
our_secret_shares.insert(*l, shares[&i].clone());
}
let these_keys = machine.complete(rng, our_secret_shares).unwrap();
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(these_keys.verification_shares());
}
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, these_keys)
})
.collect::<HashMap<_, _>>()
}
/// Generate FROST keys for tests.
pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, FrostKeys<C>> {
core_gen(rng).drain().map(|(i, core)| (i, FrostKeys::new(core))).collect()
}
/// Recover the secret from a collection of keys.
pub fn recover<C: Curve>(keys: &HashMap<u16, FrostKeys<C>>) -> C::F {
let first = keys.values().next().expect("no keys provided");
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
let included = keys.keys().cloned().collect::<Vec<_>>();
let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| {
accum + (keys.secret_share() * lagrange::<C::F>(*i, &included))
});
assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys");
group_private
}
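The recovery above hinges on Lagrange interpolation at zero; a minimal standalone sketch of that coefficient, assuming 1-indexed participant indices in a prime field (the name and structure are hypothetical, and the crate's own helper may differ).
// l_i(0) = product over j in included, j != i, of j / (j - i), in the scalar field.
fn lagrange_at_zero<F: ff::PrimeField>(i: u16, included: &[u16]) -> F {
  let (mut num, mut denom) = (F::one(), F::one());
  for j in included {
    if *j == i {
      continue;
    }
    let j = F::from(u64::from(*j));
    num *= j;
    denom *= j - F::from(u64::from(i));
  }
  num * denom.invert().unwrap()
}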
/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm.
pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
rng: &mut R,
algorithm: A,
keys: &HashMap<u16, FrostKeys<C>>,
keys: &HashMap<u16, ThresholdKeys<C>>,
) -> HashMap<u16, AlgorithmMachine<C, A>> {
let mut included = vec![];
while included.len() < usize::from(keys[&1].params().t()) {

View File

@@ -1,121 +0,0 @@
use std::{marker::PhantomData, collections::HashMap};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use group::Group;
use crate::{
Curve, // FrostKeys,
promote::{GeneratorPromotion /* CurvePromote */},
tests::{clone_without, key_gen, schnorr::sign_core},
};
/*
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltFunctions<C: Curve> {
_curve: PhantomData<C>,
}
impl<C: Curve> Curve for AltFunctions<C> {
type F = C::F;
type G = C::G;
const ID: &'static [u8] = b"alt_functions";
fn generator() -> Self::G {
C::generator()
}
fn hash_msg(msg: &[u8]) -> Vec<u8> {
C::hash_msg(&[msg, b"alt"].concat())
}
fn hash_binding_factor(binding: &[u8]) -> Self::F {
C::hash_to_F(b"rho_alt", binding)
}
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
C::hash_to_F(&[dst, b"alt"].concat(), msg)
}
}
// Test promotion of FROST keys to another set of functions for interoperability
fn test_ciphersuite_promotion<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let keys = key_gen::<_, C>(&mut *rng);
for keys in keys.values() {
let promoted: FrostKeys<AltFunctions<C>> = keys.clone().promote();
// Verify equivalence via their serializations, minus the ID's length and ID itself
assert_eq!(
keys.serialize()[(4 + C::ID.len()) ..],
promoted.serialize()[(4 + AltFunctions::<C>::ID.len()) ..]
);
}
}
*/
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltGenerator<C: Curve> {
_curve: PhantomData<C>,
}
impl<C: Curve> Curve for AltGenerator<C> {
type F = C::F;
type G = C::G;
const ID: &'static [u8] = b"alt_generator";
fn generator() -> Self::G {
C::G::generator() * C::hash_to_F(b"FROST_tests", b"generator")
}
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
C::hash_to_vec(&[b"FROST_tests_alt", dst].concat(), data)
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
C::hash_to_F(&[b"FROST_tests_alt", dst].concat(), data)
}
}
// Test promotion of FROST keys to another generator
fn test_generator_promotion<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// A seeded RNG could theoretically generate keys for both C1 and C2, verifying promotion that way?
// TODO
let keys = key_gen::<_, C>(&mut *rng);
let mut promotions = HashMap::new();
let mut proofs = HashMap::new();
for (i, keys) in &keys {
let promotion = GeneratorPromotion::<_, AltGenerator<C>>::promote(&mut *rng, keys.clone());
promotions.insert(*i, promotion.0);
proofs.insert(*i, promotion.1);
}
let mut new_keys = HashMap::new();
let mut group_key = None;
let mut verification_shares = None;
for (i, promoting) in promotions.drain() {
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
assert_eq!(keys[&i].params(), promoted.params());
assert_eq!(keys[&i].secret_share(), promoted.secret_share());
if group_key.is_none() {
group_key = Some(keys[&i].group_key());
verification_shares = Some(keys[&i].verification_shares());
}
assert_eq!(keys[&i].group_key(), group_key.unwrap());
assert_eq!(&keys[&i].verification_shares(), verification_shares.as_ref().unwrap());
new_keys.insert(i, promoted);
}
// Sign with the keys to ensure their integrity
sign_core(rng, &new_keys);
}
pub fn test_promotion<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// test_ciphersuite_promotion::<_, C>(rng);
test_generator_promotion::<_, C>(rng);
}
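Conceptually, this promotion only swaps the generator, so each participant's promoted verification share must commit to the same secret share under the new generator; a hedged statement of that relationship (the proof participants exchange to convince each other of it isn't sketched here, and the helper name is hypothetical).
// Illustrative: the relationship a correct generator promotion must preserve.
fn promoted_share_consistent<C1: Curve, C2: Curve<F = C1::F>>(
  secret_share: C1::F,
  promoted_verification_share: C2::G,
) -> bool {
  C2::generator() * secret_share == promoted_verification_share
}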

View File

@@ -1,131 +0,0 @@
use std::{marker::PhantomData, collections::HashMap};
use rand_core::{RngCore, CryptoRng};
use group::{ff::Field, Group, GroupEncoding};
use crate::{
Curve, FrostKeys,
schnorr::{self, SchnorrSignature},
algorithm::{Hram, Schnorr},
tests::{key_gen, algorithm_machines, sign as sign_test},
};
pub(crate) fn core_sign<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let private_key = C::random_F(&mut *rng);
let nonce = C::random_F(&mut *rng);
let challenge = C::random_F(rng); // Doesn't bother to craft an HRAm
assert!(schnorr::verify::<C>(
C::generator() * private_key,
challenge,
&schnorr::sign(private_key, nonce, challenge)
));
}
// The above sign function verifies signing works
// This verifies invalid signatures don't pass, using zero signatures, which should effectively be
// random
pub(crate) fn core_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
assert!(!schnorr::verify::<C>(
C::generator() * C::random_F(&mut *rng),
C::random_F(rng),
&SchnorrSignature { R: C::G::identity(), s: C::F::zero() }
));
}
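Both helpers reduce to the standard Schnorr verification equation; a hedged reference form over the crate's Curve and SchnorrSignature types, purely explanatory (how the crate's own verify is implemented internally may differ).
// Accept iff s*G == R + c*A, for challenge c, public key A, and nonce commitment R.
fn naive_verify<C: Curve>(public_key: C::G, challenge: C::F, sig: &SchnorrSignature<C>) -> bool {
  (C::generator() * sig.s) == (sig.R + (public_key * challenge))
}
Under this equation, the zero signature above only passes if c*A is the identity, which is negligibly likely for a random challenge and key.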
pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// Create 5 signatures
let mut keys = vec![];
let mut challenges = vec![];
let mut sigs = vec![];
for i in 0 .. 5 {
keys.push(C::random_F(&mut *rng));
challenges.push(C::random_F(&mut *rng));
sigs.push(schnorr::sign::<C>(keys[i], C::random_F(&mut *rng), challenges[i]));
}
// Batch verify
let triplets = (0 .. 5)
.map(|i| (u16::try_from(i + 1).unwrap(), C::generator() * keys[i], challenges[i], sigs[i]))
.collect::<Vec<_>>();
schnorr::batch_verify(rng, &triplets).unwrap();
// Shift 1 from one signature's s to another's and verify it fails
// This test will fail if unique factors aren't used per-signature, hence its inclusion
{
let mut triplets = triplets.clone();
triplets[1].3.s += C::F::one();
triplets[2].3.s -= C::F::one();
if let Err(blame) = schnorr::batch_verify(rng, &triplets) {
assert_eq!(blame, 2);
} else {
panic!("batch verification considered a malleated signature valid");
}
}
// Make sure a completely invalid signature fails when included
for i in 0 .. 5 {
let mut triplets = triplets.clone();
triplets[i].3.s = C::random_F(&mut *rng);
if let Err(blame) = schnorr::batch_verify(rng, &triplets) {
assert_eq!(blame, u16::try_from(i + 1).unwrap());
} else {
panic!("batch verification considered an invalid signature valid");
}
}
}
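The malleation case only fails because the batch check weights each signature with an independent random factor; a simplified, hedged sketch of that idea (unoptimized, and without the blame index the crate's batch_verify reports).
// The sum of z_i * (s_i*G - R_i - c_i*A_i) over all signatures must be the identity,
// with a fresh random z_i per signature so errors in two terms can't cancel.
fn naive_batch_verify<R: RngCore + CryptoRng, C: Curve>(
  rng: &mut R,
  sigs: &[(C::G, C::F, SchnorrSignature<C>)], // (public key, challenge, signature)
) -> bool {
  let mut accum = C::G::identity();
  for (key, challenge, sig) in sigs {
    let z = C::random_F(&mut *rng);
    accum += (C::generator() * sig.s - sig.R - (*key * *challenge)) * z;
  }
  accum == C::G::identity()
}
Shifting 1 between two s values preserves the unweighted sum, but not this weighted one, which is exactly what the shift test exercises.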
pub(crate) fn sign_core<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
keys: &HashMap<u16, FrostKeys<C>>,
) {
const MESSAGE: &[u8] = b"Hello, World!";
let machines = algorithm_machines(rng, Schnorr::<C, TestHram<C>>::new(), keys);
let sig = sign_test(&mut *rng, machines, MESSAGE);
let group_key = keys[&1].group_key();
assert!(schnorr::verify(group_key, TestHram::<C>::hram(&sig.R, &group_key, MESSAGE), &sig));
}
#[derive(Clone)]
pub struct TestHram<C: Curve> {
_curve: PhantomData<C>,
}
impl<C: Curve> Hram<C> for TestHram<C> {
#[allow(non_snake_case)]
fn hram(R: &C::G, A: &C::G, m: &[u8]) -> C::F {
C::hash_to_F(b"challenge", &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat())
}
}
fn sign<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let keys = key_gen::<_, C>(&mut *rng);
sign_core(rng, &keys);
}
fn sign_with_offset<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let mut keys = key_gen::<_, C>(&mut *rng);
let group_key = keys[&1].group_key();
let offset = C::hash_to_F(b"FROST Test sign_with_offset", b"offset");
for i in 1 ..= u16::try_from(keys.len()).unwrap() {
keys.insert(i, keys[&i].offset(offset));
}
let offset_key = group_key + (C::generator() * offset);
assert_eq!(keys[&1].group_key(), offset_key);
sign_core(rng, &keys);
}
pub fn test_schnorr<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// Test Schnorr signatures work as expected
// This is a bit unnecessary, as they should work for any valid curve, yet this establishes sanity
core_sign::<_, C>(rng);
core_verify::<_, C>(rng);
core_batch_verify::<_, C>(rng);
// Test Schnorr signatures under FROST
sign::<_, C>(rng);
sign_with_offset::<_, C>(rng);
}

View File

@@ -6,17 +6,17 @@ use rand_core::{RngCore, CryptoRng};
use group::{ff::PrimeField, GroupEncoding};
use dkg::tests::{test_ciphersuite as test_dkg};
use crate::{
curve::Curve,
FrostCore, FrostKeys,
ThresholdCore, ThresholdKeys,
algorithm::{Schnorr, Hram},
sign::{
Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess,
PreprocessData, SignMachine, SignatureMachine, AlgorithmMachine,
},
tests::{
clone_without, curve::test_curve, schnorr::test_schnorr, promote::test_promotion, recover,
},
tests::{clone_without, recover_key, curve::test_curve},
};
pub struct Vectors {
@@ -76,8 +76,8 @@ impl From<serde_json::Value> for Vectors {
}
}
// Load these vectors into FrostKeys using a custom serialization it'll deserialize
fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKeys<C>> {
// Load these vectors into ThresholdKeys using a custom serialization it'll deserialize
fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, ThresholdKeys<C>> {
let shares = vectors
.shares
.iter()
@@ -87,7 +87,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
let mut keys = HashMap::new();
for i in 1 ..= u16::try_from(shares.len()).unwrap() {
// Manually re-implement the serialization for FrostCore to import this data
// Manually re-implement the serialization for ThresholdCore to import this data
let mut serialized = vec![];
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID);
@@ -99,13 +99,13 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
serialized.extend(share.to_bytes().as_ref());
}
let these_keys = FrostCore::<C>::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap();
let these_keys = ThresholdCore::<C>::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap();
assert_eq!(these_keys.params().t(), vectors.threshold);
assert_eq!(usize::from(these_keys.params().n()), shares.len());
assert_eq!(these_keys.params().i(), i);
assert_eq!(these_keys.secret_share(), shares[usize::from(i - 1)]);
assert_eq!(hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key);
keys.insert(i, FrostKeys::new(these_keys));
keys.insert(i, ThresholdKeys::new(these_keys));
}
keys
@@ -117,17 +117,18 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
) {
// Do basic tests before trying the vectors
test_curve::<_, C>(&mut *rng);
test_schnorr::<_, C>(&mut *rng);
test_promotion::<_, C>(rng);
// Test the DKG
test_dkg::<_, C>(&mut *rng);
// Test against the vectors
let keys = vectors_to_multisig_keys::<C>(&vectors);
let group_key =
C::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap();
<C as Curve>::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap();
let secret =
C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap();
assert_eq!(C::generator() * secret, group_key);
assert_eq!(recover(&keys), secret);
assert_eq!(recover_key(&keys), secret);
let mut machines = vec![];
for i in &vectors.included {