Utilize zeroize (#76)

* Apply Zeroize to nonces used in Bulletproofs

Also makes bit decomposition constant time for a given number of 
outputs.

* Fix nonce reuse for single-signer CLSAG

* Attach Zeroize to most structures in Monero, and ZeroizeOnDrop to anything with private data

* Zeroize private keys and nonces

* Merge prepare_outputs and prepare_transactions

* Ensure CLSAG is constant time

* Pass by borrow where needed, bug fixes

The past few commits have been one in-progress chunk which I've 
broken up as best I could for readability.

* Add Zeroize to FROST structs

These still need to zeroize internally; that's the next step. Not quite as 
aggressive as Monero, partially due to the limitations of HashMaps, 
partially due to less concern about metadata, yet does still delete a 
few smaller items of metadata (group key, context string...).

* Remove Zeroize from most Monero multisig structs

These structs largely didn't have private data, just fields with private 
data, yet those fields implemented ZeroizeOnDrop making them already 
covered. While there are still traces of the transaction left in RAM, 
fully purging that was never the intent.

* Use Zeroize within dleq

bitvec doesn't offer Zeroize, so a manual zeroing has been implemented.

* Use Zeroize for random_nonce

It isn't perfect, due to the inability to zeroize the digest, and due to 
kp256 requiring a few transformations. It does the best it can though.

Does move the per-curve random_nonce to a provided one, which is allowed 
as of https://github.com/cfrg/draft-irtf-cfrg-frost/pull/231.

* Use Zeroize on FROST keygen/signing

* Zeroize constant time multiexp.

* Correct when FROST keygen zeroizes

* Move the FROST keys Arc into FrostKeys

Reduces amount of instances in memory.

* Manually implement Debug for FrostCore to not leak the secret share

* Misc bug fixes

* clippy + multiexp test bug fixes

* Correct FROST key gen share summation

It leaked our own share for ourself.

* Fix cross-group DLEq tests
This commit is contained in:
Luke Parker
2022-08-03 03:25:18 -05:00
committed by GitHub
parent a30568ff57
commit 797be71eb3
56 changed files with 698 additions and 425 deletions

View File

@@ -1,4 +1,4 @@
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use sha2::{Digest, Sha512};
@@ -21,7 +21,7 @@ macro_rules! dalek_curve {
) => {
use dalek_ff_group::{$Point, $POINT};
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Curve;
impl Curve for $Curve {
type F = Scalar;
@@ -30,13 +30,6 @@ macro_rules! dalek_curve {
const ID: &'static [u8] = $ID;
const GENERATOR: Self::G = $POINT;
fn random_nonce<R: RngCore + CryptoRng>(secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
seed.extend(&secret.to_bytes());
Self::hash_to_F(b"nonce", &seed)
}
fn hash_msg(msg: &[u8]) -> Vec<u8> {
Sha512::new()
.chain_update($CONTEXT)

View File

@@ -1,12 +1,14 @@
use std::io::Cursor;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use sha2::{digest::Update, Digest, Sha256};
use group::{ff::Field, GroupEncoding};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
use elliptic_curve::{
generic_array::GenericArray,
bigint::{Encoding, U384},
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
};
@@ -22,7 +24,7 @@ macro_rules! kp_curve {
$ID: literal,
$CONTEXT: literal
) => {
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Curve;
impl Curve for $Curve {
type F = $lib::Scalar;
@@ -31,13 +33,6 @@ macro_rules! kp_curve {
const ID: &'static [u8] = $ID;
const GENERATOR: Self::G = $lib::ProjectivePoint::GENERATOR;
fn random_nonce<R: RngCore + CryptoRng>(secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
seed.extend(secret.to_bytes());
Self::hash_to_F(&[$CONTEXT as &[u8], b"nonce"].concat(), &seed)
}
fn hash_msg(msg: &[u8]) -> Vec<u8> {
(&Sha256::new().chain($CONTEXT).chain(b"digest").chain(msg).finalize()).to_vec()
}
@@ -58,17 +53,21 @@ macro_rules! kp_curve {
let mut modulus = vec![0; 16];
modulus.extend((Self::F::zero() - Self::F::one()).to_bytes());
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
Self::read_F(&mut Cursor::new(
&U384::from_be_slice(&{
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
bytes
})
.reduce(&modulus)
.unwrap()
.to_be_bytes()[16 ..],
))
let mut unreduced = U384::from_be_bytes({
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
bytes
})
.reduce(&modulus)
.unwrap()
.to_be_bytes();
let mut array = *GenericArray::from_slice(&unreduced[16 ..]);
let res = $lib::Scalar::from_repr(array).unwrap();
unreduced.zeroize();
array.zeroize();
res
}
}

View File

@@ -5,6 +5,8 @@ use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use ff::{PrimeField, PrimeFieldBits};
use group::{Group, GroupOps, GroupEncoding, prime::PrimeGroup};
@@ -39,12 +41,12 @@ pub enum CurveError {
// elliptic-curve exists, yet it doesn't really serve the same role, nor does it use &[u8]/Vec<u8>
// It uses GenericArray which will hopefully be deprecated as Rust evolves and doesn't offer enough
// advantages in the modern day to be worth the hassle -- Kayaba
pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
/// Scalar field element type
// This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses
type F: PrimeField + PrimeFieldBits;
type F: PrimeField + PrimeFieldBits + Zeroize;
/// Group element type
type G: Group<Scalar = Self::F> + GroupOps + PrimeGroup;
type G: Group<Scalar = Self::F> + GroupOps + PrimeGroup + Zeroize;
/// ID for this curve
const ID: &'static [u8];
@@ -53,9 +55,6 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
// While group does provide this in its API, privacy coins may want to use a custom basepoint
const GENERATOR: Self::G;
/// Securely generate a random nonce. H4 from the IETF draft
fn random_nonce<R: RngCore + CryptoRng>(secret: Self::F, rng: &mut R) -> Self::F;
/// Hash the message for the binding factor. H3 from the IETF draft
// This doesn't actually need to be part of Curve as it does nothing with the curve
// This also solely relates to FROST and with a proper Algorithm/HRAM, all projects using
@@ -69,8 +68,23 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
/// Hash the commitments and message to calculate the binding factor. H1 from the IETF draft
fn hash_binding_factor(binding: &[u8]) -> Self::F;
// The following methods would optimally be F:: and G:: yet developers can't control F/G
// They can control a trait they pass into this library
/// Securely generate a random nonce. H4 from the IETF draft
fn random_nonce<R: RngCore + CryptoRng>(mut secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
let mut repr = secret.to_repr();
secret.zeroize();
seed.extend(repr.as_ref());
for i in repr.as_mut() {
*i = 0;
}
let res = Self::hash_to_F(b"nonce", &seed);
seed.zeroize();
res
}
/// Field element from hash. Used during key gen and by other crates under Serai as a general
/// utility
@@ -93,8 +107,14 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
fn read_F<R: Read>(r: &mut R) -> Result<Self::F, CurveError> {
let mut encoding = <Self::F as PrimeField>::Repr::default();
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidScalar)?;
// ff mandates this is canonical
Option::<Self::F>::from(Self::F::from_repr(encoding)).ok_or(CurveError::InvalidScalar)
let res =
Option::<Self::F>::from(Self::F::from_repr(encoding)).ok_or(CurveError::InvalidScalar);
for b in encoding.as_mut() {
*b = 0;
}
res
}
#[allow(non_snake_case)]

View File

@@ -6,6 +6,8 @@ use std::{
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use group::{
ff::{Field, PrimeField},
GroupEncoding,
@@ -15,7 +17,7 @@ use multiexp::{multiexp_vartime, BatchVerifier};
use crate::{
curve::Curve,
FrostError, FrostParams, FrostKeys,
FrostError, FrostParams, FrostCore,
schnorr::{self, SchnorrSignature},
validate_map,
};
@@ -54,7 +56,7 @@ fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
}
// Step 2: Provide a proof of knowledge
let r = C::F::random(rng);
let mut r = C::F::random(rng);
serialized.extend(
schnorr::sign::<C>(
coefficients[0],
@@ -67,6 +69,7 @@ fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
)
.serialize(),
);
r.zeroize();
// Step 4: Broadcast
(coefficients, commitments, serialized)
@@ -148,7 +151,7 @@ fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &FrostParams,
context: &str,
coefficients: Vec<C::F>,
coefficients: &mut Vec<C::F>,
our_commitments: Vec<C::G>,
commitments: HashMap<u16, Re>,
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> {
@@ -163,19 +166,13 @@ fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
continue;
}
res.insert(l, polynomial(&coefficients, l).to_repr().as_ref().to_vec());
res.insert(l, polynomial(coefficients, l).to_repr().as_ref().to_vec());
}
// Calculate our own share
let share = polynomial(&coefficients, params.i());
let share = polynomial(coefficients, params.i());
// The secret shares are discarded here, not cleared. While any system which leaves its memory
// accessible is likely totally lost already, making the distinction meaningless when the key gen
// system acts as the signer system and therefore actively holds the signing key anyways, it
// should be overwritten with /dev/urandom in the name of security (which still doesn't meet
// requirements for secure data deletion yet those requirements expect hardware access which is
// far past what this library can reasonably counter)
// TODO: Zero out the coefficients
coefficients.zeroize();
Ok((share, commitments, res))
}
@@ -189,17 +186,17 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: FrostParams,
mut secret_share: C::F,
commitments: HashMap<u16, Vec<C::G>>,
commitments: &mut HashMap<u16, Vec<C::G>>,
mut serialized: HashMap<u16, Re>,
) -> Result<FrostKeys<C>, FrostError> {
) -> Result<FrostCore<C>, FrostError> {
validate_map(&mut serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
// Step 2. Verify each share
let mut shares = HashMap::new();
// TODO: Clear serialized
for (l, share) in serialized.iter_mut() {
shares.insert(*l, C::read_F(share).map_err(|_| FrostError::InvalidShare(*l))?);
}
shares.insert(params.i(), secret_share);
// Calculate the exponent for a given participant and apply it to a series of commitments
// Initially used with the actual commitments to verify the secret share, later used with stripes
@@ -215,12 +212,12 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
};
let mut batch = BatchVerifier::new(shares.len());
for (l, share) in &shares {
for (l, share) in shares.iter_mut() {
if *l == params.i() {
continue;
}
secret_share += share;
secret_share += *share;
// This can be insecurely linearized from n * t to just n using the below sums for a given
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
@@ -228,6 +225,8 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
// per sender and not as an aggregate of all senders, which also enables blame
let mut values = exponential(params.i, &commitments[l]);
values.push((-*share, C::GENERATOR));
share.zeroize();
batch.queue(rng, *l, values);
}
batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?;
@@ -249,9 +248,7 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
// Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t)
debug_assert_eq!(C::GENERATOR * secret_share, verification_shares[&params.i()]);
// TODO: Clear serialized and shares
Ok(FrostKeys { params, secret_share, group_key: stripes[0], verification_shares, offset: None })
Ok(FrostCore { params, secret_share, group_key: stripes[0], verification_shares })
}
pub struct KeyGenMachine<C: Curve> {
@@ -260,19 +257,37 @@ pub struct KeyGenMachine<C: Curve> {
_curve: PhantomData<C>,
}
#[derive(Zeroize)]
pub struct SecretShareMachine<C: Curve> {
#[zeroize(skip)]
params: FrostParams,
context: String,
coefficients: Vec<C::F>,
#[zeroize(skip)]
our_commitments: Vec<C::G>,
}
impl<C: Curve> Drop for SecretShareMachine<C> {
fn drop(&mut self) {
self.zeroize()
}
}
#[derive(Zeroize)]
pub struct KeyMachine<C: Curve> {
#[zeroize(skip)]
params: FrostParams,
secret: C::F,
#[zeroize(skip)]
commitments: HashMap<u16, Vec<C::G>>,
}
impl<C: Curve> Drop for KeyMachine<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> KeyGenMachine<C> {
/// Creates a new machine to generate a key for the specified curve in the specified multisig
// The context string must be unique among multisigs
@@ -309,7 +324,7 @@ impl<C: Curve> SecretShareMachine<C> {
/// is also expected at index i which is locally handled. Returns a byte vector representing a
/// secret share for each other participant which should be encrypted before sending
pub fn generate_secret_shares<Re: Read, R: RngCore + CryptoRng>(
self,
mut self,
rng: &mut R,
commitments: HashMap<u16, Re>,
) -> Result<(KeyMachine<C>, HashMap<u16, Vec<u8>>), FrostError> {
@@ -317,8 +332,8 @@ impl<C: Curve> SecretShareMachine<C> {
rng,
&self.params,
&self.context,
self.coefficients,
self.our_commitments,
&mut self.coefficients,
self.our_commitments.clone(),
commitments,
)?;
Ok((KeyMachine { params: self.params, secret, commitments }, shares))
@@ -333,10 +348,10 @@ impl<C: Curve> KeyMachine<C> {
/// must report completion without issue before this key can be considered usable, yet you should
/// wait for all participants to report as such
pub fn complete<Re: Read, R: RngCore + CryptoRng>(
self,
mut self,
rng: &mut R,
shares: HashMap<u16, Re>,
) -> Result<FrostKeys<C>, FrostError> {
complete_r2(rng, self.params, self.secret, self.commitments, shares)
) -> Result<FrostCore<C>, FrostError> {
complete_r2(rng, self.params, self.secret, &mut self.commitments, shares)
}
}

View File

@@ -1,8 +1,10 @@
use core::fmt::Debug;
use std::{io::Read, collections::HashMap};
use core::fmt::{self, Debug};
use std::{io::Read, sync::Arc, collections::HashMap};
use thiserror::Error;
use zeroize::Zeroize;
use group::{
ff::{Field, PrimeField},
GroupEncoding,
@@ -18,6 +20,32 @@ pub mod sign;
pub mod tests;
// Validate a map of serialized values to have the expected included participants
pub(crate) fn validate_map<T>(
map: &mut HashMap<u16, T>,
included: &[u16],
ours: u16,
) -> Result<(), FrostError> {
if (map.len() + 1) != included.len() {
Err(FrostError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;
}
for included in included {
if *included == ours {
if map.contains_key(included) {
Err(FrostError::DuplicatedIndex(*included))?;
}
continue;
}
if !map.contains_key(included) {
Err(FrostError::MissingParticipant(*included))?;
}
}
Ok(())
}
/// Parameters for a multisig
// These fields can not be made public as they should be static
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
@@ -89,33 +117,6 @@ pub enum FrostError {
InternalError(&'static str),
}
// View of keys passable to algorithm implementations
#[derive(Clone)]
pub struct FrostView<C: Curve> {
group_key: C::G,
included: Vec<u16>,
secret_share: C::F,
verification_shares: HashMap<u16, C::G>,
}
impl<C: Curve> FrostView<C> {
pub fn group_key(&self) -> C::G {
self.group_key
}
pub fn included(&self) -> Vec<u16> {
self.included.clone()
}
pub fn secret_share(&self) -> C::F {
self.secret_share
}
pub fn verification_share(&self, l: u16) -> C::G {
self.verification_shares[&l]
}
}
/// Calculate the lagrange coefficient for a signing set
pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
let mut num = F::one();
@@ -135,9 +136,11 @@ pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
num * denom.invert().unwrap()
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FrostKeys<C: Curve> {
/// Core keys generated by performing a FROST keygen protocol
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct FrostCore<C: Curve> {
/// FROST Parameters
#[zeroize(skip)]
params: FrostParams,
/// Secret share key
@@ -145,32 +148,32 @@ pub struct FrostKeys<C: Curve> {
/// Group key
group_key: C::G,
/// Verification shares
#[zeroize(skip)]
verification_shares: HashMap<u16, C::G>,
/// Offset applied to these keys
offset: Option<C::F>,
}
impl<C: Curve> FrostKeys<C> {
/// Offset the keys by a given scalar to allow for account and privacy schemes
/// This offset is ephemeral and will not be included when these keys are serialized
/// Keys offset multiple times will form a new offset of their sum
/// Not IETF compliant
pub fn offset(&self, offset: C::F) -> FrostKeys<C> {
let mut res = self.clone();
// Carry any existing offset
// Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
// one-time-key offset
res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero));
res.group_key += C::GENERATOR * offset;
res
impl<C: Curve> Drop for FrostCore<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> Debug for FrostCore<C> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FrostCore")
.field("params", &self.params)
.field("group_key", &self.group_key)
.field("verification_shares", &self.verification_shares)
.finish()
}
}
impl<C: Curve> FrostCore<C> {
pub fn params(&self) -> FrostParams {
self.params
}
fn secret_share(&self) -> C::F {
pub(crate) fn secret_share(&self) -> C::F {
self.secret_share
}
@@ -178,39 +181,16 @@ impl<C: Curve> FrostKeys<C> {
self.group_key
}
fn verification_shares(&self) -> HashMap<u16, C::G> {
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
self.verification_shares.clone()
}
pub fn view(&self, included: &[u16]) -> Result<FrostView<C>, FrostError> {
if (included.len() < self.params.t.into()) || (usize::from(self.params.n) < included.len()) {
Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?;
}
let secret_share = self.secret_share * lagrange::<C::F>(self.params.i, included);
let offset = self.offset.unwrap_or_else(C::F::zero);
let offset_share = offset * C::F::from(included.len().try_into().unwrap()).invert().unwrap();
Ok(FrostView {
group_key: self.group_key,
secret_share: secret_share + offset_share,
verification_shares: self
.verification_shares
.iter()
.map(|(l, share)| {
(*l, (*share * lagrange::<C::F>(*l, included)) + (C::GENERATOR * offset_share))
})
.collect(),
included: included.to_vec(),
})
}
pub fn serialized_len(n: u16) -> usize {
8 + C::ID.len() + (3 * 2) + C::F_len() + C::G_len() + (usize::from(n) * C::G_len())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(FrostKeys::<C>::serialized_len(self.params.n));
let mut serialized = Vec::with_capacity(FrostCore::<C>::serialized_len(self.params.n));
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID);
serialized.extend(&self.params.t.to_be_bytes());
@@ -224,10 +204,10 @@ impl<C: Curve> FrostKeys<C> {
serialized
}
pub fn deserialize<R: Read>(cursor: &mut R) -> Result<FrostKeys<C>, FrostError> {
pub fn deserialize<R: Read>(cursor: &mut R) -> Result<FrostCore<C>, FrostError> {
{
let missing = FrostError::InternalError("FrostKeys serialization is missing its curve");
let different = FrostError::InternalError("deserializing FrostKeys for another curve");
let missing = FrostError::InternalError("FrostCore serialization is missing its curve");
let different = FrostError::InternalError("deserializing FrostCore for another curve");
let mut id_len = [0; 4];
cursor.read_exact(&mut id_len).map_err(|_| missing)?;
@@ -266,39 +246,133 @@ impl<C: Curve> FrostKeys<C> {
);
}
Ok(FrostKeys {
Ok(FrostCore {
params: FrostParams::new(t, n, i)
.map_err(|_| FrostError::InternalError("invalid parameters"))?,
secret_share,
group_key,
verification_shares,
offset: None,
})
}
}
// Validate a map of serialized values to have the expected included participants
pub(crate) fn validate_map<T>(
map: &mut HashMap<u16, T>,
included: &[u16],
ours: u16,
) -> Result<(), FrostError> {
if (map.len() + 1) != included.len() {
Err(FrostError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;
}
/// FROST keys usable for signing
#[derive(Clone, Debug, Zeroize)]
pub struct FrostKeys<C: Curve> {
/// Core keys
#[zeroize(skip)]
core: Arc<FrostCore<C>>,
for included in included {
if *included == ours {
if map.contains_key(included) {
Err(FrostError::DuplicatedIndex(*included))?;
}
continue;
}
if !map.contains_key(included) {
Err(FrostError::MissingParticipant(*included))?;
}
}
Ok(())
/// Offset applied to these keys
pub(crate) offset: Option<C::F>,
}
// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786
impl<C: Curve> Drop for FrostKeys<C> {
fn drop(&mut self) {
self.zeroize()
}
}
// View of keys passable to algorithm implementations
#[derive(Clone, Zeroize)]
pub struct FrostView<C: Curve> {
group_key: C::G,
#[zeroize(skip)]
included: Vec<u16>,
secret_share: C::F,
#[zeroize(skip)]
verification_shares: HashMap<u16, C::G>,
}
impl<C: Curve> Drop for FrostView<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve> FrostKeys<C> {
pub fn new(core: FrostCore<C>) -> FrostKeys<C> {
FrostKeys { core: Arc::new(core), offset: None }
}
/// Offset the keys by a given scalar to allow for account and privacy schemes
/// This offset is ephemeral and will not be included when these keys are serialized
/// Keys offset multiple times will form a new offset of their sum
/// Not IETF compliant
pub fn offset(&self, offset: C::F) -> FrostKeys<C> {
let mut res = self.clone();
// Carry any existing offset
// Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
// one-time-key offset
res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero));
res
}
pub fn params(&self) -> FrostParams {
self.core.params
}
pub(crate) fn secret_share(&self) -> C::F {
self.core.secret_share
}
pub fn group_key(&self) -> C::G {
self.core.group_key + (C::GENERATOR * self.offset.unwrap_or_else(C::F::zero))
}
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
self.core.verification_shares.clone()
}
pub fn serialized_len(n: u16) -> usize {
FrostCore::<C>::serialized_len(n)
}
pub fn serialize(&self) -> Vec<u8> {
self.core.serialize()
}
pub fn view(&self, included: &[u16]) -> Result<FrostView<C>, FrostError> {
if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len())
{
Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?;
}
let offset_share = self.offset.unwrap_or_else(C::F::zero) *
C::F::from(included.len().try_into().unwrap()).invert().unwrap();
let offset_verification_share = C::GENERATOR * offset_share;
Ok(FrostView {
group_key: self.group_key(),
secret_share: (self.secret_share() * lagrange::<C::F>(self.params().i, included)) +
offset_share,
verification_shares: self
.verification_shares()
.iter()
.map(|(l, share)| {
(*l, (*share * lagrange::<C::F>(*l, included)) + offset_verification_share)
})
.collect(),
included: included.to_vec(),
})
}
}
impl<C: Curve> FrostView<C> {
pub fn group_key(&self) -> C::G {
self.group_key
}
pub fn included(&self) -> Vec<u16> {
self.included.clone()
}
pub fn secret_share(&self) -> C::F {
self.secret_share
}
pub fn verification_share(&self, l: u16) -> C::G {
self.verification_shares[&l]
}
}

View File

@@ -1,5 +1,7 @@
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use group::{
ff::{Field, PrimeField},
GroupEncoding,
@@ -26,11 +28,14 @@ impl<C: Curve> SchnorrSignature<C> {
}
pub(crate) fn sign<C: Curve>(
private_key: C::F,
nonce: C::F,
mut private_key: C::F,
mut nonce: C::F,
challenge: C::F,
) -> SchnorrSignature<C> {
SchnorrSignature { R: C::GENERATOR * nonce, s: nonce + (private_key * challenge) }
let res = SchnorrSignature { R: C::GENERATOR * nonce, s: nonce + (private_key * challenge) };
private_key.zeroize();
nonce.zeroize();
res
}
#[must_use]

View File

@@ -1,12 +1,13 @@
use core::fmt;
use std::{
io::{Read, Cursor},
sync::Arc,
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use transcript::Transcript;
use group::{
@@ -25,7 +26,7 @@ use crate::{
#[derive(Clone)]
pub struct Params<C: Curve, A: Algorithm<C>> {
algorithm: A,
keys: Arc<FrostKeys<C>>,
keys: FrostKeys<C>,
view: FrostView<C>,
}
@@ -33,23 +34,25 @@ pub struct Params<C: Curve, A: Algorithm<C>> {
impl<C: Curve, A: Algorithm<C>> Params<C, A> {
pub fn new(
algorithm: A,
keys: Arc<FrostKeys<C>>,
keys: FrostKeys<C>,
included: &[u16],
) -> Result<Params<C, A>, FrostError> {
let params = keys.params();
let mut included = included.to_vec();
included.sort_unstable();
// Included < threshold
if included.len() < usize::from(keys.params.t) {
if included.len() < usize::from(params.t) {
Err(FrostError::InvalidSigningSet("not enough signers"))?;
}
// Invalid index
if included[0] == 0 {
Err(FrostError::InvalidParticipantIndex(included[0], keys.params.n))?;
Err(FrostError::InvalidParticipantIndex(included[0], params.n))?;
}
// OOB index
if included[included.len() - 1] > keys.params.n {
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], keys.params.n))?;
if included[included.len() - 1] > params.n {
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n))?;
}
// Same signer included multiple times
for i in 0 .. included.len() - 1 {
@@ -58,7 +61,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
}
}
// Not included
if !included.contains(&keys.params.i) {
if !included.contains(&params.i) {
Err(FrostError::InvalidSigningSet("signing despite not being included"))?;
}
@@ -67,7 +70,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
}
pub fn multisig_params(&self) -> FrostParams {
self.keys.params
self.keys.params()
}
pub fn view(&self) -> FrostView<C> {
@@ -79,12 +82,20 @@ fn nonce_transcript<T: Transcript>() -> T {
T::new(b"FROST_nonce_dleq")
}
#[derive(Zeroize)]
pub(crate) struct PreprocessPackage<C: Curve> {
pub(crate) nonces: Vec<[C::F; 2]>,
#[zeroize(skip)]
pub(crate) commitments: Vec<Vec<[C::G; 2]>>,
pub(crate) addendum: Vec<u8>,
}
impl<C: Curve> Drop for PreprocessPackage<C> {
fn drop(&mut self) {
self.zeroize()
}
}
// This library unifies the preprocessing step with signing due to security concerns and to provide
// a simpler UX
fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
@@ -122,10 +133,11 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
// This could be further optimized with a multi-nonce proof.
// See https://github.com/serai-dex/serai/issues/38
for nonce in nonces {
for mut nonce in nonces {
DLEqProof::prove(&mut *rng, &mut transcript, generators, nonce)
.serialize(&mut serialized)
.unwrap();
nonce.zeroize();
}
}
@@ -190,7 +202,7 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
t.append_message(b"commitment_E", commitments[1].to_bytes().as_ref());
};
if *l == params.keys.params.i {
if *l == params.keys.params().i {
for nonce_commitments in &our_preprocess.commitments {
for commitments in nonce_commitments {
transcript(params.algorithm.transcript(), *commitments);
@@ -282,16 +294,15 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
}
}
let share = params.algorithm.sign_share(
&params.view,
&Rs,
&our_preprocess
.nonces
.iter()
.map(|nonces| nonces[0] + (nonces[1] * B[&params.keys.params.i()].1))
.collect::<Vec<_>>(),
msg,
);
let mut nonces = our_preprocess
.nonces
.iter()
.map(|nonces| nonces[0] + (nonces[1] * B[&params.keys.params().i()].1))
.collect::<Vec<_>>();
let share = params.algorithm.sign_share(&params.view, &Rs, &nonces, msg);
nonces.zeroize();
Ok((Package { B, Rs, share }, share.to_repr().as_ref().to_vec()))
}
@@ -397,7 +408,7 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
/// Creates a new machine to generate a key for the specified curve in the specified multisig
pub fn new(
algorithm: A,
keys: Arc<FrostKeys<C>>,
keys: FrostKeys<C>,
included: &[u16],
) -> Result<AlgorithmMachine<C, A>, FrostError> {
Ok(AlgorithmMachine { params: Params::new(algorithm, keys, included)? })

View File

@@ -4,18 +4,18 @@ use rand_core::{RngCore, CryptoRng};
use group::{ff::Field, Group};
use crate::{Curve, FrostKeys, tests::key_gen};
use crate::{Curve, FrostCore, tests::core_gen};
// Test generation of FROST keys
fn key_generation<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// This alone verifies the verification shares and group key are agreed upon as expected
key_gen::<_, C>(rng);
core_gen::<_, C>(rng);
}
// Test serialization of generated keys
fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
for (_, keys) in key_gen::<_, C>(rng) {
assert_eq!(&FrostKeys::<C>::deserialize(&mut Cursor::new(keys.serialize())).unwrap(), &*keys);
for (_, keys) in core_gen::<_, C>(rng) {
assert_eq!(&FrostCore::<C>::deserialize(&mut Cursor::new(keys.serialize())).unwrap(), &keys);
}
}
@@ -38,7 +38,7 @@ pub fn test_curve<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
}
}
// Test FROST key generation and serialization of FrostKeys works as expected
// Test FROST key generation and serialization of FrostCore works as expected
key_generation::<_, C>(rng);
keys_serialization::<_, C>(rng);
}

View File

@@ -1,11 +1,11 @@
use std::{io::Cursor, sync::Arc, collections::HashMap};
use std::{io::Cursor, collections::HashMap};
use rand_core::{RngCore, CryptoRng};
use group::ff::Field;
use crate::{
Curve, FrostParams, FrostKeys, lagrange,
Curve, FrostParams, FrostCore, FrostKeys, lagrange,
key_gen::KeyGenMachine,
algorithm::Algorithm,
sign::{PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
@@ -32,7 +32,7 @@ pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
res
}
pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, Arc<FrostKeys<C>>> {
pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, FrostCore<C>> {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= PARTICIPANTS {
@@ -82,11 +82,15 @@ pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, Ar
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, Arc::new(these_keys))
(i, these_keys)
})
.collect::<HashMap<_, _>>()
}
pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, FrostKeys<C>> {
core_gen(rng).drain().map(|(i, core)| (i, FrostKeys::new(core))).collect()
}
pub fn recover<C: Curve>(keys: &HashMap<u16, FrostKeys<C>>) -> C::F {
let first = keys.values().next().expect("no keys provided");
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
@@ -102,7 +106,7 @@ pub fn recover<C: Curve>(keys: &HashMap<u16, FrostKeys<C>>) -> C::F {
pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
rng: &mut R,
algorithm: A,
keys: &HashMap<u16, Arc<FrostKeys<C>>>,
keys: &HashMap<u16, FrostKeys<C>>,
) -> HashMap<u16, AlgorithmMachine<C, A>> {
let mut included = vec![];
while included.len() < usize::from(keys[&1].params().t()) {

View File

@@ -1,4 +1,4 @@
use std::{marker::PhantomData, sync::Arc, collections::HashMap};
use std::{marker::PhantomData, collections::HashMap};
use rand_core::{RngCore, CryptoRng};
@@ -78,7 +78,7 @@ pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
fn sign_core<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
group_key: C::G,
keys: &HashMap<u16, Arc<FrostKeys<C>>>,
keys: &HashMap<u16, FrostKeys<C>>,
) {
const MESSAGE: &[u8] = b"Hello, World!";
@@ -109,7 +109,7 @@ fn sign_with_offset<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let offset = C::hash_to_F(b"FROST Test sign_with_offset", b"offset");
for i in 1 ..= u16::try_from(keys.len()).unwrap() {
keys.insert(i, Arc::new(keys[&i].offset(offset)));
keys.insert(i, keys[&i].offset(offset));
}
let offset_key = group_key + (C::GENERATOR * offset);

View File

@@ -1,4 +1,4 @@
use std::{io::Cursor, sync::Arc, collections::HashMap};
use std::{io::Cursor, collections::HashMap};
use rand_core::{RngCore, CryptoRng};
@@ -6,7 +6,7 @@ use group::{ff::PrimeField, GroupEncoding};
use crate::{
curve::Curve,
FrostKeys,
FrostCore, FrostKeys,
algorithm::{Schnorr, Hram},
sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine},
tests::{clone_without, curve::test_curve, schnorr::test_schnorr, recover},
@@ -48,13 +48,13 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
serialized.extend(share.to_bytes().as_ref());
}
let these_keys = FrostKeys::<C>::deserialize(&mut Cursor::new(serialized)).unwrap();
let these_keys = FrostCore::<C>::deserialize(&mut Cursor::new(serialized)).unwrap();
assert_eq!(these_keys.params().t(), vectors.threshold);
assert_eq!(usize::from(these_keys.params().n()), shares.len());
assert_eq!(these_keys.params().i(), i);
assert_eq!(these_keys.secret_share(), shares[usize::from(i - 1)]);
assert_eq!(&hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key);
keys.insert(i, these_keys);
keys.insert(i, FrostKeys::new(these_keys));
}
keys
@@ -86,7 +86,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
*i,
AlgorithmMachine::new(
Schnorr::<C, H>::new(),
Arc::new(keys[i].clone()),
keys[i].clone(),
&vectors.included.to_vec().clone(),
)
.unwrap(),