Mirror of https://github.com/serai-dex/serai.git (synced 2025-12-10 13:09:24 +00:00)
Finish updating crypto to new clippy
@@ -97,8 +97,8 @@ mod sealed {
   impl Transcript for IetfTranscript {
     type Challenge = Vec<u8>;
 
-    fn new(_: &'static [u8]) -> IetfTranscript {
-      IetfTranscript(vec![])
+    fn new(_: &'static [u8]) -> Self {
+      Self(vec![])
     }
 
     fn domain_separate(&mut self, _: &[u8]) {}
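The change above is clippy's use_self lint: the constructor names its own type as Self. A minimal standalone sketch of the pattern, using a made-up ByteTranscript rather than serai's IetfTranscript:

struct ByteTranscript(Vec<u8>);

impl ByteTranscript {
  // Before the lint fix this would read
  // `fn new() -> ByteTranscript { ByteTranscript(vec![]) }`; spelling it as
  // Self keeps the constructor valid even if the type is renamed later.
  fn new() -> Self {
    Self(vec![])
  }
}

fn main() {
  let transcript = ByteTranscript::new();
  assert!(transcript.0.is_empty());
}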
@@ -147,8 +147,9 @@ pub type IetfSchnorr<C, H> = Schnorr<C, IetfTranscript, H>;
 
 impl<C: Curve, T: Sync + Clone + Debug + Transcript, H: Hram<C>> Schnorr<C, T, H> {
   /// Construct a Schnorr algorithm continuing the specified transcript.
-  pub fn new(transcript: T) -> Schnorr<C, T, H> {
-    Schnorr { transcript, c: None, _hram: PhantomData }
+  #[must_use]
+  pub const fn new(transcript: T) -> Self {
+    Self { transcript, c: None, _hram: PhantomData }
   }
 }
 
@@ -156,8 +157,9 @@ impl<C: Curve, H: Hram<C>> IetfSchnorr<C, H> {
   /// Construct a IETF-compatible Schnorr algorithm.
   ///
   /// Please see the `IetfSchnorr` documentation for the full details of this.
-  pub fn ietf() -> IetfSchnorr<C, H> {
-    Schnorr::new(IetfTranscript(vec![]))
+  #[must_use]
+  pub const fn ietf() -> Self {
+    Self::new(IetfTranscript(vec![]))
   }
 }
 
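Both constructors above pick up #[must_use] and become const fn. A standalone sketch of what that buys, with a hypothetical Signer type standing in for Schnorr/IetfSchnorr:

struct Signer {
  context: &'static str,
}

impl Signer {
  // const fn allows use in const contexts; #[must_use] makes clippy warn if a
  // caller builds a Signer and silently drops it.
  #[must_use]
  const fn new(context: &'static str) -> Self {
    Self { context }
  }
}

// Because new() is const, it can initialize a constant.
const SIGNER: Signer = Signer::new("example");

fn main() {
  assert_eq!(SIGNER.context, "example");
}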
@@ -46,6 +46,7 @@ pub trait Curve: Ciphersuite {
   const CONTEXT: &'static [u8];
 
   /// Hash the given dst and data to a byte vector. Used to instantiate H4 and H5.
+  #[must_use]
   fn hash(dst: &[u8], data: &[u8]) -> Output<Self::H> {
     Self::H::digest([Self::CONTEXT, dst, data].concat())
   }
@@ -53,26 +54,31 @@ pub trait Curve: Ciphersuite {
   /// Field element from hash. Used during key gen and by other crates under Serai as a general
   /// utility. Used to instantiate H1 and H3.
   #[allow(non_snake_case)]
+  #[must_use]
   fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
     <Self as Ciphersuite>::hash_to_F(&[Self::CONTEXT, dst].concat(), msg)
   }
 
   /// Hash the message for the binding factor. H4 from the IETF draft.
+  #[must_use]
   fn hash_msg(msg: &[u8]) -> Output<Self::H> {
     Self::hash(b"msg", msg)
   }
 
   /// Hash the commitments for the binding factor. H5 from the IETF draft.
+  #[must_use]
   fn hash_commitments(commitments: &[u8]) -> Output<Self::H> {
     Self::hash(b"com", commitments)
   }
 
   /// Hash the commitments and message to calculate the binding factor. H1 from the IETF draft.
+  #[must_use]
   fn hash_binding_factor(binding: &[u8]) -> Self::F {
     <Self as Curve>::hash_to_F(b"rho", binding)
   }
 
   /// Securely generate a random nonce. H3 from the IETF draft.
+  #[must_use]
   fn random_nonce<R: RngCore + CryptoRng>(
     secret: &Zeroizing<Self::F>,
     rng: &mut R,
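Every helper above routes through a curve-specific CONTEXT plus a short dst ("msg", "com", "rho"), which is what keeps H1/H3/H4/H5 domain-separated. A rough standalone sketch of that idea using the sha2 crate directly; the CONTEXT value and function names here are illustrative, not serai's:

use sha2::{Digest, Sha256};

const CONTEXT: &[u8] = b"FROST-EXAMPLE-v11";

// Mirrors Self::H::digest([Self::CONTEXT, dst, data].concat()) from the trait.
fn hash(dst: &[u8], data: &[u8]) -> Vec<u8> {
  Sha256::digest([CONTEXT, dst, data].concat()).to_vec()
}

fn main() {
  // The same message under different dst values yields unrelated digests,
  // which is the point of the "msg"/"com"/"rho" separation.
  assert_ne!(hash(b"msg", b"hello"), hash(b"com", b"hello"));
}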
@@ -1,11 +1,8 @@
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc = include_str!("../README.md")]
 
-use core::fmt::Debug;
 use std::collections::HashMap;
 
-use thiserror::Error;
-
 /// Distributed key generation protocol.
 pub use dkg::{self, Participant, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView};
 
@@ -23,25 +20,32 @@ pub mod sign;
 #[cfg(any(test, feature = "tests"))]
 pub mod tests;
 
-/// Various errors possible during signing.
-#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
-pub enum FrostError {
-  #[error("invalid participant (0 < participant <= {0}, yet participant is {1})")]
-  InvalidParticipant(u16, Participant),
-  #[error("invalid signing set ({0})")]
-  InvalidSigningSet(&'static str),
-  #[error("invalid participant quantity (expected {0}, got {1})")]
-  InvalidParticipantQuantity(usize, usize),
-  #[error("duplicated participant ({0})")]
-  DuplicatedParticipant(Participant),
-  #[error("missing participant {0}")]
-  MissingParticipant(Participant),
-
-  #[error("invalid preprocess (participant {0})")]
-  InvalidPreprocess(Participant),
-  #[error("invalid share (participant {0})")]
-  InvalidShare(Participant),
-}
+#[allow(clippy::std_instead_of_core)]
+mod frost_error {
+  use core::fmt::Debug;
+  use thiserror::Error;
+  use dkg::Participant;
+
+  /// Various errors possible during signing.
+  #[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
+  pub enum FrostError {
+    #[error("invalid participant (0 < participant <= {0}, yet participant is {1})")]
+    InvalidParticipant(u16, Participant),
+    #[error("invalid signing set ({0})")]
+    InvalidSigningSet(&'static str),
+    #[error("invalid participant quantity (expected {0}, got {1})")]
+    InvalidParticipantQuantity(usize, usize),
+    #[error("duplicated participant ({0})")]
+    DuplicatedParticipant(Participant),
+    #[error("missing participant {0}")]
+    MissingParticipant(Participant),
+
+    #[error("invalid preprocess (participant {0})")]
+    InvalidPreprocess(Participant),
+    #[error("invalid share (participant {0})")]
+    InvalidShare(Participant),
+  }
+}
+pub use frost_error::FrostError;
 
 /// Validate a map of values to have the expected participants.
 pub fn validate_map<T>(
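The FrostError change above is a workaround for clippy::std_instead_of_core firing inside thiserror's expansion: the enum moves into a private module carrying the #[allow], and a re-export keeps the public path unchanged. A self-contained sketch of the same pattern, with a hypothetical ExampleError and a hand-written Display impl instead of the thiserror derive:

#[allow(clippy::std_instead_of_core)]
mod example_error {
  use core::fmt;

  /// A stand-in for an error type whose derive would otherwise trip the lint.
  #[derive(Clone, Copy, PartialEq, Eq, Debug)]
  pub enum ExampleError {
    MissingParticipant(u16),
  }

  impl fmt::Display for ExampleError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
      match self {
        Self::MissingParticipant(i) => write!(f, "missing participant {i}"),
      }
    }
  }
}
pub use example_error::ExampleError;

fn main() {
  println!("{}", ExampleError::MissingParticipant(2));
}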
@@ -59,8 +59,8 @@ pub(crate) struct Nonce<C: Curve>(pub(crate) [Zeroizing<C::F>; 2]);
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
 impl<C: Curve> GeneratorCommitments<C> {
-  fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
-    Ok(GeneratorCommitments([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
+  fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
+    Ok(Self([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
   }
 
   fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@@ -82,7 +82,7 @@ impl<C: Curve> NonceCommitments<C> {
     rng: &mut R,
     secret_share: &Zeroizing<C::F>,
     generators: &[C::G],
-  ) -> (Nonce<C>, NonceCommitments<C>) {
+  ) -> (Nonce<C>, Self) {
     let nonce = Nonce::<C>([
       C::random_nonce(secret_share, &mut *rng),
       C::random_nonce(secret_share, &mut *rng),
@@ -96,11 +96,11 @@ impl<C: Curve> NonceCommitments<C> {
       ]));
     }
 
-    (nonce, NonceCommitments { generators: commitments })
+    (nonce, Self { generators: commitments })
   }
 
-  fn read<R: Read>(reader: &mut R, generators: &[C::G]) -> io::Result<NonceCommitments<C>> {
-    Ok(NonceCommitments {
+  fn read<R: Read>(reader: &mut R, generators: &[C::G]) -> io::Result<Self> {
+    Ok(Self {
       generators: (0 .. generators.len())
         .map(|_| GeneratorCommitments::read(reader))
         .collect::<Result<_, _>>()?,
@@ -146,7 +146,7 @@ impl<C: Curve> Commitments<C> {
     secret_share: &Zeroizing<C::F>,
     planned_nonces: &[Vec<C::G>],
     context: &[u8],
-  ) -> (Vec<Nonce<C>>, Commitments<C>) {
+  ) -> (Vec<Nonce<C>>, Self) {
     let mut nonces = vec![];
     let mut commitments = vec![];
 
@@ -168,18 +168,18 @@ impl<C: Curve> Commitments<C> {
       commitments.push(these_commitments);
     }
 
-    let dleq = if !dleq_generators.is_empty() {
+    let dleq = if dleq_generators.is_empty() {
+      None
+    } else {
       Some(MultiDLEqProof::prove(
         rng,
         &mut dleq_transcript::<T>(context),
         &dleq_generators,
         &dleq_nonces,
       ))
-    } else {
-      None
     };
 
-    (nonces, Commitments { nonces: commitments, dleq })
+    (nonces, Self { nonces: commitments, dleq })
   }
 
   pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
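The dleq branch above is clippy's if_not_else: drop the negation on is_empty() and swap the arms so the empty case comes first. A trivial standalone illustration:

fn first_even(values: &[u32]) -> Option<u32> {
  // Before: if !values.is_empty() { ...find... } else { None }
  if values.is_empty() {
    None
  } else {
    values.iter().copied().find(|v| v % 2 == 0)
  }
}

fn main() {
  assert_eq!(first_even(&[]), None);
  assert_eq!(first_even(&[1, 4, 5]), Some(4));
}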
@@ -219,17 +219,17 @@ impl<C: Curve> Commitments<C> {
       }
     }
 
-    let dleq = if !dleq_generators.is_empty() {
+    let dleq = if dleq_generators.is_empty() {
+      None
+    } else {
       let dleq = MultiDLEqProof::read(reader, dleq_generators.len())?;
       dleq
         .verify(&mut dleq_transcript::<T>(context), &dleq_generators, &dleq_nonces)
         .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
       Some(dleq)
-    } else {
-      None
     };
 
-    Ok(Commitments { nonces, dleq })
+    Ok(Self { nonces, dleq })
   }
 
   pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@@ -256,7 +256,7 @@ impl<C: Curve> BindingFactor<C> {
   }
 
   pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &mut T) {
-    for (l, binding) in self.0.iter_mut() {
+    for (l, binding) in &mut self.0 {
       let mut transcript = transcript.clone();
       transcript.append_message(b"participant", C::F::from(u64::from(u16::from(*l))).to_repr());
       // It *should* be perfectly fine to reuse a binding factor for multiple nonces
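The loop above now iterates &mut self.0 instead of calling .iter_mut() explicitly (clippy's explicit_iter_loop). A small standalone sketch of the same rewrite on a plain HashMap; the names are made up:

use std::collections::HashMap;

fn double_all(scores: &mut HashMap<&'static str, u32>) {
  // Before: for (_name, score) in scores.iter_mut() {
  for (_name, score) in &mut *scores {
    *score *= 2;
  }
}

fn main() {
  let mut scores = HashMap::from([("a", 1u32), ("b", 2)]);
  double_all(&mut scores);
  assert_eq!(scores.get("a"), Some(&2));
}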
@@ -53,8 +53,8 @@ struct Params<C: Curve, A: Algorithm<C>> {
 }
 
 impl<C: Curve, A: Algorithm<C>> Params<C, A> {
-  fn new(algorithm: A, keys: ThresholdKeys<C>) -> Params<C, A> {
-    Params { algorithm, keys }
+  const fn new(algorithm: A, keys: ThresholdKeys<C>) -> Self {
+    Self { algorithm, keys }
   }
 
   fn multisig_params(&self) -> ThresholdParams {
@@ -111,8 +111,8 @@ pub struct AlgorithmMachine<C: Curve, A: Algorithm<C>> {
 
 impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
   /// Creates a new machine to generate a signature with the specified keys.
-  pub fn new(algorithm: A, keys: ThresholdKeys<C>) -> AlgorithmMachine<C, A> {
-    AlgorithmMachine { params: Params::new(algorithm, keys) }
+  pub const fn new(algorithm: A, keys: ThresholdKeys<C>) -> Self {
+    Self { params: Params::new(algorithm, keys) }
   }
 
   fn seeded_preprocess(
@@ -27,7 +27,7 @@ pub const PARTICIPANTS: u16 = 5;
 pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
 
 /// Clone a map without a specific value.
-pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
+pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
   map: &HashMap<K, V>,
   without: &K,
 ) -> HashMap<K, V> {
@@ -57,11 +57,7 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
   keys
     .iter()
     .filter_map(|(i, keys)| {
-      if included.contains(i) {
-        Some((*i, AlgorithmMachine::new(algorithm.clone(), keys.clone())))
-      } else {
-        None
-      }
+      included.contains(i).then(|| (*i, AlgorithmMachine::new(algorithm.clone(), keys.clone())))
     })
     .collect()
 }
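The four-line if/else returning Some(..) or None collapses to bool::then, which filter_map consumes directly. A minimal standalone illustration of that rewrite:

fn keep_if_even(x: u32) -> Option<u32> {
  // Before: if x % 2 == 0 { Some(x * 10) } else { None }
  (x % 2 == 0).then(|| x * 10)
}

fn main() {
  assert_eq!(keep_if_even(4), Some(40));
  assert_eq!(keep_if_even(3), None);
}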
@@ -177,8 +173,8 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
     machines,
     |rng, machines| {
       // Cache and rebuild half of the machines
-      let mut included = machines.keys().cloned().collect::<Vec<_>>();
-      for i in included.drain(..) {
+      let included = machines.keys().copied().collect::<Vec<_>>();
+      for i in included {
         if (rng.next_u64() % 2) == 0 {
           let cache = machines.remove(&i).unwrap().cache();
           machines.insert(
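Two small changes above: .copied() replaces .cloned() since the keys are Copy, and the collected Vec is consumed by value rather than drained out of a mut binding. A standalone sketch with made-up data:

use std::collections::HashMap;

fn main() {
  let machines = HashMap::from([(1u16, "a"), (2, "b"), (3, "c")]);

  // .copied() documents that only a bitwise copy of each key happens.
  let included = machines.keys().copied().collect::<Vec<_>>();

  // The Vec is moved into the loop, so `let mut` plus drain(..) is unneeded.
  let mut total = 0u32;
  for i in included {
    total += u32::from(i);
  }
  assert_eq!(total, 6);
}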
@@ -208,13 +204,13 @@ pub fn test_schnorr_with_keys<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
 /// Test a basic Schnorr signature.
 pub fn test_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &mut R) {
   let keys = key_gen(&mut *rng);
-  test_schnorr_with_keys::<_, _, H>(&mut *rng, keys)
+  test_schnorr_with_keys::<_, _, H>(&mut *rng, keys);
 }
 
 /// Test a basic Schnorr signature, yet with MuSig.
 pub fn test_musig_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &mut R) {
   let keys = musig_key_gen(&mut *rng);
-  test_schnorr_with_keys::<_, _, H>(&mut *rng, keys)
+  test_schnorr_with_keys::<_, _, H>(&mut *rng, keys);
 }
 
 /// Test an offset Schnorr signature.
@@ -226,7 +222,7 @@ pub fn test_offset_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &m
 
   let offset = C::F::from(5);
   let offset_key = group_key + (C::generator() * offset);
-  for (_, keys) in keys.iter_mut() {
+  for keys in keys.values_mut() {
     *keys = keys.offset(offset);
     assert_eq!(keys.group_key(), offset_key);
   }
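Since the key is never used, the loop above switches to HashMap::values_mut. A tiny standalone example of the same change:

use std::collections::HashMap;

fn main() {
  let mut balances = HashMap::from([("alice", 10u64), ("bob", 20)]);
  // Before: for (_, balance) in balances.iter_mut() {
  for balance in balances.values_mut() {
    *balance += 5;
  }
  assert_eq!(balances.get("alice"), Some(&15));
}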
@@ -26,8 +26,8 @@ struct MultiNonce<C: Curve> {
 }
 
 impl<C: Curve> MultiNonce<C> {
-  fn new() -> MultiNonce<C> {
-    MultiNonce {
+  fn new() -> Self {
+    Self {
       transcript: RecommendedTranscript::new(b"FROST MultiNonce Algorithm Test"),
       nonces: None,
     }
@@ -173,16 +173,10 @@ pub fn test_invalid_commitment<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
   let mut preprocess = preprocesses.remove(&faulty).unwrap();
 
   // Mutate one of the commitments
-  let nonce =
-    preprocess.commitments.nonces.get_mut(usize::try_from(rng.next_u64()).unwrap() % 2).unwrap();
+  let nonce = &mut preprocess.commitments.nonces[usize::try_from(rng.next_u64()).unwrap() % 2];
   let generators_len = nonce.generators.len();
-  *nonce
-    .generators
-    .get_mut(usize::try_from(rng.next_u64()).unwrap() % generators_len)
-    .unwrap()
-    .0
-    .get_mut(usize::try_from(rng.next_u64()).unwrap() % 2)
-    .unwrap() = C::G::random(&mut *rng);
+  nonce.generators[usize::try_from(rng.next_u64()).unwrap() % generators_len].0
+    [usize::try_from(rng.next_u64()).unwrap() % 2] = C::G::random(&mut *rng);
 
   // The commitments are validated at time of deserialization (read_preprocess)
   // Accordingly, serialize it and read it again to make sure that errors
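In a test an out-of-bounds access may as well panic, so the chained .get_mut(..).unwrap() calls above become plain indexing. A standalone sketch of the idea with a hypothetical helper:

fn flip_one(bits: &mut [bool], idx: usize) {
  // Before: *bits.get_mut(idx).unwrap() = !bits[idx];
  bits[idx] = !bits[idx];
}

fn main() {
  let mut bits = [false, true, false];
  flip_one(&mut bits, 1);
  assert_eq!(bits, [false, false, false]);
}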
@@ -1,8 +1,8 @@
 use core::ops::Deref;
 
-use std::collections::HashMap;
 #[cfg(test)]
-use std::str::FromStr;
+use core::str::FromStr;
+use std::collections::HashMap;
 
 use zeroize::Zeroizing;
 
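FromStr is defined in core, so the #[cfg(test)] import above moves off std (clippy's std_instead_of_core). A minimal standalone example:

use core::str::FromStr;

fn main() {
  let threshold = u16::from_str("3").unwrap();
  assert_eq!(threshold, 3);
}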
@@ -45,11 +45,12 @@ pub struct Vectors {
 // Vectors are expected to be formatted per the IETF proof of concept
 // The included vectors are direcly from
 // https://github.com/cfrg/draft-irtf-cfrg-frost/tree/draft-irtf-cfrg-frost-11/poc
+#[allow(clippy::fallible_impl_from)]
 #[cfg(test)]
 impl From<serde_json::Value> for Vectors {
-  fn from(value: serde_json::Value) -> Vectors {
+  fn from(value: serde_json::Value) -> Self {
     let to_str = |value: &serde_json::Value| value.as_str().unwrap().to_string();
-    Vectors {
+    Self {
       threshold: u16::from_str(value["config"]["NUM_PARTICIPANTS"].as_str().unwrap()).unwrap(),
 
       group_secret: to_str(&value["inputs"]["group_secret_key"]),
@@ -166,8 +167,9 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
   }
 
   let mut commitments = HashMap::new();
-  let mut machines = machines
-    .drain(..)
+  #[allow(clippy::needless_collect)] // Fails to compile without it due to borrow checking
+  let machines = machines
+    .into_iter()
     .enumerate()
     .map(|(c, (i, machine))| {
       let nonce = |i| {
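The #[allow(clippy::needless_collect)] above is scoped to the single binding that needs it and carries a comment explaining why (the intermediate collect satisfies the borrow checker). A rough standalone sketch of the same pattern with made-up helpers; whether clippy actually fires on a given shape is version-dependent:

fn drain_into(source: &mut Vec<u32>, sink: &mut Vec<u32>) {
  #[allow(clippy::needless_collect)] // the borrow of `source` must end before clear()
  let staged = source.iter().copied().collect::<Vec<_>>();
  source.clear();
  for v in staged {
    sink.push(v);
  }
}

fn main() {
  let (mut a, mut b) = (vec![1, 2, 3], vec![]);
  drain_into(&mut a, &mut b);
  assert_eq!(b, [1, 2, 3]);
}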
@@ -224,8 +226,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
     .collect::<Vec<_>>();
 
   let mut shares = HashMap::new();
-  let mut machines = machines
-    .drain(..)
+  let machines = machines
+    .into_iter()
     .enumerate()
     .map(|(c, (i, machine))| {
       let (machine, share) = machine
@@ -244,7 +246,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
     })
     .collect::<HashMap<_, _>>();
 
-  for (i, machine) in machines.drain() {
+  for (i, machine) in machines {
     let sig = machine.complete(clone_without(&shares, i)).unwrap();
     let mut serialized = sig.R.to_bytes().as_ref().to_vec();
     serialized.extend(sig.s.to_repr().as_ref());
@@ -265,7 +267,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
       unimplemented!()
     }
     fn fill_bytes(&mut self, dest: &mut [u8]) {
-      dest.copy_from_slice(&self.0.remove(0))
+      dest.copy_from_slice(&self.0.remove(0));
     }
     fn try_fill_bytes(&mut self, _: &mut [u8]) -> Result<(), rand_core::Error> {
       unimplemented!()
@@ -347,7 +349,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
     machines.push((i, AlgorithmMachine::new(IetfSchnorr::<C, H>::ietf(), keys[i].clone())));
   }
 
-  for (i, machine) in machines.drain(..) {
+  for (i, machine) in machines {
     let (_, preprocess) = machine.preprocess(&mut frosts.clone());
 
     // Calculate the expected nonces