Correct clippy warnings
Currently intended to be done with:

cargo clippy --features "recommended merlin batch serialize experimental ed25519 ristretto p256 secp256k1 multisig" -- -A clippy::type_complexity -A dead_code
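For reference, the two `-A` flags are lint allowances that could just as well live as crate-level attributes; a minimal sketch of that alternative (purely illustrative, the commit keeps them on the command line):

```rust
// Hypothetical crate-root attributes mirroring `-A clippy::type_complexity -A dead_code`.
// Illustration only; the commit passes the allowances as CLI flags instead.
#![allow(clippy::type_complexity)]
#![allow(dead_code)]

fn main() {}
```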
@@ -231,7 +231,7 @@ impl PrimeField for Scalar {
   fn from_repr(bytes: [u8; 32]) -> CtOption<Self> {
     let scalar = DScalar::from_canonical_bytes(bytes);
     // TODO: This unwrap_or isn't constant time, yet do we have an alternative?
-    CtOption::new(Scalar(scalar.unwrap_or(DScalar::zero())), choice(scalar.is_some()))
+    CtOption::new(Scalar(scalar.unwrap_or_else(DScalar::zero)), choice(scalar.is_some()))
   }

   fn to_repr(&self) -> [u8; 32] {
     self.0.to_bytes()
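The `unwrap_or` to `unwrap_or_else` change above is the pattern clippy's `or_fun_call` lint pushes toward: with `unwrap_or` the fallback expression is evaluated even when the value is present, while `unwrap_or_else` takes a function and only calls it in the `None` case. A minimal standalone sketch (the types are stand-ins, not the dalek ones):

```rust
fn expensive_default() -> u64 {
  println!("default computed");
  0
}

fn main() {
  let present: Option<u64> = Some(5);

  // Eager: the argument is evaluated even though `present` is Some.
  let a = present.unwrap_or(expensive_default()); // prints "default computed"

  // Lazy: the function would only be called if `present` were None.
  let b = present.unwrap_or_else(expensive_default); // prints nothing

  assert_eq!(a, b);
}
```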
@@ -163,6 +163,7 @@ where
     match self.Re_0 {
       Re::R(R0_0, R1_0) => {
         let mut e = Self::nonces(transcript.clone(), (R0_0, R1_0));
+        #[allow(clippy::needless_range_loop)]
         for i in 0 .. (RING_LEN - 1) {
           e = Self::R_nonces(transcript.clone(), generators, self.s[i], ring[i], e);
         }
@@ -178,6 +179,7 @@ where
       Re::e(e_0) => {
         let e_0 = (e_0, scalar_convert(e_0).ok_or(DLEqError::InvalidChallenge)?);
         let mut e = None;
+        #[allow(clippy::needless_range_loop)]
         for i in 0 .. RING_LEN {
           e = Some(Self::R_nonces(
             transcript.clone(),
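The two `#[allow(clippy::needless_range_loop)]` attributes added above silence the lint rather than rewrite the loops, since the index `i` drives lookups into several slices (`self.s[i]`, `ring[i]`) at once. A small sketch of both sides of that trade-off, with made-up data:

```rust
fn main() {
  let xs = [1u64, 2, 3];
  let ys = [10u64, 20, 30];

  // The form clippy::needless_range_loop prefers when only one slice is read:
  let mut sum = 0;
  for x in &xs {
    sum += x;
  }

  // When one index drives multiple slices, an index loop can stay clearer,
  // hence the explicit allow (zip would be the alternative rewrite).
  let mut dot = 0;
  #[allow(clippy::needless_range_loop)]
  for i in 0 .. xs.len() {
    dot += xs[i] * ys[i];
  }

  assert_eq!(sum, 6);
  assert_eq!(dot, 140);
}
```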
@@ -230,8 +232,8 @@ where
     }

     let mut s = [(G0::Scalar::zero(), G1::Scalar::zero()); RING_LEN];
-    for i in 0 .. RING_LEN {
-      s[i] = (read_scalar(r)?, read_scalar(r)?);
+    for s in s.iter_mut() {
+      *s = (read_scalar(r)?, read_scalar(r)?);
     }

     Ok(Aos { Re_0, s })
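Here the same lint family is satisfied by rewriting instead of allowing: only one array is written, so the index loop becomes `iter_mut`. A self-contained sketch of the before/after shape, using a plain byte reader in place of the crate's `read_scalar`:

```rust
use std::io::{Cursor, Read};

// Stand-in for read_scalar: just pulls one byte from the reader.
fn read_byte<R: Read>(r: &mut R) -> std::io::Result<u8> {
  let mut buf = [0u8; 1];
  r.read_exact(&mut buf)?;
  Ok(buf[0])
}

fn main() -> std::io::Result<()> {
  let mut r = Cursor::new(vec![1u8, 2, 3, 4, 5, 6]);

  // Old shape: for i in 0 .. LEN { s[i] = (read(..)?, read(..)?); }
  // New shape: iterate mutably and assign through the reference.
  let mut s = [(0u8, 0u8); 3];
  for s in s.iter_mut() {
    *s = (read_byte(&mut r)?, read_byte(&mut r)?);
  }

  assert_eq!(s, [(1, 2), (3, 4), (5, 6)]);
  Ok(())
}
```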
@@ -15,6 +15,7 @@ use std::io::{Read, Write};
 #[cfg(feature = "serialize")]
 use crate::cross_group::read_point;

+#[allow(clippy::enum_variant_names)]
 pub(crate) enum BitSignature {
   ClassicLinear,
   ConciseLinear,
@@ -97,6 +97,12 @@ pub struct Schnorr<C: Curve, H: Hram<C>> {
   _hram: PhantomData<H>,
 }

+impl<C: Curve, H: Hram<C>> Default for Schnorr<C, H> {
+  fn default() -> Self {
+    Self::new()
+  }
+}
+
 impl<C: Curve, H: Hram<C>> Schnorr<C, H> {
   pub fn new() -> Schnorr<C, H> {
     Schnorr { transcript: IetfTranscript(vec![]), c: None, _hram: PhantomData }
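The new `Default` impl above is the usual answer to clippy's `new_without_default` lint: a type with an argument-less `new` also gets a `Default` that simply forwards to it. A sketch with a hypothetical struct standing in for `Schnorr<C, H>`:

```rust
// Hypothetical stand-in for a type with a no-argument constructor.
struct Counter {
  count: u64,
}

impl Default for Counter {
  fn default() -> Self {
    Self::new()
  }
}

impl Counter {
  fn new() -> Counter {
    Counter { count: 0 }
  }
}

fn main() {
  assert_eq!(Counter::new().count, Counter::default().count);
}
```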
@@ -22,7 +22,7 @@ use crate::{

 #[allow(non_snake_case)]
 fn challenge<C: Curve>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
-  const DST: &'static [u8] = b"FROST Schnorr Proof of Knowledge";
+  const DST: &[u8] = b"FROST Schnorr Proof of Knowledge";

   // Uses hash_msg to get a fixed size value out of the context string
   let mut transcript = C::hash_msg(context.as_bytes());
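Dropping `'static` from the `const` is what clippy's `redundant_static_lifetimes` lint asks for: on `const` and `static` items the `'static` lifetime is implied. A tiny sketch:

```rust
// The 'static lifetime is implicit on const/static items, so spelling it out
// is redundant (clippy::redundant_static_lifetimes).
const DST: &[u8] = b"FROST Schnorr Proof of Knowledge";
static GREETING: &str = "hello";

fn main() {
  assert_eq!(DST.len(), 32);
  println!("{GREETING}");
}
```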
@@ -91,7 +91,7 @@ fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>(
       continue;
     }

-    let invalid = FrostError::InvalidCommitment(l.try_into().unwrap());
+    let invalid = FrostError::InvalidCommitment(l);

     // Read the entire list of commitments as the key we're providing a PoK for (A) and the message
     #[allow(non_snake_case)]
@@ -124,7 +124,7 @@ fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>(
     commitments.insert(l, these_commitments);
   }

-  schnorr::batch_verify(rng, &signatures).map_err(|l| FrostError::InvalidProofOfKnowledge(l))?;
+  schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?;

   Ok(commitments)
 }
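Replacing `map_err(|l| FrostError::InvalidProofOfKnowledge(l))` with `map_err(FrostError::InvalidProofOfKnowledge)` (and the same change for `InvalidCommitment` below) works because a tuple-variant constructor is itself a function; wrapping it in a closure is what clippy's `redundant_closure` lint flags. A standalone sketch with a made-up error type:

```rust
#[derive(Debug, PartialEq)]
enum MyError {
  InvalidProofOfKnowledge(u16),
}

fn verify(l: u16) -> Result<(), u16> {
  if l == 0 { Err(l) } else { Ok(()) }
}

fn main() {
  // The closure only forwards its argument to the variant constructor...
  let with_closure: Result<(), MyError> = verify(0).map_err(|l| MyError::InvalidProofOfKnowledge(l));
  // ...so the constructor can be passed directly.
  let direct: Result<(), MyError> = verify(0).map_err(MyError::InvalidProofOfKnowledge);
  assert_eq!(with_closure, direct);
}
```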
@@ -230,7 +230,7 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
     values.push((-*share, C::GENERATOR));
     batch.queue(rng, *l, values);
   }
-  batch.verify_with_vartime_blame().map_err(|l| FrostError::InvalidCommitment(l))?;
+  batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?;

   // Stripe commitments per t and sum them in advance. Calculating verification shares relies on
   // these sums so preprocessing them is a massive speedup
@@ -161,7 +161,7 @@ impl<C: Curve> FrostKeys<C> {
     // Carry any existing offset
     // Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
     // one-time-key offset
-    res.offset = Some(offset + res.offset.unwrap_or(C::F::zero()));
+    res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero));
     res.group_key += C::GENERATOR * offset;
     res
   }
@@ -187,8 +187,8 @@ impl<C: Curve> FrostKeys<C> {
       Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?;
     }

-    let secret_share = self.secret_share * lagrange::<C::F>(self.params.i, &included);
-    let offset = self.offset.unwrap_or(C::F::zero());
+    let secret_share = self.secret_share * lagrange::<C::F>(self.params.i, included);
+    let offset = self.offset.unwrap_or_else(C::F::zero);
     let offset_share = offset * C::F::from(included.len().try_into().unwrap()).invert().unwrap();

     Ok(FrostView {
@@ -198,7 +198,7 @@ impl<C: Curve> FrostKeys<C> {
         .verification_shares
         .iter()
         .map(|(l, share)| {
-          (*l, (*share * lagrange::<C::F>(*l, &included)) + (C::GENERATOR * offset_share))
+          (*l, (*share * lagrange::<C::F>(*l, included)) + (C::GENERATOR * offset_share))
         })
         .collect(),
       included: included.to_vec(),
@@ -218,7 +218,7 @@ impl<C: Curve> FrostKeys<C> {
     serialized.extend(&self.params.i.to_be_bytes());
     serialized.extend(self.secret_share.to_repr().as_ref());
     serialized.extend(self.group_key.to_bytes().as_ref());
-    for l in 1 ..= self.params.n.into() {
+    for l in 1 ..= self.params.n {
       serialized.extend(self.verification_shares[&l].to_bytes().as_ref());
     }
     serialized
@@ -237,7 +237,7 @@ impl<C: Curve> FrostKeys<C> {

     let mut id = vec![0; C::ID.len()];
     cursor.read_exact(&mut id).map_err(|_| missing)?;
-    if &id != &C::ID {
+    if id != C::ID {
       Err(different)?;
     }
   }
@@ -37,7 +37,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
     included: &[u16],
   ) -> Result<Params<C, A>, FrostError> {
     let mut included = included.to_vec();
-    (&mut included).sort_unstable();
+    included.sort_unstable();

     // Included < threshold
     if included.len() < usize::from(keys.params.t) {
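`(&mut included).sort_unstable()` becoming `included.sort_unstable()` (and the `&included`, `&nonce_generators`, and `&msg` arguments losing their `&` elsewhere in this commit) is the `needless_borrow` pattern: method calls auto-reference their receiver, and the parameters in question already are references. A quick sketch:

```rust
fn first(slice: &[u16]) -> u16 {
  slice[0]
}

fn main() {
  let mut included = vec![3u16, 1, 2];

  // Method calls auto-(mut-)reference the receiver, so `(&mut included)` adds nothing.
  included.sort_unstable();
  assert_eq!(included, [1, 2, 3]);

  // When a reference is already in hand, taking `&` again just double-borrows.
  let view: &[u16] = &included;
  assert_eq!(first(view), 1);
}
```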
@@ -123,7 +123,7 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
     // This could be further optimized with a multi-nonce proof.
     // See https://github.com/serai-dex/serai/issues/38
     for nonce in nonces {
-      DLEqProof::prove(&mut *rng, &mut transcript, &generators, nonce)
+      DLEqProof::prove(&mut *rng, &mut transcript, generators, nonce)
         .serialize(&mut serialized)
         .unwrap();
     }
@@ -221,7 +221,7 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
         .map_err(|_| FrostError::InvalidCommitment(*l))?
         .verify(
           &mut transcript,
-          &nonce_generators,
+          nonce_generators,
           &commitments[n].iter().map(|commitments| commitments[de]).collect::<Vec<_>>(),
         )
         .map_err(|_| FrostError::InvalidCommitment(*l))?;
@@ -236,7 +236,7 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(

   // Re-format into the FROST-expected rho transcript
   let mut rho_transcript = A::Transcript::new(b"FROST_rho");
-  rho_transcript.append_message(b"message", &C::hash_msg(&msg));
+  rho_transcript.append_message(b"message", &C::hash_msg(msg));
   // This won't just be the commitments, yet the full existing transcript if used in an extended
   // protocol
   rho_transcript.append_message(
@@ -116,7 +116,7 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
   keys
     .iter()
     .filter_map(|(i, keys)| {
-      if included.contains(&i) {
+      if included.contains(i) {
         Some((
           *i,
           AlgorithmMachine::new(algorithm.clone(), keys.clone(), &included.clone()).unwrap(),
@@ -59,7 +59,7 @@ pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
   if let Err(blame) = schnorr::batch_verify(rng, &triplets) {
     assert_eq!(blame, 2);
   } else {
-    assert!(false);
+    panic!("batch verification considered a malleated signature valid");
   }
 }
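`assert!(false)` is what clippy's `assertions_on_constants` lint flags: an assertion on a constant either always passes or always panics, and an always-failing branch reads better as `panic!` with a message, which is what these tests now do. A minimal sketch:

```rust
use std::panic::catch_unwind;

fn old_style() {
  // Always panics, but says nothing about why (clippy::assertions_on_constants).
  assert!(false);
}

fn new_style() {
  // Same control flow, with a reason attached.
  panic!("batch verification considered an invalid signature valid");
}

fn main() {
  assert!(catch_unwind(old_style).is_err());
  assert!(catch_unwind(new_style).is_err());
}
```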
@@ -70,7 +70,7 @@ pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
     if let Err(blame) = schnorr::batch_verify(rng, &triplets) {
       assert_eq!(blame, u16::try_from(i + 1).unwrap());
     } else {
-      assert!(false);
+      panic!("batch verification considered an invalid signature valid");
     }
   }
 }
@@ -80,7 +80,7 @@ fn sign_core<R: RngCore + CryptoRng, C: Curve>(
   group_key: C::G,
   keys: &HashMap<u16, Arc<FrostKeys<C>>>,
 ) {
-  const MESSAGE: &'static [u8] = b"Hello, World!";
+  const MESSAGE: &[u8] = b"Hello, World!";

   let machines = algorithm_machines(rng, Schnorr::<C, TestHram<C>>::new(), keys);
   let sig = sign_test(&mut *rng, machines, MESSAGE);
@@ -87,7 +87,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
       AlgorithmMachine::new(
         Schnorr::<C, H>::new(),
         Arc::new(keys[i].clone()),
-        vectors.included.clone(),
+        &vectors.included.to_vec().clone(),
       )
       .unwrap(),
     ));
@@ -24,7 +24,7 @@ where
     pairs: I,
   ) {
     // Define a unique scalar factor for this set of variables so individual items can't overlap
-    let u = if self.0.len() == 0 {
+    let u = if self.0.is_empty() {
       G::Scalar::one()
     } else {
       let mut weight;
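`self.0.len() == 0` becoming `self.0.is_empty()` is clippy's `len_zero` lint; `is_empty` states the intent directly and exists on every standard collection that has `len`. A one-liner sketch:

```rust
fn main() {
  let pairs: Vec<(u64, u64)> = Vec::new();

  // clippy::len_zero: prefer is_empty() over a length comparison with zero.
  assert_eq!(pairs.len() == 0, pairs.is_empty());
}
```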