From eab5d9e64fd27076a49583e5f45901d34b463c01 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 12 Jul 2025 03:29:21 -0400 Subject: [PATCH 001/116] Remove Mastodon link from README Closes #662. --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 319c05db..a7732ba7 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,6 @@ issued at the discretion of the Immunefi program managers. - [Website](https://serai.exchange/): https://serai.exchange/ - [Immunefi](https://immunefi.com/bounty/serai/): https://immunefi.com/bounty/serai/ - [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX -- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai - [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz - [Matrix](https://matrix.to/#/#serai:matrix.org): https://matrix.to/#/#serai:matrix.org - [Reddit](https://www.reddit.com/r/SeraiDEX/): https://www.reddit.com/r/SeraiDEX/ From a4ceb2e756e9eac7653f7db8fb6085def38a5bcf Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 20:50:12 -0400 Subject: [PATCH 002/116] Forward docker stderr to stdout in case stderr is being dropped for some reason --- substrate/client/tests/common/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrate/client/tests/common/mod.rs b/substrate/client/tests/common/mod.rs index 7dda7d0a..5415e45e 100644 --- a/substrate/client/tests/common/mod.rs +++ b/substrate/client/tests/common/mod.rs @@ -40,7 +40,7 @@ macro_rules! serai_test { .set_handle(handle) .set_start_policy(StartPolicy::Strict) .set_log_options(Some(LogOptions { - action: LogAction::Forward, + action: LogAction::ForwardToStdOut, policy: LogPolicy::Always, source: LogSource::Both, })); From 21ce50ecf73f78536680e35e9c917356cf451f3b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 20:53:09 -0400 Subject: [PATCH 003/116] Revert "Forward docker stderr to stdout in case stderr is being dropped for some reason" This was intended for the monero-audit branch. --- substrate/client/tests/common/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrate/client/tests/common/mod.rs b/substrate/client/tests/common/mod.rs index 5415e45e..7dda7d0a 100644 --- a/substrate/client/tests/common/mod.rs +++ b/substrate/client/tests/common/mod.rs @@ -40,7 +40,7 @@ macro_rules! 
serai_test { .set_handle(handle) .set_start_policy(StartPolicy::Strict) .set_log_options(Some(LogOptions { - action: LogAction::ForwardToStdOut, + action: LogAction::Forward, policy: LogPolicy::Always, source: LogSource::Both, })); From b426bfcfe89c09436ff3a1dad1eee1bb3746c30d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 23 Jul 2025 08:42:04 -0400 Subject: [PATCH 004/116] Respond to 1.1 A1 --- networks/monero/ringct/clsag/src/lib.rs | 22 ++++++++++++-------- networks/monero/ringct/clsag/src/multisig.rs | 12 +++++------ networks/monero/ringct/clsag/src/tests.rs | 8 +++---- 3 files changed, 23 insertions(+), 19 deletions(-) diff --git a/networks/monero/ringct/clsag/src/lib.rs b/networks/monero/ringct/clsag/src/lib.rs index 0aab537b..454c34fd 100644 --- a/networks/monero/ringct/clsag/src/lib.rs +++ b/networks/monero/ringct/clsag/src/lib.rs @@ -100,7 +100,7 @@ fn core( ring: &[[EdwardsPoint; 2]], I: &EdwardsPoint, pseudo_out: &EdwardsPoint, - msg: &[u8; 32], + msg_hash: &[u8; 32], D: &EdwardsPoint, s: &[Scalar], A_c1: &Mode, @@ -156,7 +156,7 @@ fn core( // Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be // truncated just to add it back to_hash.extend(pseudo_out.compress().to_bytes()); - to_hash.extend(msg); + to_hash.extend(msg_hash); // Configure the loop based on if we're signing or verifying let start; @@ -245,7 +245,7 @@ impl Clsag { I: &EdwardsPoint, input: &ClsagContext, mask: Scalar, - msg: &[u8; 32], + msg_hash: &[u8; 32], A: EdwardsPoint, AH: EdwardsPoint, ) -> ClsagSignCore { @@ -261,7 +261,7 @@ impl Clsag { s.push(Scalar::random(rng)); } let ((D, c_p, c_c), c1) = - core(input.decoys.ring(), I, &pseudo_out, msg, &D, &s, &Mode::Sign(r, A, AH)); + core(input.decoys.ring(), I, &pseudo_out, msg_hash, &D, &s, &Mode::Sign(r, A, AH)); ClsagSignCore { incomplete_clsag: Clsag { D, s, c1 }, @@ -288,11 +288,15 @@ impl Clsag { /// `inputs` is of the form (discrete logarithm of the key, context). /// /// `sum_outputs` is for the sum of the output commitments' masks. + /// + /// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which + /// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do + /// not use this if you don't know what you're doing. 
pub fn sign( rng: &mut R, mut inputs: Vec<(Zeroizing, ClsagContext)>, sum_outputs: Scalar, - msg: [u8; 32], + msg_hash: [u8; 32], ) -> Result, ClsagError> { // Create the key images let mut key_image_generators = vec![]; @@ -329,7 +333,7 @@ impl Clsag { &key_images[i], &inputs[i].1, mask, - &msg, + &msg_hash, nonce.deref() * ED25519_BASEPOINT_TABLE, nonce.deref() * key_image_generators[i], ); @@ -345,7 +349,7 @@ impl Clsag { nonce.zeroize(); debug_assert!(clsag - .verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg) + .verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg_hash) .is_ok()); res.push((clsag, pseudo_out)); @@ -360,7 +364,7 @@ impl Clsag { ring: &[[EdwardsPoint; 2]], I: &EdwardsPoint, pseudo_out: &EdwardsPoint, - msg: &[u8; 32], + msg_hash: &[u8; 32], ) -> Result<(), ClsagError> { // Preliminary checks // s, c1, and points must also be encoded canonically, which is checked at time of decode @@ -379,7 +383,7 @@ impl Clsag { Err(ClsagError::InvalidD)?; } - let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, &Mode::Verify(self.c1)); + let (_, c1) = core(ring, I, pseudo_out, msg_hash, &D, &self.s, &Mode::Verify(self.c1)); if c1 != self.c1 { Err(ClsagError::InvalidC1)?; } diff --git a/networks/monero/ringct/clsag/src/multisig.rs b/networks/monero/ringct/clsag/src/multisig.rs index bfbb8fc5..8fdb73f2 100644 --- a/networks/monero/ringct/clsag/src/multisig.rs +++ b/networks/monero/ringct/clsag/src/multisig.rs @@ -132,7 +132,7 @@ pub struct ClsagMultisig { mask_recv: Option, mask: Option, - msg: Option<[u8; 32]>, + msg_hash: Option<[u8; 32]>, interim: Option, } @@ -156,7 +156,7 @@ impl ClsagMultisig { mask_recv: Some(mask_recv), mask: None, - msg: None, + msg_hash: None, interim: None, }, mask_send, @@ -253,7 +253,7 @@ impl Algorithm for ClsagMultisig { view: &ThresholdView, nonce_sums: &[Vec], nonces: Vec>, - msg: &[u8], + msg_hash: &[u8], ) -> dfg::Scalar { // Use the transcript to get a seeded random number generator // @@ -264,14 +264,14 @@ impl Algorithm for ClsagMultisig { // opening of the commitment being re-randomized (and what it's re-randomized to) let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses")); - self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes")); + self.msg_hash = Some(msg_hash.try_into().expect("CLSAG message hash should be 32-bytes")); let sign_core = Clsag::sign_core( &mut rng, &self.image.expect("verifying a share despite never processing any addendums").0, &self.context, self.mask.expect("mask wasn't set"), - self.msg.as_ref().unwrap(), + self.msg_hash.as_ref().unwrap(), nonce_sums[0][0].0, nonce_sums[0][1].0, ); @@ -303,7 +303,7 @@ impl Algorithm for ClsagMultisig { self.context.decoys.ring(), &self.image.expect("verifying a signature despite never processing any addendums").0, &interim.pseudo_out, - self.msg.as_ref().unwrap(), + self.msg_hash.as_ref().unwrap(), ) .is_ok() { diff --git a/networks/monero/ringct/clsag/src/tests.rs b/networks/monero/ringct/clsag/src/tests.rs index ba71d69c..d4ae1f41 100644 --- a/networks/monero/ringct/clsag/src/tests.rs +++ b/networks/monero/ringct/clsag/src/tests.rs @@ -31,7 +31,7 @@ const RING_INDEX: u8 = 3; #[test] fn clsag() { for real in 0 .. 
RING_LEN { - let msg = [1; 32]; + let msg_hash = [1; 32]; let mut secrets = (Zeroizing::new(Scalar::ZERO), Scalar::ZERO); let mut ring = vec![]; @@ -61,18 +61,18 @@ fn clsag() { .unwrap(), )], Scalar::random(&mut OsRng), - msg, + msg_hash, ) .unwrap() .swap_remove(0); let image = hash_to_point((ED25519_BASEPOINT_TABLE * secrets.0.deref()).compress().0) * secrets.0.deref(); - clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap(); + clsag.verify(&ring, &image, &pseudo_out, &msg_hash).unwrap(); // make sure verification fails if we throw a random `c1` at it. clsag.c1 = Scalar::random(&mut OsRng); - assert!(clsag.verify(&ring, &image, &pseudo_out, &msg).is_err()); + assert!(clsag.verify(&ring, &image, &pseudo_out, &msg_hash).is_err()); } } From 6b8cf6653a9abc892b0c1a896c0a0dceac1fd296 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 23 Jul 2025 08:58:02 -0400 Subject: [PATCH 005/116] Respond to 1.1 A2 (also cited as 2 1) `read_vec` was unbounded. It now accepts an optional bound. In some places, we are able to define and provide a bound (Bulletproofs(+)' `L` and `R` vectors). In others, we cannot (the amount of inputs within a transaction, which is not subject to any rule in the current consensus other than the total transaction size limit). Usage of `None` in those locations preserves the existing behavior. --- networks/monero/generators/src/lib.rs | 2 -- networks/monero/io/src/lib.rs | 18 ++++++++++++++++-- networks/monero/primitives/src/lib.rs | 8 +++++--- .../monero/ringct/bulletproofs/src/core.rs | 2 +- networks/monero/ringct/bulletproofs/src/lib.rs | 15 ++++++++++----- networks/monero/src/transaction.rs | 8 ++++---- networks/monero/wallet/src/extra.rs | 12 +++--------- networks/monero/wallet/src/send/mod.rs | 8 ++++---- 8 files changed, 43 insertions(+), 30 deletions(-) diff --git a/networks/monero/generators/src/lib.rs b/networks/monero/generators/src/lib.rs index bc78a1e5..ebff93cc 100644 --- a/networks/monero/generators/src/lib.rs +++ b/networks/monero/generators/src/lib.rs @@ -51,8 +51,6 @@ pub fn H_pow_2() -> &'static [EdwardsPoint; 64] { pub const MAX_COMMITMENTS: usize = 16; /// The amount of bits a value within a commitment may use. pub const COMMITMENT_BITS: usize = 64; -/// The logarithm (over 2) of the amount of bits a value within a commitment may use. -pub const LOG_COMMITMENT_BITS: usize = 6; // 2 ** 6 == N /// Container struct for Bulletproofs(+) generators. #[allow(non_snake_case)] diff --git a/networks/monero/io/src/lib.rs b/networks/monero/io/src/lib.rs index 68acbe80..4e43b77e 100644 --- a/networks/monero/io/src/lib.rs +++ b/networks/monero/io/src/lib.rs @@ -214,6 +214,20 @@ pub fn read_array io::Result, const N: us } /// Read a length-prefixed variable-length list of elements. -pub fn read_vec io::Result>(f: F, r: &mut R) -> io::Result> { - read_raw_vec(f, read_varint(r)?, r) +/// +/// An optional bound on the length of the result may be provided. If `None`, the returned `Vec` +/// will be of the length read off the reader, if successfully read. If `Some(_)`, an error will be +/// raised if the length read off the read is greater than the bound. 
+pub fn read_vec io::Result>( + f: F, + length_bound: Option, + r: &mut R, +) -> io::Result> { + let declared_length: usize = read_varint(r)?; + if let Some(length_bound) = length_bound { + if declared_length > length_bound { + Err(io::Error::other("vector exceeds bound on length"))?; + } + } + read_raw_vec(f, declared_length, r) } diff --git a/networks/monero/primitives/src/lib.rs b/networks/monero/primitives/src/lib.rs index 543f385e..f3e50a01 100644 --- a/networks/monero/primitives/src/lib.rs +++ b/networks/monero/primitives/src/lib.rs @@ -213,7 +213,7 @@ impl Decoys { pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> { write_vec(write_varint, &self.offsets, w)?; w.write_all(&[self.signer_index])?; - write_vec( + write_raw_vec( |pair, w| { write_point(&pair[0], w)?; write_point(&pair[1], w) @@ -239,10 +239,12 @@ impl Decoys { /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol /// defined serialization. pub fn read(r: &mut impl io::Read) -> io::Result { + let offsets = read_vec(read_varint, None, r)?; + let len = offsets.len(); Decoys::new( - read_vec(read_varint, r)?, + offsets, read_byte(r)?, - read_vec(|r| Ok([read_point(r)?, read_point(r)?]), r)?, + read_raw_vec(|r| Ok([read_point(r)?, read_point(r)?]), len, r)?, ) .ok_or_else(|| io::Error::other("invalid Decoys")) } diff --git a/networks/monero/ringct/bulletproofs/src/core.rs b/networks/monero/ringct/bulletproofs/src/core.rs index 09112670..9dc86fcd 100644 --- a/networks/monero/ringct/bulletproofs/src/core.rs +++ b/networks/monero/ringct/bulletproofs/src/core.rs @@ -6,7 +6,7 @@ use curve25519_dalek::{ edwards::EdwardsPoint, }; -pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS, LOG_COMMITMENT_BITS}; +pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS}; pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint { let mut buf_scalars = Vec::with_capacity(pairs.len()); diff --git a/networks/monero/ringct/bulletproofs/src/lib.rs b/networks/monero/ringct/bulletproofs/src/lib.rs index 2a789575..61a5dbc9 100644 --- a/networks/monero/ringct/bulletproofs/src/lib.rs +++ b/networks/monero/ringct/bulletproofs/src/lib.rs @@ -17,13 +17,13 @@ use curve25519_dalek::edwards::EdwardsPoint; use monero_io::*; pub use monero_generators::MAX_COMMITMENTS; +use monero_generators::COMMITMENT_BITS; use monero_primitives::Commitment; pub(crate) mod scalar_vector; pub(crate) mod point_vector; pub(crate) mod core; -use crate::core::LOG_COMMITMENT_BITS; pub(crate) mod batch_verifier; use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier}; @@ -44,6 +44,11 @@ use crate::plus::{ #[cfg(test)] mod tests; +// The logarithm (over 2) of the amount of bits a value within a commitment may use. +const LOG_COMMITMENT_BITS: usize = COMMITMENT_BITS.ilog2() as usize; +// The maximum length of L/R `Vec`s. +const MAX_LR: usize = (MAX_COMMITMENTS.ilog2() as usize) + LOG_COMMITMENT_BITS; + /// An error from proving/verifying Bulletproofs(+). 
#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[cfg_attr(feature = "std", derive(thiserror::Error))] @@ -265,8 +270,8 @@ impl Bulletproof { tau_x: read_scalar(r)?, mu: read_scalar(r)?, ip: IpProof { - L: read_vec(read_point, r)?, - R: read_vec(read_point, r)?, + L: read_vec(read_point, Some(MAX_LR), r)?, + R: read_vec(read_point, Some(MAX_LR), r)?, a: read_scalar(r)?, b: read_scalar(r)?, }, @@ -284,8 +289,8 @@ impl Bulletproof { r_answer: read_scalar(r)?, s_answer: read_scalar(r)?, delta_answer: read_scalar(r)?, - L: read_vec(read_point, r)?.into_iter().collect(), - R: read_vec(read_point, r)?.into_iter().collect(), + L: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(), + R: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(), }, })) } diff --git a/networks/monero/src/transaction.rs b/networks/monero/src/transaction.rs index 54b1b80f..8d998ec6 100644 --- a/networks/monero/src/transaction.rs +++ b/networks/monero/src/transaction.rs @@ -71,7 +71,7 @@ impl Input { let amount = if amount == 0 { None } else { Some(amount) }; Input::ToKey { amount, - key_offsets: read_vec(read_varint, r)?, + key_offsets: read_vec(read_varint, None, r)?, key_image: read_torsion_free_point(r)?, } } @@ -241,7 +241,7 @@ impl TransactionPrefix { pub fn read(r: &mut R, version: u64) -> io::Result { let additional_timelock = Timelock::read(r)?; - let inputs = read_vec(|r| Input::read(r), r)?; + let inputs = read_vec(|r| Input::read(r), None, r)?; if inputs.is_empty() { Err(io::Error::other("transaction had no inputs"))?; } @@ -250,10 +250,10 @@ impl TransactionPrefix { let mut prefix = TransactionPrefix { additional_timelock, inputs, - outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?, + outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), None, r)?, extra: vec![], }; - prefix.extra = read_vec(read_byte, r)?; + prefix.extra = read_vec(read_byte, None, r)?; Ok(prefix) } diff --git a/networks/monero/wallet/src/extra.rs b/networks/monero/wallet/src/extra.rs index 537e595a..2c0706fd 100644 --- a/networks/monero/wallet/src/extra.rs +++ b/networks/monero/wallet/src/extra.rs @@ -181,16 +181,10 @@ impl ExtraField { size }), 1 => ExtraField::PublicKey(read_point(r)?), - 2 => ExtraField::Nonce({ - let nonce = read_vec(read_byte, r)?; - if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE { - Err(io::Error::other("too long nonce"))?; - } - nonce - }), + 2 => ExtraField::Nonce(read_vec(read_byte, Some(MAX_TX_EXTRA_NONCE_SIZE), r)?), 3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?), - 4 => ExtraField::PublicKeys(read_vec(read_point, r)?), - 0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, r)?), + 4 => ExtraField::PublicKeys(read_vec(read_point, None, r)?), + 0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, None, r)?), _ => Err(io::Error::other("unknown extra field"))?, }) } diff --git a/networks/monero/wallet/src/send/mod.rs b/networks/monero/wallet/src/send/mod.rs index ca92961f..d3adb166 100644 --- a/networks/monero/wallet/src/send/mod.rs +++ b/networks/monero/wallet/src/send/mod.rs @@ -456,7 +456,7 @@ impl SignableTransaction { /// defined serialization. pub fn read(r: &mut R) -> io::Result { fn read_address(r: &mut R) -> io::Result { - String::from_utf8(read_vec(read_byte, r)?) + String::from_utf8(read_vec(read_byte, None, r)?) 
.ok() .and_then(|str| MoneroAddress::from_str_with_unchecked_network(&str).ok()) .ok_or_else(|| io::Error::other("invalid address")) @@ -484,9 +484,9 @@ impl SignableTransaction { rct_type: RctType::try_from(read_byte(r)?) .map_err(|()| io::Error::other("unsupported/invalid RctType"))?, outgoing_view_key: Zeroizing::new(read_bytes(r)?), - inputs: read_vec(OutputWithDecoys::read, r)?, - payments: read_vec(read_payment, r)?, - data: read_vec(|r| read_vec(read_byte, r), r)?, + inputs: read_vec(OutputWithDecoys::read, None, r)?, + payments: read_vec(read_payment, None, r)?, + data: read_vec(|r| read_vec(read_byte, None, r), None, r)?, fee_rate: FeeRate::read(r)?, }; match res.validate() { From cb1e6535cbb3ddf7a1d5f0c7e86a9b352a8e7ba6 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 23 Jul 2025 09:27:50 -0400 Subject: [PATCH 006/116] Respond to 2 2 --- networks/monero/primitives/src/tests.rs | 2 +- .../monero/primitives/src/unreduced_scalar.rs | 16 +++++++++------- networks/monero/ringct/borromean/src/lib.rs | 4 ++-- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/networks/monero/primitives/src/tests.rs b/networks/monero/primitives/src/tests.rs index a14d1cd5..68d3df75 100644 --- a/networks/monero/primitives/src/tests.rs +++ b/networks/monero/primitives/src/tests.rs @@ -8,7 +8,7 @@ fn recover_scalars() { let stored = UnreducedScalar(hex::decode(stored).unwrap().try_into().unwrap()); let recovered = Scalar::from_canonical_bytes(hex::decode(recovered).unwrap().try_into().unwrap()).unwrap(); - assert_eq!(stored.recover_monero_slide_scalar(), recovered); + assert_eq!(stored.ref10_slide_scalar_vartime(), recovered); }; // https://www.moneroinflation.com/static/data_py/report_scalars_df.pdf diff --git a/networks/monero/primitives/src/unreduced_scalar.rs b/networks/monero/primitives/src/unreduced_scalar.rs index 90331cd7..bf11645a 100644 --- a/networks/monero/primitives/src/unreduced_scalar.rs +++ b/networks/monero/primitives/src/unreduced_scalar.rs @@ -54,7 +54,7 @@ impl UnreducedScalar { // This matches Monero's `slide` function and intentionally gives incorrect outputs under // certain conditions in order to match Monero. // - // This function does not execute in constant time. + // This function does not execute in constant time and must only be used with public data. fn non_adjacent_form(&self) -> [i8; 256] { let bits = self.as_bits(); let mut naf = [0i8; 256]; @@ -107,15 +107,17 @@ impl UnreducedScalar { naf } - /// Recover the scalar that an array of bytes was incorrectly interpreted as by Monero's `slide` - /// function. + /// Recover the scalar that an array of bytes was incorrectly interpreted as by ref10's `slide` + /// function (as used by the reference Monero implementation in C++). /// - /// In Borromean range proofs, Monero was not checking that the scalars used were - /// reduced. This lead to the scalar stored being interpreted as a different scalar. - /// This function recovers that scalar. + /// For Borromean range proofs, Monero did not check the scalars used were reduced. This led to + /// some scalars serialized being interpreted as distinct scalars. This function recovers these + /// distinct scalars, as required to verify Borromean range proofs within the Monero protocol. /// /// See for more info. - pub fn recover_monero_slide_scalar(&self) -> Scalar { + // + /// This function does not execute in constant time and must only be used with public data. 
+ pub fn ref10_slide_scalar_vartime(&self) -> Scalar { if self.0[31] & 128 == 0 { // Computing the w-NAF of a number can only give an output with 1 more bit than // the number, so even if the number isn't reduced, the `slide` function will be diff --git a/networks/monero/ringct/borromean/src/lib.rs b/networks/monero/ringct/borromean/src/lib.rs index 5e105142..fc0f2194 100644 --- a/networks/monero/ringct/borromean/src/lib.rs +++ b/networks/monero/ringct/borromean/src/lib.rs @@ -56,13 +56,13 @@ impl BorromeanSignatures { let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint( &self.ee, &keys_a[i], - &self.s0[i].recover_monero_slide_scalar(), + &self.s0[i].ref10_slide_scalar_vartime(), ); #[allow(non_snake_case)] let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint( &keccak256_to_scalar(LL.compress().as_bytes()), &keys_b[i], - &self.s1[i].recover_monero_slide_scalar(), + &self.s1[i].ref10_slide_scalar_vartime(), ); transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes()); } From feb18d64a774aab7ae4c8280cbc34bf861ffdb70 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 23 Jul 2025 10:03:11 -0400 Subject: [PATCH 007/116] Respond to 2 3 We now use `FrostError::InternalError` instead of a panic to represent the mask not being set. --- networks/monero/ringct/clsag/src/multisig.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/networks/monero/ringct/clsag/src/multisig.rs b/networks/monero/ringct/clsag/src/multisig.rs index 8fdb73f2..d4ddd180 100644 --- a/networks/monero/ringct/clsag/src/multisig.rs +++ b/networks/monero/ringct/clsag/src/multisig.rs @@ -56,8 +56,7 @@ impl ClsagContext { /// A channel to send the mask to use for the pseudo-out (rerandomized commitment) with. /// -/// A mask must be sent along this channel before any preprocess addendums are handled. Breaking -/// this rule will cause a panic. +/// A mask must be sent along this channel before any preprocess addendums are handled. #[derive(Clone, Debug)] pub struct ClsagMultisigMaskSender { buf: Arc>>, @@ -78,8 +77,8 @@ impl ClsagMultisigMaskSender { } } impl ClsagMultisigMaskReceiver { - fn recv(self) -> Scalar { - self.buf.lock().unwrap() + fn recv(self) -> Option { + *self.buf.lock() } } @@ -114,7 +113,7 @@ struct Interim { /// FROST-inspired algorithm for producing a CLSAG signature. /// -/// Before this has its `process_addendum` called, a mask must be set. Else this will panic. +/// Before this has its `process_addendum` called, a mask must be set. /// /// The message signed is expected to be a 32-byte value. Per Monero, it's the keccak256 hash of /// the transaction data which is signed. This will panic if the message is not a 32-byte value. @@ -218,7 +217,14 @@ impl Algorithm for ClsagMultisig { // Fetch the mask from the Mutex // We set it to a variable to ensure our view of it is consistent // It was this or a mpsc channel... 
std doesn't have oneshot :/ - self.mask = Some(self.mask_recv.take().unwrap().recv()); + self.mask = Some( + self + .mask_recv + .take() + .unwrap() + .recv() + .ok_or(FrostError::InternalError("CLSAG mask was not provided"))?, + ); // Transcript the mask self.transcript.append_message(b"mask", self.mask.expect("mask wasn't set").to_bytes()); From 4f65a0b147c72b1ba2b2b59dd20776ee5d7d7644 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 23 Jul 2025 15:13:27 -0400 Subject: [PATCH 008/116] Remove Clone from ClsagMultisigMask{Sender, Receiver} This had ill-defined properties on Clone, as a mask could be sent multiple times (unintended) and multiple algorithms may receive the same mask from a singular sender. Requires removing the Clone bound within modular-frost and expanding the test helpers accordingly. This was not raised in the audit yet upon independent review. --- crypto/frost/src/algorithm.rs | 2 +- crypto/frost/src/sign.rs | 6 +-- crypto/frost/src/tests/mod.rs | 55 +++++++++++++++----- networks/monero/ringct/clsag/src/multisig.rs | 8 +-- networks/monero/ringct/clsag/src/tests.rs | 38 +++++++++----- 5 files changed, 75 insertions(+), 34 deletions(-) diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index 0b0abd6c..b595e03b 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -25,7 +25,7 @@ pub trait Addendum: Send + Sync + Clone + PartialEq + Debug + WriteAddendum {} impl Addendum for A {} /// Algorithm trait usable by the FROST signing machine to produce signatures.. -pub trait Algorithm: Send + Sync + Clone { +pub trait Algorithm: Send + Sync { /// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible /// transcript included in this crate. type Transcript: Sync + Clone + Debug + Transcript; diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 5115244f..693960d5 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -47,7 +47,7 @@ impl Writable for Vec { } // Pairing of an Algorithm with a ThresholdKeys instance. -#[derive(Clone, Zeroize)] +#[derive(Zeroize)] struct Params> { // Skips the algorithm due to being too large a bound to feasibly enforce on users #[zeroize(skip)] @@ -193,7 +193,7 @@ impl SignatureShare { /// Trait for the second machine of a two-round signing protocol. pub trait SignMachine: Send + Sync + Sized { /// Params used to instantiate this machine which can be used to rebuild from a cache. - type Params: Clone; + type Params; /// Keys used for signing operations. type Keys; /// Preprocess message for this machine. @@ -397,7 +397,7 @@ impl> SignMachine for AlgorithmSignMachi Ok(( AlgorithmSignatureMachine { - params: self.params.clone(), + params: self.params, view, B, Rs, diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index f93a5fbf..db6553aa 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -37,10 +37,10 @@ pub fn clone_without( } /// Spawn algorithm machines for a random selection of signers, each executing the given algorithm. 
-pub fn algorithm_machines>( +pub fn algorithm_machines_without_clone>( rng: &mut R, - algorithm: &A, keys: &HashMap>, + machines: HashMap>, ) -> HashMap> { let mut included = vec![]; while included.len() < usize::from(keys[&Participant::new(1).unwrap()].params().t()) { @@ -54,18 +54,28 @@ pub fn algorithm_machines>( included.push(n); } - keys - .iter() - .filter_map(|(i, keys)| { - if included.contains(i) { - Some((*i, AlgorithmMachine::new(algorithm.clone(), keys.clone()))) - } else { - None - } - }) + machines + .into_iter() + .filter_map(|(i, machine)| if included.contains(&i) { Some((i, machine)) } else { None }) .collect() } +/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm. +pub fn algorithm_machines>( + rng: &mut R, + algorithm: &A, + keys: &HashMap>, +) -> HashMap> { + algorithm_machines_without_clone( + rng, + keys, + keys + .values() + .map(|keys| (keys.params().i(), AlgorithmMachine::new(algorithm.clone(), keys.clone()))) + .collect(), + ) +} + // Run the preprocess step pub(crate) fn preprocess< R: RngCore + CryptoRng, @@ -165,10 +175,10 @@ pub fn sign_without_caching( /// Execute the signing protocol, randomly caching various machines to ensure they can cache /// successfully. -pub fn sign( +pub fn sign_without_clone( rng: &mut R, - params: &>::Params, mut keys: HashMap>::Keys>, + mut params: HashMap>::Params>, machines: HashMap, msg: &[u8], ) -> M::Signature { @@ -183,7 +193,8 @@ pub fn sign( let cache = machines.remove(&i).unwrap().cache(); machines.insert( i, - M::SignMachine::from_cache(params.clone(), keys.remove(&i).unwrap(), cache).0, + M::SignMachine::from_cache(params.remove(&i).unwrap(), keys.remove(&i).unwrap(), cache) + .0, ); } } @@ -192,6 +203,22 @@ pub fn sign( ) } +/// Execute the signing protocol, randomly caching various machines to ensure they can cache +/// successfully. +pub fn sign< + R: RngCore + CryptoRng, + M: PreprocessMachine>, +>( + rng: &mut R, + params: &>::Params, + keys: HashMap>::Keys>, + machines: HashMap, + msg: &[u8], +) -> M::Signature { + let params = keys.keys().map(|i| (*i, params.clone())).collect(); + sign_without_clone(rng, keys, params, machines, msg) +} + /// Test a basic Schnorr signature with the provided keys. pub fn test_schnorr_with_keys>( rng: &mut R, diff --git a/networks/monero/ringct/clsag/src/multisig.rs b/networks/monero/ringct/clsag/src/multisig.rs index d4ddd180..2cd7ea6e 100644 --- a/networks/monero/ringct/clsag/src/multisig.rs +++ b/networks/monero/ringct/clsag/src/multisig.rs @@ -57,11 +57,11 @@ impl ClsagContext { /// A channel to send the mask to use for the pseudo-out (rerandomized commitment) with. /// /// A mask must be sent along this channel before any preprocess addendums are handled. -#[derive(Clone, Debug)] +#[derive(Debug)] pub struct ClsagMultisigMaskSender { buf: Arc>>, } -#[derive(Clone, Debug)] +#[derive(Debug)] struct ClsagMultisigMaskReceiver { buf: Arc>>, } @@ -73,6 +73,8 @@ impl ClsagMultisigMaskSender { /// Send a mask to a CLSAG multisig instance. pub fn send(self, mask: Scalar) { + // There is no risk this was prior set as this consumes `self`, which does not implement + // `Clone` *self.buf.lock() = Some(mask); } } @@ -118,7 +120,7 @@ struct Interim { /// The message signed is expected to be a 32-byte value. Per Monero, it's the keccak256 hash of /// the transaction data which is signed. This will panic if the message is not a 32-byte value. 
#[allow(non_snake_case)] -#[derive(Clone, Debug)] +#[derive(Debug)] pub struct ClsagMultisig { transcript: RecommendedTranscript, diff --git a/networks/monero/ringct/clsag/src/tests.rs b/networks/monero/ringct/clsag/src/tests.rs index d4ae1f41..ff994445 100644 --- a/networks/monero/ringct/clsag/src/tests.rs +++ b/networks/monero/ringct/clsag/src/tests.rs @@ -19,7 +19,8 @@ use crate::ClsagMultisig; #[cfg(feature = "multisig")] use frost::{ Participant, - tests::{key_gen, algorithm_machines, sign}, + sign::AlgorithmMachine, + tests::{key_gen, algorithm_machines_without_clone, sign_without_clone}, }; const RING_LEN: u64 = 11; @@ -99,21 +100,32 @@ fn clsag_multisig() { ring.push([dest, Commitment::new(mask, amount).calculate()]); } - let (algorithm, mask_send) = ClsagMultisig::new( - RecommendedTranscript::new(b"Monero Serai CLSAG Test"), - ClsagContext::new( - Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(), - Commitment::new(randomness, AMOUNT), - ) - .unwrap(), - ); - mask_send.send(Scalar::random(&mut OsRng)); + let mask = Scalar::random(&mut OsRng); + let params = || { + let (algorithm, mask_send) = ClsagMultisig::new( + RecommendedTranscript::new(b"Monero Serai CLSAG Test"), + ClsagContext::new( + Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(), + Commitment::new(randomness, AMOUNT), + ) + .unwrap(), + ); + mask_send.send(mask); + algorithm + }; - sign( + sign_without_clone( &mut OsRng, - &algorithm, keys.clone(), - algorithm_machines(&mut OsRng, &algorithm, &keys), + keys.values().map(|keys| (keys.params().i(), params())).collect(), + algorithm_machines_without_clone( + &mut OsRng, + &keys, + keys + .values() + .map(|keys| (keys.params().i(), AlgorithmMachine::new(params(), keys.clone()))) + .collect(), + ), &[1; 32], ); } From a5f4c450c652359787d951b1204bd996c4fb2777 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 21:28:47 -0400 Subject: [PATCH 009/116] Response to usage of `unwrap` in non-test code This commit replaces all usage of `unwrap` with `expect` within `networks/monero`, clarifying why the panic risked is unreachable. This commit also replaces some uses of `unwrap` with solutions which are guaranteed not to fail. Notably, compilation on 128-bit systems is prevented, ensuring `u64::try_from(usize::MAX)` will never panic at runtime. Slight breaking changes are additionally included as necessary to massage out some avoidable panics. 
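For reference, the 128-bit guard mentioned above relies on const evaluation: `u64::BITS - usize::BITS` underflows at compile time whenever `usize` is wider than `u64`, so the crate simply fails to build on such a platform and `u64::try_from(usize)` conversions can no longer panic at runtime. A minimal standalone sketch of the pattern follows (the constant name here is illustrative, not the one introduced by the patch):

    // If `usize::BITS > u64::BITS`, this subtraction underflows during constant
    // evaluation and compilation aborts. On 32-/64-bit targets it evaluates to a
    // zero-sized array of unit values and costs nothing at runtime.
    const _USIZE_FITS_IN_U64: [(); (u64::BITS - usize::BITS) as usize] =
      [(); (u64::BITS - usize::BITS) as usize];

    fn main() {
      // With the guard in place, this conversion cannot fail at runtime.
      let max: u64 = u64::try_from(usize::MAX).expect("usize fits in u64 per the guard above");
      println!("{max}");
    }
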
--- .../monero/generators/src/hash_to_point.rs | 24 +++++++-- networks/monero/generators/src/lib.rs | 6 +-- networks/monero/io/src/lib.rs | 27 +++++++--- networks/monero/primitives/src/lib.rs | 4 +- .../monero/primitives/src/unreduced_scalar.rs | 14 ++++-- networks/monero/ringct/bulletproofs/build.rs | 18 ++++--- .../monero/ringct/bulletproofs/src/lib.rs | 29 +++++++---- .../src/original/inner_product.rs | 6 ++- .../ringct/bulletproofs/src/original/mod.rs | 7 ++- .../src/plus/aggregate_range_proof.rs | 15 ++++-- .../src/plus/weighted_inner_product.rs | 8 ++- networks/monero/ringct/clsag/src/lib.rs | 7 +-- networks/monero/ringct/clsag/src/multisig.rs | 16 +++--- networks/monero/rpc/simple-request/src/lib.rs | 33 +++--------- networks/monero/rpc/src/lib.rs | 32 +++++++++--- networks/monero/src/block.rs | 18 +++++-- networks/monero/src/merkle.rs | 2 +- networks/monero/src/ringct.rs | 6 ++- networks/monero/src/transaction.rs | 34 ++++++++----- .../monero/wallet/address/src/base58check.rs | 4 +- networks/monero/wallet/address/src/lib.rs | 13 ++--- networks/monero/wallet/src/decoys.rs | 50 +++++++++++++------ networks/monero/wallet/src/extra.rs | 10 ++-- networks/monero/wallet/src/lib.rs | 11 ++-- networks/monero/wallet/src/output.rs | 26 ++++++---- networks/monero/wallet/src/scan.rs | 14 +++--- networks/monero/wallet/src/send/mod.rs | 12 +++-- networks/monero/wallet/src/send/multisig.rs | 2 +- networks/monero/wallet/src/send/tx.rs | 19 ++++--- networks/monero/wallet/src/send/tx_keys.rs | 8 +-- networks/monero/wallet/tests/runner/mod.rs | 4 +- 31 files changed, 310 insertions(+), 169 deletions(-) diff --git a/networks/monero/generators/src/hash_to_point.rs b/networks/monero/generators/src/hash_to_point.rs index 23b3a086..136fea84 100644 --- a/networks/monero/generators/src/hash_to_point.rs +++ b/networks/monero/generators/src/hash_to_point.rs @@ -29,7 +29,11 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint { let uv3 = u * v3; let v7 = v3 * v3 * v; let uv7 = u * v7; - uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap()) + uv3 * + uv7.pow( + (-FieldElement::from(5u8)) * + FieldElement::from(8u8).invert().expect("eight was coprime with the prime 2^{255}-19"), + ) }; let x = X.square() * x; @@ -45,9 +49,23 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint { #[allow(non_snake_case)] let mut Y = z - w; - Y *= Z.invert().unwrap(); + /* + If sign, `z = -486662`, else, `z = -486662 * v` + `w = v + 1` + + We need `z + w \ne 0`, which would require `z \cong -w \mod 2^{255}-19`. This requires: + - If `sign`, `v \mod 2^{255}-19 \ne 486661`. + - If `!sign`, `(v + 1) \mod 2^{255}-19 \ne (v * 486662) \mod 2^{255}-19` which is equivalent to + `(v * 486661) \mod 2^{255}-19 \ne 1`. + + In summary, if `sign`, `v` must not `486661`, and if `!sign`, `v` must not be the + multiplicative inverse of `486661`. Since `v` is the output of a hash function, this should + have negligible probability. Additionally, since the definition of `sign` is dependent on `v`, + it may be truly impossible to reach. + */ + Y *= Z.invert().expect("if sign, v was 486661. 
if !sign, v was 486661^{-1}"); let mut bytes = Y.to_repr(); bytes[31] |= sign.unwrap_u8() << 7; - decompress_point(bytes).unwrap().mul_by_cofactor() + decompress_point(bytes).expect("point from hash-to-curve wasn't on-curve").mul_by_cofactor() } diff --git a/networks/monero/generators/src/lib.rs b/networks/monero/generators/src/lib.rs index ebff93cc..6256eecf 100644 --- a/networks/monero/generators/src/lib.rs +++ b/networks/monero/generators/src/lib.rs @@ -28,7 +28,7 @@ fn keccak256(data: &[u8]) -> [u8; 32] { #[allow(non_snake_case)] pub static H: LazyLock = LazyLock::new(|| { decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes())) - .unwrap() + .expect("known on-curve point wasn't on-curve") .mul_by_cofactor() }); @@ -78,11 +78,11 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators { let i = 2 * i; let mut even = preimage.clone(); - write_varint(&i, &mut even).unwrap(); + write_varint(&i, &mut even).expect("write failed but doesn't fail"); res.H.push(hash_to_point(keccak256(&even))); let mut odd = preimage.clone(); - write_varint(&(i + 1), &mut odd).unwrap(); + write_varint(&(i + 1), &mut odd).expect("write failed but doesn't fail"); res.G.push(hash_to_point(keccak256(&odd))); } res diff --git a/networks/monero/io/src/lib.rs b/networks/monero/io/src/lib.rs index 4e43b77e..345a8ed6 100644 --- a/networks/monero/io/src/lib.rs +++ b/networks/monero/io/src/lib.rs @@ -18,10 +18,12 @@ use curve25519_dalek::{ const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000; mod sealed { + use core::fmt::Debug; + /// A trait for a number readable/writable as a VarInt. /// /// This is sealed to prevent unintended implementations. - pub trait VarInt: TryInto + TryFrom + Copy { + pub trait VarInt: TryInto + TryFrom + Copy { const BITS: usize; } @@ -34,6 +36,10 @@ mod sealed { impl VarInt for u64 { const BITS: usize = 64; } + // Don't compile for platforms where `usize` exceeds `u64`, preventing various possible runtime + // exceptions + const _NO_128_BIT_PLATFORMS: [(); (u64::BITS - usize::BITS) as usize] = + [(); (u64::BITS - usize::BITS) as usize]; impl VarInt for usize { const BITS: usize = core::mem::size_of::() * 8; } @@ -43,8 +49,12 @@ mod sealed { /// /// This function will panic if the VarInt exceeds u64::MAX. pub fn varint_len(varint: V) -> usize { - let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap(); - ((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1 + let varint_u64: u64 = varint.try_into().expect("varint exceeded u64"); + ((usize::try_from(u64::BITS - varint_u64.leading_zeros()) + .expect("64 > usize::MAX") + .saturating_sub(1)) / + 7) + + 1 } /// Write a byte. @@ -58,9 +68,10 @@ pub fn write_byte(byte: &u8, w: &mut W) -> io::Result<()> { /// /// This will panic if the VarInt exceeds u64::MAX. 
pub fn write_varint(varint: &U, w: &mut W) -> io::Result<()> { - let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap(); + let mut varint: u64 = (*varint).try_into().expect("varint exceeded u64"); while { - let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap(); + let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)) + .expect("& eight_bit_mask left more than 8 bits set"); varint >>= 7; if varint != 0 { b |= VARINT_CONTINUATION_MASK; @@ -210,7 +221,11 @@ pub fn read_array io::Result, const N: us f: F, r: &mut R, ) -> io::Result<[T; N]> { - read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap()) + read_raw_vec(f, N, r).map(|vec| { + vec.try_into().expect( + "read vector of specific length yet couldn't transform to an array of the same length", + ) + }) } /// Read a length-prefixed variable-length list of elements. diff --git a/networks/monero/primitives/src/lib.rs b/networks/monero/primitives/src/lib.rs index f3e50a01..783275fb 100644 --- a/networks/monero/primitives/src/lib.rs +++ b/networks/monero/primitives/src/lib.rs @@ -124,7 +124,7 @@ impl Commitment { /// defined serialization. pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(32 + 8); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -230,7 +230,7 @@ impl Decoys { pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64)); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } diff --git a/networks/monero/primitives/src/unreduced_scalar.rs b/networks/monero/primitives/src/unreduced_scalar.rs index bf11645a..ae0234f2 100644 --- a/networks/monero/primitives/src/unreduced_scalar.rs +++ b/networks/monero/primitives/src/unreduced_scalar.rs @@ -14,7 +14,8 @@ use monero_io::*; static PRECOMPUTED_SCALARS: LazyLock<[Scalar; 8]> = LazyLock::new(|| { let mut precomputed_scalars = [Scalar::ONE; 8]; for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) { - *scalar = Scalar::from(u8::try_from((i * 2) + 1).unwrap()); + *scalar = + Scalar::from(u64::try_from((i * 2) + 1).expect("enumerating more than u64::MAX / 2 items")); } precomputed_scalars }); @@ -59,7 +60,7 @@ impl UnreducedScalar { let bits = self.as_bits(); let mut naf = [0i8; 256]; for (b, bit) in bits.into_iter().enumerate() { - naf[b] = i8::try_from(bit).unwrap(); + naf[b] = i8::try_from(bit).expect("bit didn't fit within an i8"); } for i in 0 .. 
256 { @@ -129,8 +130,13 @@ impl UnreducedScalar { for &numb in self.non_adjacent_form().iter().rev() { recovered += recovered; match numb.cmp(&0) { - Ordering::Greater => recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).unwrap() / 2], - Ordering::Less => recovered -= PRECOMPUTED_SCALARS[usize::try_from(-numb).unwrap() / 2], + Ordering::Greater => { + recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).expect("positive i8 -> usize") / 2] + } + Ordering::Less => { + recovered -= + PRECOMPUTED_SCALARS[usize::try_from(-numb).expect("negated negative i8 -> usize") / 2] + } Ordering::Equal => (), } } diff --git a/networks/monero/ringct/bulletproofs/build.rs b/networks/monero/ringct/bulletproofs/build.rs index 8eb21eaf..ff07f6da 100644 --- a/networks/monero/ringct/bulletproofs/build.rs +++ b/networks/monero/ringct/bulletproofs/build.rs @@ -16,8 +16,10 @@ fn generators(prefix: &'static str, path: &str) { generators_string.extend( format!( " - curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(), - ", + curve25519_dalek::edwards::CompressedEdwardsY({:?}) + .decompress() + .expect(\"generator from build script wasn't on-curve\"), + ", generator.compress().to_bytes() ) .chars(), @@ -33,10 +35,10 @@ fn generators(prefix: &'static str, path: &str) { let mut H_str = String::new(); serialize(&mut H_str, &generators.H); - let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path); + let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path); let _ = remove_file(&path); File::create(&path) - .unwrap() + .expect("failed to create file in $OUT_DIR") .write_all( format!( " @@ -52,15 +54,15 @@ fn generators(prefix: &'static str, path: &str) { ) .as_bytes(), ) - .unwrap(); + .expect("couldn't write generated source code to file on disk"); } #[cfg(not(feature = "compile-time-generators"))] fn generators(prefix: &'static str, path: &str) { - let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path); + let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path); let _ = remove_file(&path); File::create(&path) - .unwrap() + .expect("failed to create file in $OUT_DIR") .write_all( format!( r#" @@ -71,7 +73,7 @@ fn generators(prefix: &'static str, path: &str) { ) .as_bytes(), ) - .unwrap(); + .expect("couldn't write generated source code to file on disk"); } fn main() { diff --git a/networks/monero/ringct/bulletproofs/src/lib.rs b/networks/monero/ringct/bulletproofs/src/lib.rs index 61a5dbc9..13a52b85 100644 --- a/networks/monero/ringct/bulletproofs/src/lib.rs +++ b/networks/monero/ringct/bulletproofs/src/lib.rs @@ -5,7 +5,6 @@ #![allow(non_snake_case)] use std_shims::{ - vec, vec::Vec, io::{self, Read, Write}, }; @@ -124,9 +123,15 @@ impl Bulletproof { let commitments = outputs.iter().map(Commitment::calculate).collect::>(); Ok(Bulletproof::Original( OriginalStatement::new(&commitments) - .unwrap() - .prove(rng, OriginalWitness::new(outputs).unwrap()) - .unwrap(), + .expect("failed to create statement despite checking amount of commitments") + .prove( + rng, + OriginalWitness::new(outputs) + .expect("failed to create witness despite checking amount of commitments"), + ) + .expect( + "failed to prove Bulletproof::Original despite ensuring statement/witness consistency", + ), )) } @@ -144,9 +149,15 @@ impl Bulletproof { let commitments = outputs.iter().map(Commitment::calculate).collect::>(); Ok(Bulletproof::Plus( PlusStatement::new(&commitments) - .unwrap() - .prove(rng, 
&Zeroizing::new(PlusWitness::new(outputs).unwrap())) - .unwrap(), + .expect("failed to create statement despite checking amount of commitments") + .prove( + rng, + &Zeroizing::new( + PlusWitness::new(outputs) + .expect("failed to create witness despite checking amount of commitments"), + ), + ) + .expect("failed to prove Bulletproof::Plus despite ensuring statement/witness consistency"), )) } @@ -255,8 +266,8 @@ impl Bulletproof { /// Serialize a Bulletproof(+) to a `Vec`. pub fn serialize(&self) -> Vec { - let mut serialized = vec![]; - self.write(&mut serialized).unwrap(); + let mut serialized = Vec::with_capacity(512); + self.write(&mut serialized).expect("write failed but doesn't fail"); serialized } diff --git a/networks/monero/ringct/bulletproofs/src/original/inner_product.rs b/networks/monero/ringct/bulletproofs/src/original/inner_product.rs index a72ddf81..283064eb 100644 --- a/networks/monero/ringct/bulletproofs/src/original/inner_product.rs +++ b/networks/monero/ringct/bulletproofs/src/original/inner_product.rs @@ -174,7 +174,11 @@ impl IpStatement { R_vec.push(R * INV_EIGHT()); // Now that we've calculate L, R, transcript them to receive x (26-27) - transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap()); + transcript = Self::transcript_L_R( + transcript, + *L_vec.last().expect("couldn't get last L_vec despite always being non-empty"), + *R_vec.last().expect("couldn't get last R_vec despite always being non-empty"), + ); let x = transcript; let x_inv = x.invert(); diff --git a/networks/monero/ringct/bulletproofs/src/original/mod.rs b/networks/monero/ringct/bulletproofs/src/original/mod.rs index f001bc9b..1a5d034b 100644 --- a/networks/monero/ringct/bulletproofs/src/original/mod.rs +++ b/networks/monero/ringct/bulletproofs/src/original/mod.rs @@ -227,8 +227,11 @@ impl<'a> AggregateRangeStatement<'a> { let x_ip = transcript; let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip) - .prove(transcript, IpWitness::new(l, r).unwrap()) - .unwrap(); + .prove( + transcript, + IpWitness::new(l, r).expect("Bulletproofs::Original created an invalid IpWitness"), + ) + .expect("Bulletproofs::Original failed to prove the inner-product"); let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip }; #[cfg(debug_assertions)] diff --git a/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs b/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs index e3d4bc92..6468cdf1 100644 --- a/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs +++ b/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs @@ -106,7 +106,9 @@ impl<'a> AggregateRangeStatement<'a> { let mut d = ScalarVector::new(mn); for j in 1 ..= V.len() { - z_pow.push(*z_pow.last().unwrap() * z_pow[0]); + z_pow.push( + *z_pow.last().expect("couldn't get last z_pow despite always being non-empty") * z_pow[0], + ); d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1])); } @@ -229,8 +231,15 @@ impl<'a> AggregateRangeStatement<'a> { Some(AggregateRangeProof { A, wip: WipStatement::new(generators, A_hat, y) - .prove(rng, transcript, &Zeroizing::new(WipWitness::new(a_l, a_r, alpha).unwrap())) - .unwrap(), + .prove( + rng, + transcript, + &Zeroizing::new( + WipWitness::new(a_l, a_r, alpha) + .expect("Bulletproofs::Plus created an invalid WipWitness"), + ), + ) + .expect("Bulletproof::Plus failed to prove the weighted inner-product"), }) } diff --git a/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs 
b/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs index 4c838840..5b3c25c2 100644 --- a/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs +++ b/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs @@ -230,7 +230,9 @@ impl WipStatement { let c_l = a1.clone().weighted_inner_product(&b2, &y); let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y); - let y_inv_n_hat = y_inv.pop().unwrap(); + let y_inv_n_hat = y_inv + .pop() + .expect("couldn't pop y_inv despite y_inv being of same length as times iterated"); let mut L_terms = (a1.clone() * y_inv_n_hat) .0 @@ -331,7 +333,9 @@ impl WipStatement { let mut res = Vec::with_capacity(y.len()); res.push(inv_y); while res.len() < y.len() { - res.push(inv_y * res.last().unwrap()); + res.push( + inv_y * res.last().expect("couldn't get last inv_y despite inv_y always being non-empty"), + ); } res }; diff --git a/networks/monero/ringct/clsag/src/lib.rs b/networks/monero/ringct/clsag/src/lib.rs index 454c34fd..c427342c 100644 --- a/networks/monero/ringct/clsag/src/lib.rs +++ b/networks/monero/ringct/clsag/src/lib.rs @@ -199,9 +199,10 @@ fn core( // (c_p * I) + (c_c * D) + (s_i * PH) let R = match A_c1 { Mode::Sign(..) => EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D, &PH]), - Mode::Verify(..) => { - images_precomp.as_ref().unwrap().vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]) - } + Mode::Verify(..) => images_precomp + .as_ref() + .expect("value populated when verifying wasn't populated") + .vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]), }; to_hash.truncate(((2 * n) + 3) * 32); diff --git a/networks/monero/ringct/clsag/src/multisig.rs b/networks/monero/ringct/clsag/src/multisig.rs index 2cd7ea6e..5fc098ad 100644 --- a/networks/monero/ringct/clsag/src/multisig.rs +++ b/networks/monero/ringct/clsag/src/multisig.rs @@ -223,7 +223,7 @@ impl Algorithm for ClsagMultisig { self .mask_recv .take() - .unwrap() + .expect("image was none multiple times, despite setting to Some on first iteration") .recv() .ok_or(FrostError::InternalError("CLSAG mask was not provided"))?, ); @@ -243,7 +243,8 @@ impl Algorithm for ClsagMultisig { // Accumulate the interpolated share let interpolated_key_image_share = addendum.key_image_share * lagrange::(l, view.included()); - *self.image.as_mut().unwrap() += interpolated_key_image_share; + *self.image.as_mut().expect("image populated on first iteration wasn't Some") += + interpolated_key_image_share; self .key_image_shares @@ -272,14 +273,15 @@ impl Algorithm for ClsagMultisig { // opening of the commitment being re-randomized (and what it's re-randomized to) let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses")); - self.msg_hash = Some(msg_hash.try_into().expect("CLSAG message hash should be 32-bytes")); + let msg_hash = msg_hash.try_into().expect("CLSAG message hash should be 32-bytes"); + self.msg_hash = Some(msg_hash); let sign_core = Clsag::sign_core( &mut rng, &self.image.expect("verifying a share despite never processing any addendums").0, &self.context, self.mask.expect("mask wasn't set"), - self.msg_hash.as_ref().unwrap(), + &msg_hash, nonce_sums[0][0].0, nonce_sums[0][1].0, ); @@ -301,7 +303,7 @@ impl Algorithm for ClsagMultisig { _: &[Vec], sum: dfg::Scalar, ) -> Option { - let interim = self.interim.as_ref().unwrap(); + let interim = self.interim.as_ref().expect("verify called before sign_share"); let mut clsag = interim.clsag.clone(); // We produced shares as `r - p x`, yet the signature is 
actually `r - p x - c x` // Substract `c x` (saved as `c`) now @@ -311,7 +313,7 @@ impl Algorithm for ClsagMultisig { self.context.decoys.ring(), &self.image.expect("verifying a signature despite never processing any addendums").0, &interim.pseudo_out, - self.msg_hash.as_ref().unwrap(), + self.msg_hash.as_ref().expect("verify called before sign_share"), ) .is_ok() { @@ -326,7 +328,7 @@ impl Algorithm for ClsagMultisig { nonces: &[Vec], share: dfg::Scalar, ) -> Result, ()> { - let interim = self.interim.as_ref().unwrap(); + let interim = self.interim.as_ref().expect("verify_share called before sign_share"); // For a share `r - p x`, the following two equalities should hold: // - `(r - p x)G == R.0 - pV`, where `V = xG` diff --git a/networks/monero/rpc/simple-request/src/lib.rs b/networks/monero/rpc/simple-request/src/lib.rs index bd52cf01..c6a8ecf5 100644 --- a/networks/monero/rpc/simple-request/src/lib.rs +++ b/networks/monero/rpc/simple-request/src/lib.rs @@ -135,35 +135,13 @@ impl SimpleRequestRpc { }; async fn body_from_response(response: Response<'_>) -> Result, RpcError> { - /* - let length = usize::try_from( - response - .headers() - .get("content-length") - .ok_or(RpcError::InvalidNode("no content-length header"))? - .to_str() - .map_err(|_| RpcError::InvalidNode("non-ascii content-length value"))? - .parse::() - .map_err(|_| RpcError::InvalidNode("non-u32 content-length value"))?, - ) - .unwrap(); - // Only pre-allocate 1 MB so a malicious node which claims a content-length of 1 GB actually - // has to send 1 GB of data to cause a 1 GB allocation - let mut res = Vec::with_capacity(length.max(1024 * 1024)); - let mut body = response.into_body(); - while res.len() < length { - let Some(data) = body.data().await else { break }; - res.extend(data.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?.as_ref()); - } - */ - let mut res = Vec::with_capacity(128); response .body() .await .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))? .read_to_end(&mut res) - .unwrap(); + .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?; Ok(res) } @@ -219,7 +197,12 @@ impl SimpleRequestRpc { })? .to_header_string(), ) - .unwrap(), + .map_err(|_| { + RpcError::InternalError( + "digest-auth challenge response wasn't a valid string for an HTTP header" + .to_string(), + ) + })?, ); } @@ -269,7 +252,7 @@ impl SimpleRequestRpc { ))? } } else { - body_from_response(response.unwrap()).await? + body_from_response(response.expect("no response yet also no error?")).await? } } }); diff --git a/networks/monero/rpc/src/lib.rs b/networks/monero/rpc/src/lib.rs index 3c8d337a..72533465 100644 --- a/networks/monero/rpc/src/lib.rs +++ b/networks/monero/rpc/src/lib.rs @@ -121,7 +121,7 @@ impl FeeRate { /// defined serialization. pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(16); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -139,15 +139,22 @@ impl FeeRate { /// /// This function may panic upon overflow. 
pub fn calculate_fee_from_weight(&self, weight: usize) -> u64 { - let fee = self.per_weight * u64::try_from(weight).unwrap(); + let fee = + self.per_weight * u64::try_from(weight).expect("couldn't convert weight (usize) to u64"); let fee = fee.div_ceil(self.mask) * self.mask; - debug_assert_eq!(weight, self.calculate_weight_from_fee(fee), "Miscalculated weight from fee"); + debug_assert_eq!( + Some(weight), + self.calculate_weight_from_fee(fee), + "Miscalculated weight from fee" + ); fee } /// Calculate the weight from the fee. - pub fn calculate_weight_from_fee(&self, fee: u64) -> usize { - usize::try_from(fee / self.per_weight).unwrap() + /// + /// Returns `None` if the weight would not fit within a `usize`. + pub fn calculate_weight_from_fee(&self, fee: u64) -> Option { + usize::try_from(fee / self.per_weight).ok() } } @@ -272,8 +279,14 @@ pub trait Rpc: Sync + Clone { let res = self .post( route, - if let Some(params) = params { - serde_json::to_string(¶ms).unwrap().into_bytes() + if let Some(params) = params.as_ref() { + serde_json::to_string(params) + .map_err(|e| { + RpcError::InternalError(format!( + "couldn't convert parameters ({params:?}) to JSON: {e:?}" + )) + })? + .into_bytes() } else { vec![] }, @@ -295,7 +308,10 @@ pub trait Rpc: Sync + Clone { async move { let mut req = json!({ "method": method }); if let Some(params) = params { - req.as_object_mut().unwrap().insert("params".into(), params); + req + .as_object_mut() + .expect("accessing object as object failed?") + .insert("params".into(), params); } Ok(self.rpc_call::<_, JsonRpcResponse>("json_rpc", Some(req)).await?.result) } diff --git a/networks/monero/src/block.rs b/networks/monero/src/block.rs index 15a8d1fc..5ab85cc7 100644 --- a/networks/monero/src/block.rs +++ b/networks/monero/src/block.rs @@ -51,7 +51,7 @@ impl BlockHeader { /// Serialize the BlockHeader to a `Vec`. pub fn serialize(&self) -> Vec { let mut serialized = vec![]; - self.write(&mut serialized).unwrap(); + self.write(&mut serialized).expect("write failed but doesn't fail"); serialized } @@ -111,7 +111,7 @@ impl Block { /// Serialize the Block to a `Vec`. 
pub fn serialize(&self) -> Vec { let mut serialized = vec![]; - self.write(&mut serialized).unwrap(); + self.write(&mut serialized).expect("write failed but doesn't fail"); serialized } @@ -122,7 +122,13 @@ impl Block { pub fn serialize_pow_hash(&self) -> Vec { let mut blob = self.header.serialize(); blob.extend_from_slice(&merkle_root(self.miner_transaction.hash(), &self.transactions)); - write_varint(&(1 + u64::try_from(self.transactions.len()).unwrap()), &mut blob).unwrap(); + write_varint( + &(1 + + u64::try_from(self.transactions.len()) + .expect("amount of transactions in block exceeded u64::MAX")), + &mut blob, + ) + .expect("write failed but doesn't fail"); blob } @@ -132,7 +138,11 @@ impl Block { // Monero pre-appends a VarInt of the block-to-hash'ss length before getting the block hash, // but doesn't do this when getting the proof of work hash :) let mut hashing_blob = Vec::with_capacity(9 + hashable.len()); - write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap(); + write_varint( + &u64::try_from(hashable.len()).expect("length of block hash's preimage exceeded u64::MAX"), + &mut hashing_blob, + ) + .expect("write failed but doesn't fail"); hashing_blob.append(&mut hashable); let hash = keccak256(hashing_blob); diff --git a/networks/monero/src/merkle.rs b/networks/monero/src/merkle.rs index 6c689618..2be31df2 100644 --- a/networks/monero/src/merkle.rs +++ b/networks/monero/src/merkle.rs @@ -28,7 +28,7 @@ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] { let mut paired_hashes = Vec::with_capacity(overage); while let Some(left) = rightmost.next() { - let right = rightmost.next().unwrap(); + let right = rightmost.next().expect("rightmost is of even length"); paired_hashes.push(keccak256([left.as_ref(), &right].concat())); } drop(rightmost); diff --git a/networks/monero/src/ringct.rs b/networks/monero/src/ringct.rs index 4cc42570..198a73fd 100644 --- a/networks/monero/src/ringct.rs +++ b/networks/monero/src/ringct.rs @@ -326,7 +326,9 @@ impl RctPrunable { /// Serialize the RctPrunable to a `Vec`. pub fn serialize(&self, rct_type: RctType) -> Vec { let mut serialized = vec![]; - self.write(&mut serialized, rct_type).unwrap(); + self + .write(&mut serialized, rct_type) + .expect("write failed but doesn't fail"); serialized } @@ -441,7 +443,7 @@ impl RctProofs { /// Serialize the RctProofs to a `Vec`. pub fn serialize(&self) -> Vec { let mut serialized = vec![]; - self.write(&mut serialized).unwrap(); + self.write(&mut serialized).expect("write failed but doesn't fail"); serialized } diff --git a/networks/monero/src/transaction.rs b/networks/monero/src/transaction.rs index 8d998ec6..5f8db9a7 100644 --- a/networks/monero/src/transaction.rs +++ b/networks/monero/src/transaction.rs @@ -53,7 +53,7 @@ impl Input { /// Serialize the Input to a `Vec`. pub fn serialize(&self) -> Vec { let mut res = vec![]; - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -106,7 +106,7 @@ impl Output { /// Write the Output to a `Vec`. pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(8 + 1 + 32); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -163,7 +163,7 @@ impl Timelock { /// Serialize the Timelock to a `Vec`. 
pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(1); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -259,8 +259,8 @@ impl TransactionPrefix { fn hash(&self, version: u64) -> [u8; 32] { let mut buf = vec![]; - write_varint(&version, &mut buf).unwrap(); - self.write(&mut buf).unwrap(); + write_varint(&version, &mut buf).expect("write failed but doesn't fail"); + self.write(&mut buf).expect("write failed but doesn't fail"); keccak256(buf) } } @@ -451,7 +451,7 @@ impl Transaction

{ /// Write the Transaction to a `Vec`. pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(2048); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -493,15 +493,16 @@ impl Transaction

{ let mut buf = Vec::with_capacity(512); // We don't use `self.write` as that may write the signatures (if this isn't pruned) - write_varint(&self.version(), &mut buf).unwrap(); - prefix.write(&mut buf).unwrap(); + write_varint(&self.version(), &mut buf) + .expect("write failed but doesn't fail"); + prefix.write(&mut buf).expect("write failed but doesn't fail"); // We explicitly write the signatures ourselves here let PrunableHash::V1(signatures) = prunable else { panic!("hashing v1 TX with non-v1 prunable data") }; for signature in signatures { - signature.write(&mut buf).unwrap(); + signature.write(&mut buf).expect("write failed but doesn't fail"); } keccak256(buf) @@ -513,7 +514,10 @@ impl Transaction

{ if let Some(proofs) = proofs { let mut buf = Vec::with_capacity(512); - proofs.base().write(&mut buf, proofs.rct_type()).unwrap(); + proofs + .base() + .write(&mut buf, proofs.rct_type()) + .expect("write failed but doesn't fail"); hashes.extend(keccak256(&buf)); } else { // Serialization of RctBase::Null @@ -540,7 +544,10 @@ impl Transaction { Transaction::V2 { proofs, .. } => { self.hash_with_prunable_hash(PrunableHash::V2(if let Some(proofs) = proofs { let mut buf = Vec::with_capacity(1024); - proofs.prunable.write(&mut buf, proofs.rct_type()).unwrap(); + proofs + .prunable + .write(&mut buf, proofs.rct_type()) + .expect("write failed but doesn't fail"); keccak256(buf) } else { [0; 32] @@ -563,7 +570,10 @@ impl Transaction { Transaction::V2 { proofs, .. } => self.hash_with_prunable_hash({ let Some(proofs) = proofs else { None? }; let mut buf = Vec::with_capacity(1024); - proofs.prunable.signature_write(&mut buf).unwrap(); + proofs + .prunable + .signature_write(&mut buf) + .expect("write failed but doesn't fail"); PrunableHash::V2(keccak256(buf)) }), }) diff --git a/networks/monero/wallet/address/src/base58check.rs b/networks/monero/wallet/address/src/base58check.rs index 003f21f1..45264bdf 100644 --- a/networks/monero/wallet/address/src/base58check.rs +++ b/networks/monero/wallet/address/src/base58check.rs @@ -76,8 +76,10 @@ pub(crate) fn decode(data: &str) -> Option> { break; } } + let used_bytes = used_bytes + .expect("chunk of bounded length exhaustively searched but couldn't find matching length"); // Only push on the used bytes - res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes.unwrap()) ..]); + res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes) ..]); } Some(res) diff --git a/networks/monero/wallet/address/src/lib.rs b/networks/monero/wallet/address/src/lib.rs index 194d4469..cc463630 100644 --- a/networks/monero/wallet/address/src/lib.rs +++ b/networks/monero/wallet/address/src/lib.rs @@ -357,21 +357,21 @@ pub struct Address { impl fmt::Debug for Address { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - let hex = |bytes: &[u8]| -> String { + let hex = |bytes: &[u8]| -> Result { let mut res = String::with_capacity(2 + (2 * bytes.len())); res.push_str("0x"); for b in bytes { - write!(&mut res, "{b:02x}").unwrap(); + write!(&mut res, "{b:02x}")?; } - res + Ok(res) }; fmt .debug_struct("Address") .field("network", &self.network) .field("kind", &self.kind) - .field("spend", &hex(&self.spend.compress().to_bytes())) - .field("view", &hex(&self.view.compress().to_bytes())) + .field("spend", &hex(&self.spend.compress().to_bytes())?) + .field("view", &hex(&self.view.compress().to_bytes())?) 
// This is not a real field yet is the most valuable thing to know when debugging .field("(address)", &self.to_string()) .finish() @@ -389,7 +389,8 @@ impl fmt::Display for Address { if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind { let features_uint = (u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress); - write_varint(&features_uint, &mut data).unwrap(); + write_varint(&features_uint, &mut data) + .expect("write failed but doesn't fail"); } if let Some(id) = self.kind.payment_id() { data.extend(id); diff --git a/networks/monero/wallet/src/decoys.rs b/networks/monero/wallet/src/decoys.rs index 4a4faae5..fc776948 100644 --- a/networks/monero/wallet/src/decoys.rs +++ b/networks/monero/wallet/src/decoys.rs @@ -17,7 +17,7 @@ use crate::{ WalletOutput, }; -const RECENT_WINDOW: usize = 15; +const RECENT_WINDOW: u64 = 15; const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME; #[allow(clippy::cast_precision_loss)] const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64; @@ -27,7 +27,7 @@ async fn select_n( rpc: &impl DecoyRpc, height: usize, real_output: u64, - ring_len: usize, + ring_len: u8, fingerprintable_deterministic: bool, ) -> Result, RpcError> { if height < DEFAULT_LOCK_WINDOW { @@ -48,8 +48,9 @@ async fn select_n( // This assumes that each miner TX had one output (as sane) and checks we have sufficient // outputs even when excluding them (due to their own timelock requirements) // Considering this a temporal error for very new chains, it's sufficiently sane to have - if highest_output_exclusive_bound.saturating_sub(u64::try_from(COINBASE_LOCK_WINDOW).unwrap()) < - u64::try_from(ring_len).unwrap() + if highest_output_exclusive_bound.saturating_sub( + u64::try_from(COINBASE_LOCK_WINDOW).expect("coinbase lock window exceeds 2^{64}"), + ) < u64::from(ring_len) { Err(RpcError::InternalError("not enough decoy candidates".to_string()))?; } @@ -67,7 +68,7 @@ async fn select_n( let mut do_not_select = HashSet::new(); do_not_select.insert(real_output); - let decoy_count = ring_len - 1; + let decoy_count = usize::from(ring_len - 1); let mut res = Vec::with_capacity(decoy_count); let mut iters = 0; @@ -87,8 +88,9 @@ async fn select_n( // We check both that we aren't at the maximum amount of iterations and that the not-yet // selected candidates exceed the amount of candidates necessary to trigger the next iteration if (iters == MAX_ITERS) || - ((highest_output_exclusive_bound - u64::try_from(do_not_select.len()).unwrap()) < - u64::try_from(ring_len).unwrap()) + ((highest_output_exclusive_bound - + u64::try_from(do_not_select.len()).expect("amount of ignored decoys exceeds 2^{64}")) < + u64::from(ring_len)) { Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?; } @@ -99,13 +101,18 @@ async fn select_n( // Use a gamma distribution, as Monero does // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45 // /src/wallet/wallet2.cpp#L142-L143 - let mut age = Gamma::::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp(); + let mut age = Gamma::::new(19.28, 1.0 / 1.61) + .expect("constant Gamma distribution could no longer be created") + .sample(rng) + .exp(); #[allow(clippy::cast_precision_loss)] if age > TIP_APPLICATION { age -= TIP_APPLICATION; } else { // f64 does not have try_from available, which is why these are written with `as` - age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64; + age = (rng.next_u64() % + 
(RECENT_WINDOW * u64::try_from(BLOCK_TIME).expect("BLOCK_TIME exceeded u64::MAX"))) + as f64; } #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)] @@ -135,7 +142,11 @@ async fn select_n( candidates.push(real_output); // Sort candidates so the real spends aren't the ones at the end candidates.sort(); - Some(candidates.binary_search(&real_output).unwrap()) + Some( + candidates + .binary_search(&real_output) + .expect("selected a ring which didn't include the real spend"), + ) } else { None }; @@ -169,11 +180,15 @@ async fn select_n( async fn select_decoys( rng: &mut R, rpc: &impl DecoyRpc, - ring_len: usize, + ring_len: u8, height: usize, input: &WalletOutput, fingerprintable_deterministic: bool, ) -> Result { + if ring_len == 0 { + Err(RpcError::InternalError("requesting a ring of length 0".to_string()))?; + } + // Select all decoys for this transaction, assuming we generate a sane transaction // We should almost never naturally generate an insane transaction, hence why this doesn't // bother with an overage @@ -215,10 +230,13 @@ async fn select_decoys( Decoys::new( offsets, // Binary searches for the real spend since we don't know where it sorted to - u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)).unwrap(), + // TODO: Define our own collection whose `len` function returns `u8` to ensure this bound + // with types + u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)) + .expect("ring of size <= u8::MAX had an index exceeding u8::MAX"), ring.into_iter().map(|output| output.1).collect(), ) - .unwrap(), + .expect("selected a syntactically-invalid set of Decoys"), ) } @@ -234,7 +252,7 @@ impl OutputWithDecoys { pub async fn new( rng: &mut (impl Send + Sync + RngCore + CryptoRng), rpc: &impl DecoyRpc, - ring_len: usize, + ring_len: u8, height: usize, output: WalletOutput, ) -> Result { @@ -253,7 +271,7 @@ impl OutputWithDecoys { pub async fn fingerprintable_deterministic_new( rng: &mut (impl Send + Sync + RngCore + CryptoRng), rpc: &impl DecoyRpc, - ring_len: usize, + ring_len: u8, height: usize, output: WalletOutput, ) -> Result { @@ -297,7 +315,7 @@ impl OutputWithDecoys { /// defined serialization. pub fn serialize(&self) -> Vec { let mut serialized = Vec::with_capacity(128); - self.write(&mut serialized).unwrap(); + self.write(&mut serialized).expect("write failed but doesn't fail"); serialized } diff --git a/networks/monero/wallet/src/extra.rs b/networks/monero/wallet/src/extra.rs index 2c0706fd..5041a3cf 100644 --- a/networks/monero/wallet/src/extra.rs +++ b/networks/monero/wallet/src/extra.rs @@ -67,7 +67,7 @@ impl PaymentId { /// Serialize the PaymentId to a `Vec`. pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(1 + 8); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -100,7 +100,7 @@ pub enum ExtraField { /// /// This is used within miner transactions who are merge-mining Monero to specify the foreign /// block they mined. - MergeMining(usize, [u8; 32]), + MergeMining(u64, [u8; 32]), /// The additional transaction keys. /// /// These are the per-output commitments to the randomness used for deriving outputs. @@ -132,7 +132,7 @@ impl ExtraField { } ExtraField::MergeMining(height, merkle) => { w.write_all(&[3])?; - write_varint(&u64::try_from(*height).unwrap(), w)?; + write_varint(height, w)?; w.write_all(merkle)?; } ExtraField::PublicKeys(keys) => { @@ -150,7 +150,7 @@ impl ExtraField { /// Serialize the ExtraField to a `Vec`. 
pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(1 + 8); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } @@ -280,7 +280,7 @@ impl Extra { /// Serialize the Extra to a `Vec`. pub fn serialize(&self) -> Vec { let mut buf = vec![]; - self.write(&mut buf).unwrap(); + self.write(&mut buf).expect("write failed but doesn't fail"); buf } diff --git a/networks/monero/wallet/src/lib.rs b/networks/monero/wallet/src/lib.rs index 035c4036..703ba69c 100644 --- a/networks/monero/wallet/src/lib.rs +++ b/networks/monero/wallet/src/lib.rs @@ -61,7 +61,7 @@ impl SharedKeyDerivations { // If Gen, this should be the only input, making this loop somewhat pointless // This works and even if there were somehow multiple inputs, it'd be a false negative Input::Gen(height) => { - write_varint(height, &mut u).unwrap(); + write_varint(height, &mut u).expect("write failed but doesn't fail"); } Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()), } @@ -83,7 +83,8 @@ impl SharedKeyDerivations { // || o { let output_derivation: &mut Vec = output_derivation.as_mut(); - write_varint(&o, output_derivation).unwrap(); + write_varint(&o, output_derivation) + .expect("write failed but doesn't fail"); } let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0]; @@ -145,7 +146,11 @@ impl SharedKeyDerivations { let amount_scalar = Scalar::from_bytes_mod_order(*amount) - amount_shared_sec_scalar; // d2b from rctTypes.cpp - let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap()); + let amount = u64::from_le_bytes( + amount_scalar.to_bytes()[.. 8] + .try_into() + .expect("32-byte array couldn't have an 8-byte slice taken"), + ); Commitment::new(mask, amount) } diff --git a/networks/monero/wallet/src/output.rs b/networks/monero/wallet/src/output.rs index 82924b15..933d7ae7 100644 --- a/networks/monero/wallet/src/output.rs +++ b/networks/monero/wallet/src/output.rs @@ -18,7 +18,7 @@ use crate::{ #[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] pub(crate) struct AbsoluteId { pub(crate) transaction: [u8; 32], - pub(crate) index_in_transaction: u32, + pub(crate) index_in_transaction: u64, } impl core::fmt::Debug for AbsoluteId { @@ -46,7 +46,7 @@ impl AbsoluteId { /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol /// defined serialization. fn read(r: &mut R) -> io::Result { - Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u32(r)? }) + Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u64(r)? }) } } @@ -128,11 +128,11 @@ impl OutputData { self.commitment.write(w) } - /* + /* Commented as it's unused, due to self being private /// Serialize the OutputData to a `Vec`. 
pub fn serialize(&self) -> Vec { let mut res = Vec::with_capacity(32 + 32 + 40); - self.write(&mut res).unwrap(); + self.write(&mut res).expect("write failed but doesn't fail"); res } */ @@ -194,9 +194,17 @@ impl Metadata { w.write_all(&[0])?; } - w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?; + w.write_all( + &u64::try_from(self.arbitrary_data.len()) + .expect("amount of arbitrary data chunks exceeded u64::MAX") + .to_le_bytes(), + )?; for part in &self.arbitrary_data { - w.write_all(&[u8::try_from(part.len()).unwrap()])?; + // TODO: Define our own collection whose `len` function returns `u8` to ensure this bound + // with types + w.write_all(&[ + u8::try_from(part.len()).expect("piece of arbitrary data exceeded max length of u8::MAX") + ])?; w.write_all(part)?; } Ok(()) @@ -224,7 +232,7 @@ impl Metadata { payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None }, arbitrary_data: { let mut data = vec![]; - for _ in 0 .. read_u32(r)? { + for _ in 0 .. read_u64(r)? { let len = read_byte(r)?; data.push(read_raw_vec(read_byte, usize::from(len), r)?); } @@ -260,7 +268,7 @@ impl WalletOutput { } /// The index of the output within the transaction. - pub fn index_in_transaction(&self) -> u32 { + pub fn index_in_transaction(&self) -> u64 { self.absolute_id.index_in_transaction } @@ -349,7 +357,7 @@ impl WalletOutput { /// defined serialization. pub fn serialize(&self) -> Vec { let mut serialized = Vec::with_capacity(128); - self.write(&mut serialized).unwrap(); + self.write(&mut serialized).expect("write failed but doesn't fail"); serialized } diff --git a/networks/monero/wallet/src/scan.rs b/networks/monero/wallet/src/scan.rs index 19f4d50f..342f000d 100644 --- a/networks/monero/wallet/src/scan.rs +++ b/networks/monero/wallet/src/scan.rs @@ -228,14 +228,11 @@ impl InternalScanner { // Decrypt the payment ID let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh)); + let o = u64::try_from(o).expect("couldn't convert output index (usize) to u64"); + res.push(WalletOutput { - absolute_id: AbsoluteId { - transaction: tx_hash, - index_in_transaction: o.try_into().unwrap(), - }, - relative_id: RelativeId { - index_on_blockchain: output_index_for_first_ringct_output + u64::try_from(o).unwrap(), - }, + absolute_id: AbsoluteId { transaction: tx_hash, index_in_transaction: o }, + relative_id: RelativeId { index_on_blockchain: output_index_for_first_ringct_output + o }, data: OutputData { key: output_key, key_offset, commitment }, metadata: Metadata { additional_timelock: tx.prefix().additional_timelock, @@ -295,7 +292,8 @@ impl InternalScanner { // Update the RingCT starting index for the next TX if matches!(tx, Transaction::V2 { .. }) { - output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len()).unwrap() + output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len()) + .expect("couldn't convert amount of outputs (usize) to u64") } } diff --git a/networks/monero/wallet/src/send/mod.rs b/networks/monero/wallet/src/send/mod.rs index d3adb166..8b4e3e34 100644 --- a/networks/monero/wallet/src/send/mod.rs +++ b/networks/monero/wallet/src/send/mod.rs @@ -446,7 +446,7 @@ impl SignableTransaction { /// defined serialization. 
pub fn serialize(&self) -> Vec { let mut buf = Vec::with_capacity(256); - self.write(&mut buf).unwrap(); + self.write(&mut buf).expect("write failed but doesn't fail"); buf } @@ -553,9 +553,13 @@ impl SignableTransaction { let mut tx = tx.transaction_without_signatures(); // Sign the CLSAGs - let clsags_and_pseudo_outs = - Clsag::sign(rng, clsag_signs, mask_sum, tx.signature_hash().unwrap()) - .map_err(SendError::ClsagError)?; + let clsags_and_pseudo_outs = Clsag::sign( + rng, + clsag_signs, + mask_sum, + tx.signature_hash().expect("signing a transaction which isn't signed?"), + ) + .map_err(SendError::ClsagError)?; // Fill in the CLSAGs/pseudo-outs let inputs_len = tx.prefix().inputs.len(); diff --git a/networks/monero/wallet/src/send/multisig.rs b/networks/monero/wallet/src/send/multisig.rs index b3d58ba5..f78f0fcd 100644 --- a/networks/monero/wallet/src/send/multisig.rs +++ b/networks/monero/wallet/src/send/multisig.rs @@ -251,7 +251,7 @@ impl SignMachine for TransactionSignMachine { } let tx = tx.transaction_without_signatures(); - let msg = tx.signature_hash().unwrap(); + let msg = tx.signature_hash().expect("signing a transaction which isn't signed?"); // Iterate over each CLSAG calling sign let mut shares = Vec::with_capacity(to_sign.len()); diff --git a/networks/monero/wallet/src/send/tx.rs b/networks/monero/wallet/src/send/tx.rs index 0ebd47f1..ae6490d7 100644 --- a/networks/monero/wallet/src/send/tx.rs +++ b/networks/monero/wallet/src/send/tx.rs @@ -73,7 +73,9 @@ impl SignableTransaction { { let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes(); let mut id_vec = Vec::with_capacity(1 + 8); - PaymentId::Encrypted(id).write(&mut id_vec).unwrap(); + PaymentId::Encrypted(id) + .write(&mut id_vec) + .expect("write failed but doesn't fail"); extra.push_nonce(id_vec); } else { /* @@ -96,7 +98,9 @@ impl SignableTransaction { .expect("multiple change outputs?"); let mut id_vec = Vec::with_capacity(1 + 8); // The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask - PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap(); + PaymentId::Encrypted(*payment_id_xor) + .write(&mut id_vec) + .expect("write failed but doesn't fail"); extra.push_nonce(id_vec); } } @@ -109,7 +113,7 @@ impl SignableTransaction { } let mut serialized = Vec::with_capacity(32 * amount_of_keys); - extra.write(&mut serialized).unwrap(); + extra.write(&mut serialized).expect("write failed but doesn't fail"); serialized } @@ -180,7 +184,8 @@ impl SignableTransaction { push_scalar(&mut bp); } for _ in 0 .. 2 { - write_varint(&lr_len, &mut bp).unwrap(); + write_varint(&lr_len, &mut bp) + .expect("write failed but doesn't fail"); for _ in 0 .. lr_len { push_point(&mut bp); } @@ -204,7 +209,8 @@ impl SignableTransaction { push_scalar(&mut bp); } for _ in 0 .. 2 { - write_varint(&lr_len, &mut bp).unwrap(); + write_varint(&lr_len, &mut bp) + .expect("write failed but doesn't fail"); for _ in 0 .. 
lr_len { push_point(&mut bp); } @@ -261,7 +267,8 @@ impl SignableTransaction { break; } } - weight_and_fee.unwrap() + weight_and_fee + .expect("length of highest possible fee was greater than highest possible fee length") } } diff --git a/networks/monero/wallet/src/send/tx_keys.rs b/networks/monero/wallet/src/send/tx_keys.rs index 52db422a..8ede4ff3 100644 --- a/networks/monero/wallet/src/send/tx_keys.rs +++ b/networks/monero/wallet/src/send/tx_keys.rs @@ -21,7 +21,9 @@ fn seeded_rng( mut input_keys: Vec, ) -> ChaCha20Rng { // Apply the DST - let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]); + let mut transcript = Zeroizing::new(vec![ + u8::try_from(dst.len()).expect("internal RNG with constant DST had a too-long DST specified") + ]); transcript.extend(dst); // Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript @@ -116,12 +118,12 @@ impl SignableTransaction { fn transaction_keys(&self) -> (Zeroizing, Vec>) { let mut tx_keys = TransactionKeys::new(&self.outgoing_view_key, self.input_keys()); - let tx_key = tx_keys.next().unwrap(); + let tx_key = tx_keys.next().expect("TransactionKeys (never-ending) was exhausted"); let mut additional_keys = vec![]; if self.should_use_additional_keys() { for _ in 0 .. self.payments.len() { - additional_keys.push(tx_keys.next().unwrap()); + additional_keys.push(tx_keys.next().expect("TransactionKeys (never-ending) was exhausted")); } } (tx_key, additional_keys) diff --git a/networks/monero/wallet/tests/runner/mod.rs b/networks/monero/wallet/tests/runner/mod.rs index b83f939a..361e2f8c 100644 --- a/networks/monero/wallet/tests/runner/mod.rs +++ b/networks/monero/wallet/tests/runner/mod.rs @@ -21,7 +21,7 @@ use monero_wallet::{ mod builder; pub use builder::SignableTransactionBuilder; -pub fn ring_len(rct_type: RctType) -> usize { +pub fn ring_len(rct_type: RctType) -> u8 { match rct_type { RctType::ClsagBulletproof => 11, RctType::ClsagBulletproofPlus => 16, @@ -118,7 +118,7 @@ pub fn check_weight_and_fee(tx: &Transaction, fee_rate: FeeRate) { let fee = proofs.base.fee; let weight = tx.weight(); - let expected_weight = fee_rate.calculate_weight_from_fee(fee); + let expected_weight = fee_rate.calculate_weight_from_fee(fee).unwrap(); assert_eq!(weight, expected_weight); let expected_fee = fee_rate.calculate_fee_from_weight(weight); From cbab9486c61cbd0d0d0b35f6f1974f204fd418d6 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 21:35:55 -0400 Subject: [PATCH 010/116] Clarify messages in non-debug assertions --- networks/monero/primitives/src/lib.rs | 6 +++++- networks/monero/ringct/bulletproofs/src/plus/mod.rs | 5 ++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/networks/monero/primitives/src/lib.rs b/networks/monero/primitives/src/lib.rs index 783275fb..0b6ed790 100644 --- a/networks/monero/primitives/src/lib.rs +++ b/networks/monero/primitives/src/lib.rs @@ -73,7 +73,11 @@ pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar { // This library acknowledges its practical impossibility of it occurring, and doesn't bother to // code in logic to handle it. That said, if it ever occurs, something must happen in order to // not generate/verify a proof we believe to be valid when it isn't - assert!(scalar != Scalar::ZERO, "ZERO HASH: {:?}", data.as_ref()); + assert!( + scalar != Scalar::ZERO, + "keccak256(preimage) \\cong 0 \\mod l! 
Preimage: {:?}", + data.as_ref() + ); scalar } diff --git a/networks/monero/ringct/bulletproofs/src/plus/mod.rs b/networks/monero/ringct/bulletproofs/src/plus/mod.rs index 6b7eb820..465b878a 100644 --- a/networks/monero/ringct/bulletproofs/src/plus/mod.rs +++ b/networks/monero/ringct/bulletproofs/src/plus/mod.rs @@ -65,7 +65,10 @@ impl BpPlusGenerators { pub(crate) fn reduce(&self, generators: usize) -> Self { // Round to the nearest power of 2 let generators = padded_pow_of_2(generators); - assert!(generators <= self.g_bold.len()); + assert!( + generators <= self.g_bold.len(), + "instantiated with less generators than application required" + ); BpPlusGenerators { g_bold: &self.g_bold[.. generators], h_bold: &self.h_bold[.. generators] } } From 188fcc3cb434293c184551a99c930eb1aec9aa08 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 22:30:34 -0400 Subject: [PATCH 011/116] Remove potentially-failing unchecked arithmetic operations for ones which error In response to 9.13.3. Requires a bump to Rust 1.82 to take advantage of `Option::is_none_or`. --- .github/nightly-version | 2 +- networks/monero/primitives/src/lib.rs | 9 ++++++- .../ringct/bulletproofs/src/batch_verifier.rs | 5 ++++ .../monero/ringct/bulletproofs/src/lib.rs | 5 +++- networks/monero/rpc/src/lib.rs | 26 ++++++++++++++++--- networks/monero/src/ringct.rs | 8 +++++- networks/monero/wallet/Cargo.toml | 2 +- .../monero/wallet/address/src/base58check.rs | 5 ++-- networks/monero/wallet/src/decoys.rs | 2 +- networks/monero/wallet/src/scan.rs | 8 +++++- networks/monero/wallet/src/send/mod.rs | 7 ++--- rust-toolchain.toml | 2 +- substrate/client/Cargo.toml | 2 +- 13 files changed, 65 insertions(+), 18 deletions(-) diff --git a/.github/nightly-version b/.github/nightly-version index 9f98e758..e67d5713 100644 --- a/.github/nightly-version +++ b/.github/nightly-version @@ -1 +1 @@ -nightly-2024-07-01 +nightly-2024-09-01 diff --git a/networks/monero/primitives/src/lib.rs b/networks/monero/primitives/src/lib.rs index 0b6ed790..47112d1d 100644 --- a/networks/monero/primitives/src/lib.rs +++ b/networks/monero/primitives/src/lib.rs @@ -166,7 +166,14 @@ impl Decoys { /// `offsets` are the positions of each ring member within the Monero blockchain, offset from the /// prior member's position (with the initial ring member offset from 0). pub fn new(offsets: Vec, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option { - if (offsets.len() != ring.len()) || (usize::from(signer_index) >= ring.len()) { + if (offsets.len() > usize::from(u8::MAX)) || + (offsets.len() != ring.len()) || + (usize::from(signer_index) >= ring.len()) + { + None?; + } + // Check these offsets form representable positions + if offsets.iter().copied().try_fold(0, u64::checked_add).is_none() { None?; } Some(Decoys { offsets, signer_index, ring }) diff --git a/networks/monero/ringct/bulletproofs/src/batch_verifier.rs b/networks/monero/ringct/bulletproofs/src/batch_verifier.rs index 3898801c..103e6bf7 100644 --- a/networks/monero/ringct/bulletproofs/src/batch_verifier.rs +++ b/networks/monero/ringct/bulletproofs/src/batch_verifier.rs @@ -23,6 +23,11 @@ pub(crate) struct InternalBatchVerifier { impl InternalBatchVerifier { #[must_use] fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool { + /* + Technically, this following line can overflow, and joining these `Vec`s _may_ panic if + they're individually acceptable lengths yet their sum isn't. This is so negligible, due to + the amount of memory required, it's dismissed. 
+ */ let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len(); let mut scalars = Vec::with_capacity(capacity); let mut points = Vec::with_capacity(capacity); diff --git a/networks/monero/ringct/bulletproofs/src/lib.rs b/networks/monero/ringct/bulletproofs/src/lib.rs index 13a52b85..29aa7093 100644 --- a/networks/monero/ringct/bulletproofs/src/lib.rs +++ b/networks/monero/ringct/bulletproofs/src/lib.rs @@ -86,13 +86,16 @@ impl Bulletproof { /// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone /// accordingly doesn't properly represent the burden of the proof. Monero 'claws back' some of /// the weight lost by using a proof smaller than it is fast to compensate for this. + /// + /// If the amount of outputs specified exceeds the maximum amount of outputs, the result for the + /// maximum amount of outputs will be returned. // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/ // src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124 pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) { #[allow(non_snake_case)] let mut LR_len = 0; let mut n_padded_outputs = 1; - while n_padded_outputs < n_outputs { + while n_padded_outputs < n_outputs.min(MAX_COMMITMENTS) { LR_len += 1; n_padded_outputs = 1 << LR_len; } diff --git a/networks/monero/rpc/src/lib.rs b/networks/monero/rpc/src/lib.rs index 72533465..995ccb1a 100644 --- a/networks/monero/rpc/src/lib.rs +++ b/networks/monero/rpc/src/lib.rs @@ -1146,7 +1146,13 @@ impl DecoyRpc for R { )))?; } - let expected_len = if zero_zero_case { 2 } else { (to - start_height) + 1 }; + let expected_len = if zero_zero_case { + 2 + } else { + (to - start_height).checked_add(1).ok_or_else(|| { + RpcError::InternalError("expected length of distribution exceeded usize".to_string()) + })? 
+ }; // Yet this is actually a height if expected_len != distribution.len() { Err(RpcError::InvalidNode(format!( @@ -1161,6 +1167,20 @@ impl DecoyRpc for R { if zero_zero_case { distribution.pop(); } + + // Check the distribution monotonically increases + { + let mut monotonic = 0; + for d in &distribution { + if *d < monotonic { + Err(RpcError::InvalidNode( + "received output distribution didn't increase monotonically".to_string(), + ))?; + } + monotonic = *d; + } + } + Ok(distribution) } } @@ -1271,8 +1291,8 @@ impl DecoyRpc for R { // https://github.com/monero-project/monero/blob // /cc73fe71162d564ffda8e549b79a350bca53c454/src/cryptonote_core // /blockchain.cpp#L3836 - ((out.height + DEFAULT_LOCK_WINDOW) <= height) && - (Timelock::Block(height - 1 + ACCEPTED_TIMELOCK_DELTA) >= + out.height.checked_add(DEFAULT_LOCK_WINDOW).is_some_and(|locked| locked <= height) && + (Timelock::Block(height.wrapping_add(ACCEPTED_TIMELOCK_DELTA - 1)) >= txs[i].prefix().additional_timelock) } else { out.unlocked diff --git a/networks/monero/src/ringct.rs b/networks/monero/src/ringct.rs index 198a73fd..220f289d 100644 --- a/networks/monero/src/ringct.rs +++ b/networks/monero/src/ringct.rs @@ -343,7 +343,13 @@ impl RctPrunable { Ok(match rct_type { RctType::AggregateMlsagBorromean => RctPrunable::AggregateMlsagBorromean { borromean: read_raw_vec(BorromeanRange::read, outputs, r)?, - mlsag: Mlsag::read(ring_length, inputs + 1, r)?, + mlsag: Mlsag::read( + ring_length, + inputs.checked_add(1).ok_or_else(|| { + io::Error::other("reading a MLSAG for more inputs than representable") + })?, + r, + )?, }, RctType::MlsagBorromean => RctPrunable::MlsagBorromean { borromean: read_raw_vec(BorromeanRange::read, outputs, r)?, diff --git a/networks/monero/wallet/Cargo.toml b/networks/monero/wallet/Cargo.toml index af787e49..c0c34606 100644 --- a/networks/monero/wallet/Cargo.toml +++ b/networks/monero/wallet/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet" authors = ["Luke Parker "] edition = "2021" -rust-version = "1.80" +rust-version = "1.82" [package.metadata.docs.rs] all-features = true diff --git a/networks/monero/wallet/address/src/base58check.rs b/networks/monero/wallet/address/src/base58check.rs index 45264bdf..08113bbe 100644 --- a/networks/monero/wallet/address/src/base58check.rs +++ b/networks/monero/wallet/address/src/base58check.rs @@ -94,11 +94,10 @@ pub(crate) fn encode_check(mut data: Vec) -> String { // Decode an arbitrary-length stream of data, with a checksum pub(crate) fn decode_check(data: &str) -> Option> { - if data.len() < CHECKSUM_LEN { + let mut res = decode(data)?; + if res.len() < CHECKSUM_LEN { None?; } - - let mut res = decode(data)?; let checksum_pos = res.len() - CHECKSUM_LEN; if keccak256(&res[.. checksum_pos])[.. CHECKSUM_LEN] != res[checksum_pos ..] 
{ None?; diff --git a/networks/monero/wallet/src/decoys.rs b/networks/monero/wallet/src/decoys.rs index fc776948..96621054 100644 --- a/networks/monero/wallet/src/decoys.rs +++ b/networks/monero/wallet/src/decoys.rs @@ -18,7 +18,7 @@ use crate::{ }; const RECENT_WINDOW: u64 = 15; -const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME; +const BLOCKS_PER_YEAR: usize = (365 * 24 * 60 * 60) / BLOCK_TIME; #[allow(clippy::cast_precision_loss)] const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64; diff --git a/networks/monero/wallet/src/scan.rs b/networks/monero/wallet/src/scan.rs index 342f000d..79caf3f2 100644 --- a/networks/monero/wallet/src/scan.rs +++ b/networks/monero/wallet/src/scan.rs @@ -232,7 +232,13 @@ impl InternalScanner { res.push(WalletOutput { absolute_id: AbsoluteId { transaction: tx_hash, index_in_transaction: o }, - relative_id: RelativeId { index_on_blockchain: output_index_for_first_ringct_output + o }, + relative_id: RelativeId { + index_on_blockchain: output_index_for_first_ringct_output.checked_add(o).ok_or( + ScanError::InvalidScannableBlock( + "transaction's output's index isn't representable as a u64", + ), + )?, + }, data: OutputData { key: output_key, key_offset, commitment }, metadata: Metadata { additional_timelock: tx.prefix().additional_timelock, diff --git a/networks/monero/wallet/src/send/mod.rs b/networks/monero/wallet/src/send/mod.rs index 8b4e3e34..d0747789 100644 --- a/networks/monero/wallet/src/send/mod.rs +++ b/networks/monero/wallet/src/send/mod.rs @@ -305,12 +305,13 @@ impl SignableTransaction { .payments .iter() .filter_map(|payment| match payment { - InternalPayment::Payment(_, amount) => Some(amount), + InternalPayment::Payment(_, amount) => Some(*amount), InternalPayment::Change(_) => None, }) - .sum::(); + .try_fold(0, u64::checked_add); + let payments_amount = payments_amount.ok_or(SendError::TooManyOutputs)?; let (weight, necessary_fee) = self.weight_and_necessary_fee(); - if in_amount < (payments_amount + necessary_fee) { + if payments_amount.checked_add(necessary_fee).is_none_or(|total_out| in_amount < total_out) { Err(SendError::NotEnoughFunds { inputs: in_amount, outputs: payments_amount, diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 73cb338c..8303f09b 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.80" +channel = "1.82" targets = ["wasm32-unknown-unknown"] profile = "minimal" components = ["rust-src", "rustfmt", "clippy"] diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml index 629312c0..2186b26c 100644 --- a/substrate/client/Cargo.toml +++ b/substrate/client/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/substrate/client" authors = ["Luke Parker "] keywords = ["serai"] edition = "2021" -rust-version = "1.74" +rust-version = "1.82" [package.metadata.docs.rs] all-features = true From d5077ae9662f54b6a8a399a1ecdfefa0115b8cf7 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 23:54:20 -0400 Subject: [PATCH 012/116] Respond to 13.1.1. Uses Zeroizing for username/password in monero-simple-request-rpc. 
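As a sketch of the pattern this patch adopts (not code from the repository), credentials can be held in `Zeroizing<String>` so they are wiped from memory when dropped; the helper name, URL, and parsing below are illustrative assumptions only, and a real caller would also zeroize the URL itself, as this patch does:

use zeroize::Zeroizing;

// Hypothetical helper: split "user:pass@host" credentials into zeroizing buffers,
// so the secrets are zeroed out of memory when the values are dropped.
fn parse_credentials(url: &str) -> Option<(Zeroizing<String>, Zeroizing<String>)> {
  let (userpass, _host) = url.split_once('@')?;
  let (user, pass) = userpass.split_once(':').unwrap_or((userpass, ""));
  Some((Zeroizing::new(user.to_string()), Zeroizing::new(pass.to_string())))
}

fn main() {
  // Illustrative URL only
  if let Some((user, pass)) = parse_credentials("user:hunter2@node.example.invalid:18081") {
    assert_eq!(user.as_str(), "user");
    assert_eq!(pass.as_str(), "hunter2");
  }
}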
--- Cargo.lock | 1 + networks/monero/rpc/simple-request/Cargo.toml | 1 + networks/monero/rpc/simple-request/src/lib.rs | 15 ++++++++------- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3b55f7b3..9b41db79 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5008,6 +5008,7 @@ dependencies = [ "monero-rpc", "simple-request", "tokio", + "zeroize", ] [[package]] diff --git a/networks/monero/rpc/simple-request/Cargo.toml b/networks/monero/rpc/simple-request/Cargo.toml index a31b14e3..9698c7d6 100644 --- a/networks/monero/rpc/simple-request/Cargo.toml +++ b/networks/monero/rpc/simple-request/Cargo.toml @@ -17,6 +17,7 @@ workspace = true [dependencies] hex = { version = "0.4", default-features = false, features = ["alloc"] } +zeroize = { version = "^1.5", default-features = false, features = ["alloc", "std"] } digest_auth = { version = "0.3", default-features = false } simple-request = { path = "../../../../common/request", version = "0.1", default-features = false, features = ["tls"] } tokio = { version = "1", default-features = false } diff --git a/networks/monero/rpc/simple-request/src/lib.rs b/networks/monero/rpc/simple-request/src/lib.rs index c6a8ecf5..0b53e209 100644 --- a/networks/monero/rpc/simple-request/src/lib.rs +++ b/networks/monero/rpc/simple-request/src/lib.rs @@ -7,6 +7,7 @@ use std::{sync::Arc, io::Read, time::Duration}; use tokio::sync::Mutex; +use zeroize::Zeroizing; use digest_auth::{WwwAuthenticateHeader, AuthContext}; use simple_request::{ hyper::{StatusCode, header::HeaderValue, Request}, @@ -25,8 +26,8 @@ enum Authentication { // This ensures that if a nonce is requested, another caller doesn't make a request invalidating // it Authenticated { - username: String, - password: String, + username: Zeroizing, + password: Zeroizing, #[allow(clippy::type_complexity)] connection: Arc, Client)>>, }, @@ -77,7 +78,7 @@ impl SimpleRequestRpc { ) -> Result { let authentication = if url.contains('@') { // Parse out the username and password - let url_clone = url; + let url_clone = Zeroizing::new(url); let split_url = url_clone.split('@').collect::>(); if split_url.len() != 2 { Err(RpcError::ConnectionError("invalid amount of login specifications".to_string()))?; @@ -114,8 +115,8 @@ impl SimpleRequestRpc { .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?, )?; Authentication::Authenticated { - username: split_userpass[0].to_string(), - password: (*split_userpass.get(1).unwrap_or(&"")).to_string(), + username: Zeroizing::new(split_userpass[0].to_string()), + password: Zeroizing::new((*split_userpass.get(1).unwrap_or(&"")).to_string()), connection: Arc::new(Mutex::new((challenge, client))), } } else { @@ -180,8 +181,8 @@ impl SimpleRequestRpc { *cnonce += 1; let mut context = AuthContext::new_post::<_, _, _, &[u8]>( - username, - password, + <_ as AsRef>::as_ref(username), + <_ as AsRef>::as_ref(password), "/".to_string() + route, None, ); From d363b1c17385700fba2b9f5eeaed786fe2094121 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 23:56:13 -0400 Subject: [PATCH 013/116] Fix #630 --- networks/monero/wallet/src/send/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/networks/monero/wallet/src/send/mod.rs b/networks/monero/wallet/src/send/mod.rs index d0747789..f40de06c 100644 --- a/networks/monero/wallet/src/send/mod.rs +++ b/networks/monero/wallet/src/send/mod.rs @@ -321,7 +321,7 @@ impl SignableTransaction { // The limit is half the no-penalty block size // 
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/wallet/wallet2.cpp#L110766-L11085 + // /src/wallet/wallet2.cpp#L11076-L11085 // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 // /src/cryptonote_config.h#L61 // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 From c7f825a192edeadfe26014153549bc2fcec2f30b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 23:56:48 -0400 Subject: [PATCH 014/116] Rename Bulletproof::calculate_bp_clawback to Bulletproof::calculate_clawback --- networks/monero/ringct/bulletproofs/src/lib.rs | 8 ++++---- networks/monero/src/transaction.rs | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/networks/monero/ringct/bulletproofs/src/lib.rs b/networks/monero/ringct/bulletproofs/src/lib.rs index 29aa7093..13a7a14e 100644 --- a/networks/monero/ringct/bulletproofs/src/lib.rs +++ b/networks/monero/ringct/bulletproofs/src/lib.rs @@ -91,7 +91,7 @@ impl Bulletproof { /// maximum amount of outputs will be returned. // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/ // src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124 - pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) { + pub fn calculate_clawback(plus: bool, n_outputs: usize) -> (usize, usize) { #[allow(non_snake_case)] let mut LR_len = 0; let mut n_padded_outputs = 1; @@ -101,15 +101,15 @@ impl Bulletproof { } LR_len += LOG_COMMITMENT_BITS; - let mut bp_clawback = 0; + let mut clawback = 0; if n_padded_outputs > 2 { let fields = Bulletproof::bp_fields(plus); let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2; let size = (fields + (2 * LR_len)) * 32; - bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5; + clawback = ((base * n_padded_outputs) - size) * 4 / 5; } - (bp_clawback, LR_len) + (clawback, LR_len) } /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof. diff --git a/networks/monero/src/transaction.rs b/networks/monero/src/transaction.rs index 5f8db9a7..d70da5ff 100644 --- a/networks/monero/src/transaction.rs +++ b/networks/monero/src/transaction.rs @@ -609,7 +609,7 @@ impl Transaction { blob_size } else { blob_size + - Bulletproof::calculate_bp_clawback( + Bulletproof::calculate_clawback( bp_plus, match self { Transaction::V1 { .. 
} => panic!("v1 transaction was BP(+)"), From 387615705ca8bb95a84ef96c714ced3d96c643a2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 23:57:19 -0400 Subject: [PATCH 015/116] Fix #643 --- networks/monero/rpc/src/lib.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/networks/monero/rpc/src/lib.rs b/networks/monero/rpc/src/lib.rs index 995ccb1a..d59ba821 100644 --- a/networks/monero/rpc/src/lib.rs +++ b/networks/monero/rpc/src/lib.rs @@ -401,6 +401,11 @@ pub trait Rpc: Sync + Clone { txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::>()?, ))?; } + if txs.txs.len() != this_count { + Err(RpcError::InvalidNode( + "not missing any transactions yet didn't return all transactions".to_string(), + ))?; + } all_txs.extend(txs.txs); } From bb726b58af5e4d56092504d0bb77d50d6aa67147 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 23:57:38 -0400 Subject: [PATCH 016/116] Fix #654 --- networks/monero/ringct/clsag/src/lib.rs | 71 +++++++++++++++++-------- 1 file changed, 48 insertions(+), 23 deletions(-) diff --git a/networks/monero/ringct/clsag/src/lib.rs b/networks/monero/ringct/clsag/src/lib.rs index c427342c..d312698c 100644 --- a/networks/monero/ringct/clsag/src/lib.rs +++ b/networks/monero/ringct/clsag/src/lib.rs @@ -89,8 +89,8 @@ impl ClsagContext { #[allow(clippy::large_enum_variant)] enum Mode { - Sign(usize, EdwardsPoint, EdwardsPoint), - Verify(Scalar), + Sign { signer_index: u8, A: EdwardsPoint, AH: EdwardsPoint }, + Verify { c1: Scalar, D_serialized: EdwardsPoint }, } // Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences @@ -101,17 +101,17 @@ fn core( I: &EdwardsPoint, pseudo_out: &EdwardsPoint, msg_hash: &[u8; 32], - D: &EdwardsPoint, + D_torsion_free: &EdwardsPoint, s: &[Scalar], A_c1: &Mode, ) -> ((EdwardsPoint, Scalar, Scalar), Scalar) { let n = ring.len(); let images_precomp = match A_c1 { - Mode::Sign(..) => None, - Mode::Verify(..) => Some(VartimeEdwardsPrecomputation::new([I, D])), + Mode::Sign { .. } => None, + Mode::Verify { .. } => Some(VartimeEdwardsPrecomputation::new([I, D_torsion_free])), }; - let D_INV_EIGHT = D * INV_EIGHT(); + let D_inv_eight = D_torsion_free * INV_EIGHT(); // Generate the transcript // Instead of generating multiple, a single transcript is created and then edited as needed @@ -140,7 +140,14 @@ fn core( } to_hash.extend(I.compress().to_bytes()); - to_hash.extend(D_INV_EIGHT.compress().to_bytes()); + match A_c1 { + Mode::Sign { .. } => { + to_hash.extend(D_inv_eight.compress().to_bytes()); + } + Mode::Verify { D_serialized, .. } => { + to_hash.extend(D_serialized.compress().to_bytes()); + } + } to_hash.extend(pseudo_out.compress().to_bytes()); // mu_P with agg_0 let mu_P = keccak256_to_scalar(&to_hash); @@ -163,15 +170,16 @@ fn core( let end; let mut c; match A_c1 { - Mode::Sign(r, A, AH) => { - start = r + 1; - end = r + n; + Mode::Sign { signer_index, A, AH } => { + let signer_index = usize::from(*signer_index); + start = signer_index + 1; + end = signer_index + n; to_hash.extend(A.compress().to_bytes()); to_hash.extend(AH.compress().to_bytes()); c = keccak256_to_scalar(&to_hash); } - Mode::Verify(c1) => { + Mode::Verify { c1, .. } => { start = 0; end = n; c = *c1; @@ -186,10 +194,10 @@ fn core( // (s_i * G) + (c_p * P_i) + (c_c * C_i) let L = match A_c1 { - Mode::Sign(..) => { + Mode::Sign { .. } => { EdwardsPoint::multiscalar_mul([s[i], c_p, c_c], [ED25519_BASEPOINT_POINT, P[i], C[i]]) } - Mode::Verify(..) => { + Mode::Verify { .. 
} => { G_PRECOMP().vartime_mixed_multiscalar_mul([s[i]], [c_p, c_c], [P[i], C[i]]) } }; @@ -198,8 +206,10 @@ fn core( // (c_p * I) + (c_c * D) + (s_i * PH) let R = match A_c1 { - Mode::Sign(..) => EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D, &PH]), - Mode::Verify(..) => images_precomp + Mode::Sign { .. } => { + EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D_torsion_free, &PH]) + } + Mode::Verify { .. } => images_precomp .as_ref() .expect("value populated when verifying wasn't populated") .vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]), @@ -217,7 +227,7 @@ fn core( } // This first tuple is needed to continue signing, the latter is the c to be tested/worked with - ((D_INV_EIGHT, c * mu_P, c * mu_C), c1) + ((D_inv_eight, c * mu_P, c * mu_C), c1) } /// The CLSAG signature, as used in Monero. @@ -250,19 +260,26 @@ impl Clsag { A: EdwardsPoint, AH: EdwardsPoint, ) -> ClsagSignCore { - let r: usize = input.decoys.signer_index().into(); + let signer_index = input.decoys.signer_index(); let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate(); let mask_delta = input.commitment.mask - mask; - let H = hash_to_point(input.decoys.ring()[r][0].compress().0); + let H = hash_to_point(input.decoys.ring()[usize::from(signer_index)][0].compress().0); let D = H * mask_delta; let mut s = Vec::with_capacity(input.decoys.ring().len()); for _ in 0 .. input.decoys.ring().len() { s.push(Scalar::random(rng)); } - let ((D, c_p, c_c), c1) = - core(input.decoys.ring(), I, &pseudo_out, msg_hash, &D, &s, &Mode::Sign(r, A, AH)); + let ((D, c_p, c_c), c1) = core( + input.decoys.ring(), + I, + &pseudo_out, + msg_hash, + &D, + &s, + &Mode::Sign { signer_index, A, AH }, + ); ClsagSignCore { incomplete_clsag: Clsag { D, s, c1 }, @@ -379,12 +396,20 @@ impl Clsag { Err(ClsagError::InvalidImage)?; } - let D = self.D.mul_by_cofactor(); - if D.is_identity() { + let D_torsion_free = self.D.mul_by_cofactor(); + if D_torsion_free.is_identity() { Err(ClsagError::InvalidD)?; } - let (_, c1) = core(ring, I, pseudo_out, msg_hash, &D, &self.s, &Mode::Verify(self.c1)); + let (_, c1) = core( + ring, + I, + pseudo_out, + msg_hash, + &D_torsion_free, + &self.s, + &Mode::Verify { c1: self.c1, D_serialized: self.D }, + ); if c1 != self.c1 { Err(ClsagError::InvalidC1)?; } From 23f986f57a3dca3f9e58c20f36dc77b6e813fbd7 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 8 Aug 2025 23:58:25 -0400 Subject: [PATCH 017/116] Tweak the Substrate runtime as required by the Rust version bump performed --- substrate/runtime/build.rs | 7 ++++++- substrate/runtime/src/abi.rs | 7 ------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/substrate/runtime/build.rs b/substrate/runtime/build.rs index eba52b3e..e9a5dbc7 100644 --- a/substrate/runtime/build.rs +++ b/substrate/runtime/build.rs @@ -1,5 +1,10 @@ use substrate_wasm_builder::WasmBuilder; fn main() { - WasmBuilder::new().with_current_project().export_heap_base().import_memory().build() + WasmBuilder::new() + .with_current_project() + .disable_runtime_version_section_check() + .export_heap_base() + .import_memory() + .build() } diff --git a/substrate/runtime/src/abi.rs b/substrate/runtime/src/abi.rs index 48b4a6c7..71f8271e 100644 --- a/substrate/runtime/src/abi.rs +++ b/substrate/runtime/src/abi.rs @@ -216,7 +216,6 @@ impl TryInto for RuntimeCall { coins::Call::burn_with_instruction { instruction } => { serai_abi::coins::Call::burn_with_instruction { instruction } } - _ => Err(())?, }), RuntimeCall::LiquidityTokens(call) => 
Call::LiquidityTokens(match call { coins::Call::transfer { to, balance } => { @@ -270,7 +269,6 @@ impl TryInto for RuntimeCall { send_to: send_to.into(), } } - _ => Err(())?, }), RuntimeCall::GenesisLiquidity(call) => Call::GenesisLiquidity(match call { genesis_liquidity::Call::remove_coin_liquidity { balance } => { @@ -279,7 +277,6 @@ impl TryInto for RuntimeCall { genesis_liquidity::Call::oraclize_values { values, signature } => { serai_abi::genesis_liquidity::Call::oraclize_values { values, signature } } - _ => Err(())?, }), RuntimeCall::ValidatorSets(call) => Call::ValidatorSets(match call { validator_sets::Call::set_keys { network, removed_participants, key_pair, signature } => { @@ -315,13 +312,11 @@ impl TryInto for RuntimeCall { validator_sets::Call::claim_deallocation { network, session } => { serai_abi::validator_sets::Call::claim_deallocation { network, session } } - _ => Err(())?, }), RuntimeCall::InInstructions(call) => Call::InInstructions(match call { in_instructions::Call::execute_batch { batch } => { serai_abi::in_instructions::Call::execute_batch { batch } } - _ => Err(())?, }), RuntimeCall::Signals(call) => Call::Signals(match call { signals::Call::register_retirement_signal { in_favor_of } => { @@ -339,7 +334,6 @@ impl TryInto for RuntimeCall { signals::Call::stand_against { signal_id, for_network } => { serai_abi::signals::Call::stand_against { signal_id, for_network } } - _ => Err(())?, }), RuntimeCall::Babe(call) => Call::Babe(match call { babe::Call::report_equivocation { equivocation_proof, key_owner_proof } => { @@ -377,7 +371,6 @@ impl TryInto for RuntimeCall { } _ => Err(())?, }), - _ => Err(())?, }) } } From ffa033d9785bbcd36d39bbae333378212b553482 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:02:16 -0400 Subject: [PATCH 018/116] Clarify transcripting for Clsag::verify, Mlsag::verify, as with Clsag::sign --- networks/monero/ringct/clsag/src/lib.rs | 4 ++++ networks/monero/ringct/mlsag/src/lib.rs | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/networks/monero/ringct/clsag/src/lib.rs b/networks/monero/ringct/clsag/src/lib.rs index d312698c..eb2f81b7 100644 --- a/networks/monero/ringct/clsag/src/lib.rs +++ b/networks/monero/ringct/clsag/src/lib.rs @@ -377,6 +377,10 @@ impl Clsag { } /// Verify a CLSAG signature for the provided context. + /// + /// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which + /// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do + /// not use this if you don't know what you're doing. pub fn verify( &self, ring: &[[EdwardsPoint; 2]], diff --git a/networks/monero/ringct/mlsag/src/lib.rs b/networks/monero/ringct/mlsag/src/lib.rs index f5164b88..ac2e482f 100644 --- a/networks/monero/ringct/mlsag/src/lib.rs +++ b/networks/monero/ringct/mlsag/src/lib.rs @@ -122,6 +122,10 @@ impl Mlsag { } /// Verify a MLSAG. + /// + /// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which + /// makes assumptions on what has already been transcripted and bound to within `msg`. Do not use + /// this if you don't know what you're doing. pub fn verify( &self, msg: &[u8; 32], From 336922101fb8b852c9dc73b780cc69e645048e65 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:23:22 -0400 Subject: [PATCH 019/116] Further harden decoy selection It risked panicking if a non-monotonic distribution was returned. 
While the provided RPC code won't return non-monotonic distributions, users are allowed to define their own implementations and override the provided method. Said implementations could omit this required check. --- networks/monero/wallet/src/decoys.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/networks/monero/wallet/src/decoys.rs b/networks/monero/wallet/src/decoys.rs index 96621054..b13179f9 100644 --- a/networks/monero/wallet/src/decoys.rs +++ b/networks/monero/wallet/src/decoys.rs @@ -121,7 +121,9 @@ async fn select_n( // Find which block this points to let i = distribution.partition_point(|s| *s < (highest_output_exclusive_bound - 1 - o)); let prev = i.saturating_sub(1); - let n = distribution[i] - distribution[prev]; + let n = distribution[i].checked_sub(distribution[prev]).ok_or_else(|| { + RpcError::InternalError("RPC returned non-monotonic distribution".to_string()) + })?; if n != 0 { // Select an output from within this block let o = distribution[prev] + (rng.next_u64() % n); From 1143d84e1dface41e429def63f32ec42c27929ae Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:29:27 -0400 Subject: [PATCH 020/116] Remove msbuild from packages to remove when the CI starts Apparently, it's no longer installed by default. --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 5994b723..a9b6afa7 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -7,7 +7,7 @@ runs: - name: Remove unused packages shell: bash run: | - sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli + sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" From 4be506414b98f2b88ea54ffeb27278940454360a Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:35:27 -0400 Subject: [PATCH 021/116] Install cargo machete with Rust 1.85 cargo machete now uses Rust's 2024 edition, and 1.85 was the first to ship it. --- .github/workflows/lint.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index da0bdcfa..50877e84 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -79,5 +79,5 @@ jobs: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - name: Verify all dependencies are in use run: | - cargo install cargo-machete - cargo machete + cargo +1.85 install cargo-machete + cargo +1.85 machete From 885000f9704b17f0f0e36ec55eea46437a1a0587 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:36:57 -0400 Subject: [PATCH 022/116] Add update, upgrade, fix-missing call to Ubuntu build dependencies Attempts to fix a CI failure for some misconfiguration... 
--- .github/actions/build-dependencies/action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index a9b6afa7..f90c1420 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -10,6 +10,7 @@ runs: sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" + sudo apt update -y && sudo apt upgrade -y && sudo apt install --fix-missing -y sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" sudo apt autoremove -y sudo apt clean From 01eb2daa0b104e6a65bc14fe68936c42d8ed4508 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:39:02 -0400 Subject: [PATCH 023/116] Updated dated version of actions/cache --- .github/actions/bitcoin/action.yml | 2 +- .github/actions/monero-wallet-rpc/action.yml | 2 +- .github/actions/monero/action.yml | 2 +- .github/workflows/daily-deny.yml | 2 +- .github/workflows/lint.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/actions/bitcoin/action.yml b/.github/actions/bitcoin/action.yml index 6f628172..21837e53 100644 --- a/.github/actions/bitcoin/action.yml +++ b/.github/actions/bitcoin/action.yml @@ -12,7 +12,7 @@ runs: steps: - name: Bitcoin Daemon Cache id: cache-bitcoind - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 with: path: bitcoin.tar.gz key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }} diff --git a/.github/actions/monero-wallet-rpc/action.yml b/.github/actions/monero-wallet-rpc/action.yml index 2f39c08b..11ca899b 100644 --- a/.github/actions/monero-wallet-rpc/action.yml +++ b/.github/actions/monero-wallet-rpc/action.yml @@ -12,7 +12,7 @@ runs: steps: - name: Monero Wallet RPC Cache id: cache-monero-wallet-rpc - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 with: path: monero-wallet-rpc key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }} diff --git a/.github/actions/monero/action.yml b/.github/actions/monero/action.yml index e37356de..a4302e5a 100644 --- a/.github/actions/monero/action.yml +++ b/.github/actions/monero/action.yml @@ -12,7 +12,7 @@ runs: steps: - name: Monero Daemon Cache id: cache-monerod - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 with: path: /usr/bin/monerod key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }} diff --git a/.github/workflows/daily-deny.yml b/.github/workflows/daily-deny.yml index 5e1d0ac7..16256eed 100644 --- a/.github/workflows/daily-deny.yml +++ b/.github/workflows/daily-deny.yml @@ -12,7 +12,7 @@ jobs: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - name: Advisory Cache - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 with: path: ~/.cargo/advisory-db key: rust-advisory-db diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 50877e84..6469a72a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -46,7 +46,7 @@ jobs: - 
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - name: Advisory Cache - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 with: path: ~/.cargo/advisory-db key: rust-advisory-db From ec3cfd3ab7e3cffb34180304f7d5d54198e09779 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:42:47 -0400 Subject: [PATCH 024/116] Explicitly install python3 after removing various unnecessary packages --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index f90c1420..fc74e26e 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -10,7 +10,7 @@ runs: sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" - sudo apt update -y && sudo apt upgrade -y && sudo apt install --fix-missing -y + sudo apt update -y && sudo apt upgrade -y && sudo apt install --fix-missing -y python3 sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" sudo apt autoremove -y sudo apt clean From ad08b410a8516bfd91148cfafec460883a6512a6 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:43:41 -0400 Subject: [PATCH 025/116] Pin cargo-machete to 0.8.0 to prevent other unexpected CI failures --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 6469a72a..48822091 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -79,5 +79,5 @@ jobs: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - name: Verify all dependencies are in use run: | - cargo +1.85 install cargo-machete + cargo +1.85 install cargo-machete --version 0.8.0 cargo +1.85 machete From 6ae0d9fad796f4a2b1e77d048ba3e38bf0734684 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:46:25 -0400 Subject: [PATCH 026/116] Install cargo deny with Rust 1.85 and pin its version --- .github/workflows/daily-deny.yml | 2 +- .github/workflows/lint.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/daily-deny.yml b/.github/workflows/daily-deny.yml index 16256eed..5ac4a1ac 100644 --- a/.github/workflows/daily-deny.yml +++ b/.github/workflows/daily-deny.yml @@ -18,7 +18,7 @@ jobs: key: rust-advisory-db - name: Install cargo deny - run: cargo install --locked cargo-deny + run: cargo +1.85 install cargo-deny --version =0.18.3 - name: Run cargo deny run: cargo deny -L error --all-features check diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 48822091..ef3bfa70 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -52,7 +52,7 @@ jobs: key: rust-advisory-db - name: Install cargo deny - run: cargo install --locked cargo-deny + run: cargo +1.85 install cargo-deny --version =0.18.3 - name: Run cargo deny run: cargo deny -L error --all-features check @@ -79,5 +79,5 @@ jobs: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - name: Verify all dependencies are in use run: | - cargo +1.85 install cargo-machete --version 0.8.0 + cargo +1.85 install 
cargo-machete --version =0.8.0 cargo +1.85 machete From 4438b51881ee50cc5519c69739eb6b81d68a6184 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:47:26 -0400 Subject: [PATCH 027/116] Expand python packages explicitly installed --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index fc74e26e..08050e23 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -10,7 +10,7 @@ runs: sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" - sudo apt update -y && sudo apt upgrade -y && sudo apt install --fix-missing -y python3 + sudo apt update -y && sudo apt upgrade -y && sudo apt install --fix-missing -y python3 python3.12 libpython3-stdlib sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" sudo apt autoremove -y sudo apt clean From d6f6cf19653d75ff12d18105bc7c322b9d848ff1 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:52:11 -0400 Subject: [PATCH 028/116] Attempt to force remove shim-signed to resolve 'unmet dependencies' issues with shim-signed --- .github/actions/build-dependencies/action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 08050e23..2038f2fa 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -10,7 +10,8 @@ runs: sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" - sudo apt update -y && sudo apt upgrade -y && sudo apt install --fix-missing -y python3 python3.12 libpython3-stdlib + sudo apt update -y && sudo apt install --fix-missing --fix-broken -y python3 python3.12 libpython3-stdlib && sudo apt upgrade -y + sudo apt remove -yf shim-signed sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" sudo apt autoremove -y sudo apt clean From fc850da30e800c7a65fc42527d5c243f7e230e5c Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 00:53:54 -0400 Subject: [PATCH 029/116] Missing --allow-remove-essential flag --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 2038f2fa..4eef948b 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -11,7 +11,7 @@ runs: sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" sudo apt update -y && sudo apt install --fix-missing --fix-broken -y python3 python3.12 libpython3-stdlib && sudo apt upgrade -y - sudo apt remove -yf shim-signed + sudo apt remove -y --allow-remove-essential -f shim-signed sudo 
apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" sudo apt autoremove -y sudo apt clean From ce447558acfc04b0680fba417fd5f75ec0da3dfd Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 01:23:13 -0400 Subject: [PATCH 030/116] Update Rust versions used in orchestration --- orchestration/runtime/Dockerfile | 4 ++-- orchestration/src/main.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/orchestration/runtime/Dockerfile b/orchestration/runtime/Dockerfile index b09c5949..6ecc1f13 100644 --- a/orchestration/runtime/Dockerfile +++ b/orchestration/runtime/Dockerfile @@ -1,5 +1,5 @@ -# rust:1.80.0-slim-bookworm as of July 27th, 2024 (GMT) -FROM --platform=linux/amd64 rust@sha256:37e6f90f98b3afd15c2526d7abb257a1f4cb7d49808fe3729d9d62020b07b544 as deterministic +# rust:1.89.0-slim-bookworm as of July 27th, 2024 (GMT) +FROM --platform=linux/amd64 rust@sha256:703cfb0f80db8eb8a3452bf5151162472039c1b37fe4fb2957b495a6f0104ae7 as deterministic # Move to a Debian package snapshot RUN rm -rf /etc/apt/sources.list.d/debian.sources && \ diff --git a/orchestration/src/main.rs b/orchestration/src/main.rs index 4655be01..f703a300 100644 --- a/orchestration/src/main.rs +++ b/orchestration/src/main.rs @@ -146,7 +146,7 @@ fn build_serai_service(prelude: &str, release: bool, features: &str, package: &s format!( r#" -FROM rust:1.80-slim-bookworm as builder +FROM rust:1.89-slim-bookworm as builder COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload From 8aaf7f7dc6d767c8aacbc8acdfcad4c8413b53ea Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 01:23:43 -0400 Subject: [PATCH 031/116] Remove (presumably) unnecessary command to explicitly install python --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 4eef948b..0463aae3 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -10,8 +10,8 @@ runs: sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" - sudo apt update -y && sudo apt install --fix-missing --fix-broken -y python3 python3.12 libpython3-stdlib && sudo apt upgrade -y sudo apt remove -y --allow-remove-essential -f shim-signed + # This command would fail, due to shim-signed having unmet dependencies, hence its removal sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" sudo apt autoremove -y sudo apt clean From d9f854b08a21a86699939323a9c21dadbe593ba5 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 02:02:30 -0400 Subject: [PATCH 032/116] Attempt to fix install of clang within runtime Dockerfile --- orchestration/runtime/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/orchestration/runtime/Dockerfile b/orchestration/runtime/Dockerfile index 6ecc1f13..4fe23ed6 100644 --- a/orchestration/runtime/Dockerfile +++ b/orchestration/runtime/Dockerfile @@ -8,7 +8,7 @@ RUN rm -rf /etc/apt/sources.list.d/debian.sources && \ apt update # Install dependencies -RUN apt update && apt upgrade && apt install clang -y +RUN apt update && apt upgrade && apt install -y --fix-missing 
--fix-broken clang # Add the wasm toolchain RUN rustup target add wasm32-unknown-unknown From 8297d0679de761adf83bd252e5097c99ae0775ec Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 02:03:36 -0400 Subject: [PATCH 033/116] Update substrate to one with a properly defined panic handler as of modern Rust --- Cargo.lock | 210 ++++++++++++++++++++++++++--------------------------- 1 file changed, 105 insertions(+), 105 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9b41db79..8d80e5d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2660,7 +2660,7 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "fork-tree" version = "3.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", ] @@ -2683,7 +2683,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support", "frame-support-procedural", @@ -2708,7 +2708,7 @@ dependencies = [ [[package]] name = "frame-executive" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support", "frame-system", @@ -2737,7 +2737,7 @@ dependencies = [ [[package]] name = "frame-support" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "bitflags 1.3.2", "environmental", @@ -2770,7 +2770,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "Inflector", "cfg-expr", @@ -2788,7 +2788,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 1.3.1", @@ -2800,7 +2800,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "3.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "proc-macro2", "quote", @@ -2810,7 +2810,7 @@ dependencies = [ [[package]] name = "frame-system" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "cfg-if", "frame-support", @@ -2829,7 +2829,7 @@ dependencies = [ [[package]] name = "frame-system-rpc-runtime-api" version = "4.0.0-dev" 
-source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "sp-api", @@ -2838,7 +2838,7 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support", "parity-scale-codec", @@ -5508,7 +5508,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support", "frame-system", @@ -5522,7 +5522,7 @@ dependencies = [ [[package]] name = "pallet-babe" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-benchmarking", "frame-support", @@ -5546,7 +5546,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-benchmarking", "frame-support", @@ -5569,7 +5569,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support", "frame-system", @@ -5590,7 +5590,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-benchmarking", "frame-support", @@ -5608,7 +5608,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-support", "frame-system", @@ -5624,7 +5624,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "jsonrpsee", "pallet-transaction-payment-rpc-runtime-api", @@ -5640,7 +5640,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -6826,7 +6826,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "4.1.0-dev" -source = 
"git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "log", "sp-core", @@ -6837,7 +6837,7 @@ dependencies = [ [[package]] name = "sc-authority-discovery" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "futures", @@ -6865,7 +6865,7 @@ dependencies = [ [[package]] name = "sc-basic-authorship" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "futures", "futures-timer", @@ -6888,7 +6888,7 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "sc-client-api", @@ -6903,7 +6903,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "memmap2", "sc-chain-spec-derive", @@ -6922,7 +6922,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", @@ -6933,7 +6933,7 @@ dependencies = [ [[package]] name = "sc-cli" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "chrono", @@ -6972,7 +6972,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "fnv", "futures", @@ -6997,7 +6997,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "hash-db", "kvdb", @@ -7023,7 +7023,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "futures", @@ -7048,7 +7048,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ 
"async-trait", "fork-tree", @@ -7084,7 +7084,7 @@ dependencies = [ [[package]] name = "sc-consensus-epochs" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "fork-tree", "parity-scale-codec", @@ -7097,7 +7097,7 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "ahash", "array-bytes", @@ -7138,7 +7138,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "futures", @@ -7161,7 +7161,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", @@ -7183,7 +7183,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "sc-allocator", "sp-maybe-compressed-blob", @@ -7195,7 +7195,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "anyhow", "cfg-if", @@ -7212,7 +7212,7 @@ dependencies = [ [[package]] name = "sc-informant" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "anstyle", "futures", @@ -7228,7 +7228,7 @@ dependencies = [ [[package]] name = "sc-keystore" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "parking_lot 0.12.3", @@ -7242,7 +7242,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "async-channel", @@ -7284,7 +7284,7 @@ dependencies = [ [[package]] name = "sc-network-bitswap" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-channel", "cid", @@ -7304,7 +7304,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.10.0-dev" -source = 
"git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "bitflags 1.3.2", @@ -7321,7 +7321,7 @@ dependencies = [ [[package]] name = "sc-network-gossip" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "ahash", "futures", @@ -7340,7 +7340,7 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "async-channel", @@ -7361,7 +7361,7 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "async-channel", @@ -7395,7 +7395,7 @@ dependencies = [ [[package]] name = "sc-network-transactions" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "futures", @@ -7413,7 +7413,7 @@ dependencies = [ [[package]] name = "sc-offchain" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "bytes", "fnv", @@ -7444,7 +7444,7 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -7453,7 +7453,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "futures", "jsonrpsee", @@ -7483,7 +7483,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "jsonrpsee", "parity-scale-codec", @@ -7502,7 +7502,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "http 0.2.12", "jsonrpsee", @@ -7517,7 +7517,7 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = 
[ "array-bytes", "futures", @@ -7543,7 +7543,7 @@ dependencies = [ [[package]] name = "sc-service" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "directories", @@ -7606,7 +7606,7 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "log", "parity-scale-codec", @@ -7617,7 +7617,7 @@ dependencies = [ [[package]] name = "sc-sysinfo" version = "6.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "futures", "libc", @@ -7636,7 +7636,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "chrono", "futures", @@ -7655,7 +7655,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "anstyle", "chrono", @@ -7683,7 +7683,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", @@ -7694,7 +7694,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "futures", @@ -7720,7 +7720,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "futures", @@ -7736,7 +7736,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-channel", "futures", @@ -8937,7 +8937,7 @@ dependencies = [ [[package]] name = "sp-api" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "hash-db", "log", @@ -8958,7 +8958,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = 
"git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "Inflector", "blake2", @@ -8972,7 +8972,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "23.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "scale-info", @@ -8985,7 +8985,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "16.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "integer-sqrt", "num-traits", @@ -8999,7 +8999,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "scale-info", @@ -9011,7 +9011,7 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "sp-api", "sp-inherents", @@ -9022,7 +9022,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "futures", "log", @@ -9040,7 +9040,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "futures", @@ -9054,7 +9054,7 @@ dependencies = [ [[package]] name = "sp-consensus-babe" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "parity-scale-codec", @@ -9073,7 +9073,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "finality-grandpa", "log", @@ -9091,7 +9091,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "scale-info", @@ -9103,7 +9103,7 @@ dependencies = [ [[package]] name = "sp-core" version = "21.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "array-bytes", "bitflags 1.3.2", @@ -9146,7 +9146,7 @@ dependencies = [ [[package]] 
name = "sp-core-hashing" version = "9.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "blake2b_simd", "byteorder", @@ -9158,7 +9158,7 @@ dependencies = [ [[package]] name = "sp-core-hashing-proc-macro" version = "9.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "quote", "sp-core-hashing", @@ -9168,7 +9168,7 @@ dependencies = [ [[package]] name = "sp-database" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "kvdb", "parking_lot 0.12.3", @@ -9177,7 +9177,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "8.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "proc-macro2", "quote", @@ -9187,7 +9187,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.19.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "environmental", "parity-scale-codec", @@ -9198,7 +9198,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -9212,7 +9212,7 @@ dependencies = [ [[package]] name = "sp-io" version = "23.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "bytes", "ed25519", @@ -9234,7 +9234,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "24.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "lazy_static", "sp-core", @@ -9245,7 +9245,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.27.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", @@ -9257,7 +9257,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "4.1.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "thiserror", "zstd 0.12.4", @@ -9266,7 +9266,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.1.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = 
"git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-metadata", "parity-scale-codec", @@ -9277,7 +9277,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "sp-api", "sp-core", @@ -9287,7 +9287,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "8.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "backtrace", "lazy_static", @@ -9297,7 +9297,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "6.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "rustc-hash 1.1.0", "serde", @@ -9307,7 +9307,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "24.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "either", "hash256-std-hasher", @@ -9329,7 +9329,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "17.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -9347,7 +9347,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "11.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "Inflector", "proc-macro-crate 1.3.1", @@ -9359,7 +9359,7 @@ dependencies = [ [[package]] name = "sp-session" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "scale-info", @@ -9374,7 +9374,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -9388,7 +9388,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.28.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "hash-db", "log", @@ -9409,12 +9409,12 @@ dependencies = [ [[package]] name = "sp-std" version = "8.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" [[package]] name = "sp-storage" version = "13.0.0" -source = 
"git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9427,7 +9427,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "async-trait", "parity-scale-codec", @@ -9440,7 +9440,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "10.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "sp-std", @@ -9452,7 +9452,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "sp-api", "sp-runtime", @@ -9461,7 +9461,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "22.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "ahash", "hash-db", @@ -9484,7 +9484,7 @@ dependencies = [ [[package]] name = "sp-version" version = "22.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9501,7 +9501,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "8.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "proc-macro2", @@ -9512,7 +9512,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "14.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -9525,7 +9525,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "20.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "parity-scale-codec", "scale-info", @@ -9725,12 +9725,12 @@ dependencies = [ [[package]] name = "substrate-build-script-utils" version = "3.0.0" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" [[package]] name = "substrate-frame-rpc-system" version = "4.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "frame-system-rpc-runtime-api", "futures", @@ -9749,7 
+9749,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.10.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "hyper 0.14.30", "log", @@ -9761,7 +9761,7 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "5.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a" +source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "anstyle", "build-helper", From 9536282418a340b6dfe0f2bf9fae0c2b39ffe2aa Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 02:13:33 -0400 Subject: [PATCH 034/116] Update which deb archive to use within the runtime Dockerfile --- orchestration/runtime/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/orchestration/runtime/Dockerfile b/orchestration/runtime/Dockerfile index 4fe23ed6..145b4cab 100644 --- a/orchestration/runtime/Dockerfile +++ b/orchestration/runtime/Dockerfile @@ -1,14 +1,14 @@ -# rust:1.89.0-slim-bookworm as of July 27th, 2024 (GMT) -FROM --platform=linux/amd64 rust@sha256:703cfb0f80db8eb8a3452bf5151162472039c1b37fe4fb2957b495a6f0104ae7 as deterministic +# rust:1.89.0-slim-bookworm as of August 1st, 2025 (GMT) +FROM --platform=linux/amd64 rust@sha256:703cfb0f80db8eb8a3452bf5151162472039c1b37fe4fb2957b495a6f0104ae7 AS deterministic # Move to a Debian package snapshot RUN rm -rf /etc/apt/sources.list.d/debian.sources && \ rm -rf /var/lib/apt/lists/* && \ - echo "deb [arch=amd64] http://snapshot.debian.org/archive/debian/20240301T000000Z bookworm main" > /etc/apt/sources.list && \ + echo "deb [arch=amd64] http://snapshot.debian.org/archive/debian/20250801T000000Z bookworm main" > /etc/apt/sources.list && \ apt update # Install dependencies -RUN apt update && apt upgrade && apt install -y --fix-missing --fix-broken clang +RUN apt update -y && apt upgrade -y && apt install -y clang # Add the wasm toolchain RUN rustup target add wasm32-unknown-unknown From 60e55656aa10615ab63175ca1eb2c3b31d6a6f6c Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 03:37:07 -0400 Subject: [PATCH 035/116] deny --hide-inclusion-graph --- .github/workflows/daily-deny.yml | 2 +- .github/workflows/lint.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/daily-deny.yml b/.github/workflows/daily-deny.yml index 5ac4a1ac..6208e192 100644 --- a/.github/workflows/daily-deny.yml +++ b/.github/workflows/daily-deny.yml @@ -21,4 +21,4 @@ jobs: run: cargo +1.85 install cargo-deny --version =0.18.3 - name: Run cargo deny - run: cargo deny -L error --all-features check + run: cargo deny -L error --all-features check --hide-inclusion-graph diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index ef3bfa70..8d74f5ac 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -55,7 +55,7 @@ jobs: run: cargo +1.85 install cargo-deny --version =0.18.3 - name: Run cargo deny - run: cargo deny -L error --all-features check + run: cargo deny -L error --all-features check --hide-inclusion-graph fmt: runs-on: ubuntu-latest From 5c895efcd0dd6a4cd79a43e5748ee6fcfc0a23af Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 03:37:44 -0400 Subject: [PATCH 036/116] Downgrade tests requiring Docker from Ubuntu latest to Ubuntu 22.04 Attempts to resolve 
containers immediately exiting for some specific test runs. --- .github/workflows/coordinator-tests.yml | 2 +- .github/workflows/full-stack-tests.yml | 2 +- .github/workflows/message-queue-tests.yml | 2 +- .github/workflows/monero-tests.yaml | 4 ++-- .github/workflows/networks-tests.yml | 2 +- .github/workflows/processor-tests.yml | 2 +- .github/workflows/reproducible-runtime.yml | 2 +- .github/workflows/tests.yml | 6 +++--- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/coordinator-tests.yml b/.github/workflows/coordinator-tests.yml index b956f752..c11e3b31 100644 --- a/.github/workflows/coordinator-tests.yml +++ b/.github/workflows/coordinator-tests.yml @@ -29,7 +29,7 @@ on: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/full-stack-tests.yml b/.github/workflows/full-stack-tests.yml index 7bcce866..622f9446 100644 --- a/.github/workflows/full-stack-tests.yml +++ b/.github/workflows/full-stack-tests.yml @@ -11,7 +11,7 @@ on: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/message-queue-tests.yml b/.github/workflows/message-queue-tests.yml index aa6f9328..40198e8b 100644 --- a/.github/workflows/message-queue-tests.yml +++ b/.github/workflows/message-queue-tests.yml @@ -25,7 +25,7 @@ on: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/monero-tests.yaml b/.github/workflows/monero-tests.yaml index a72a85a5..886d576e 100644 --- a/.github/workflows/monero-tests.yaml +++ b/.github/workflows/monero-tests.yaml @@ -18,7 +18,7 @@ on: jobs: # Only run these once since they will be consistent regardless of any node unit-tests: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac @@ -43,7 +43,7 @@ jobs: # Doesn't run unit tests with features as the tests workflow will integration-tests: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 # Test against all supported protocol versions strategy: matrix: diff --git a/.github/workflows/networks-tests.yml b/.github/workflows/networks-tests.yml index 5966a6a8..a807282f 100644 --- a/.github/workflows/networks-tests.yml +++ b/.github/workflows/networks-tests.yml @@ -19,7 +19,7 @@ on: jobs: test-networks: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/processor-tests.yml b/.github/workflows/processor-tests.yml index 5f6043eb..3c15e694 100644 --- a/.github/workflows/processor-tests.yml +++ b/.github/workflows/processor-tests.yml @@ -29,7 +29,7 @@ on: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/reproducible-runtime.yml b/.github/workflows/reproducible-runtime.yml index 2c418bd5..e8f082cc 100644 --- a/.github/workflows/reproducible-runtime.yml +++ b/.github/workflows/reproducible-runtime.yml @@ -25,7 +25,7 @@ on: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 05c25972..615cc273 100644 --- a/.github/workflows/tests.yml +++ 
b/.github/workflows/tests.yml @@ -27,7 +27,7 @@ on: jobs: test-infra: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac @@ -47,7 +47,7 @@ jobs: -p serai-docker-tests test-substrate: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac @@ -77,7 +77,7 @@ jobs: -p serai-node test-serai-client: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac From 8b60feed922cc91d8f2ab97116e0e69074d0e48b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 12:20:58 -0400 Subject: [PATCH 037/116] Normalize FROM AS casing in Dockerfiles --- orchestration/src/main.rs | 6 +++--- orchestration/src/mimalloc.rs | 4 ++-- orchestration/src/networks/bitcoin.rs | 2 +- orchestration/src/networks/ethereum/consensus/lighthouse.rs | 2 +- orchestration/src/networks/ethereum/consensus/nimbus.rs | 2 +- orchestration/src/networks/ethereum/execution/reth.rs | 2 +- orchestration/src/networks/monero.rs | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/orchestration/src/main.rs b/orchestration/src/main.rs index f703a300..1bad77fb 100644 --- a/orchestration/src/main.rs +++ b/orchestration/src/main.rs @@ -92,7 +92,7 @@ fn os(os: Os, additional_root: &str, user: &str) -> String { match os { Os::Alpine => format!( r#" -FROM alpine:latest as image +FROM alpine:latest AS image COPY --from=mimalloc-alpine libmimalloc.so /usr/lib ENV LD_PRELOAD=libmimalloc.so @@ -117,7 +117,7 @@ WORKDIR /home/{user} Os::Debian => format!( r#" -FROM debian:bookworm-slim as image +FROM debian:bookworm-slim AS image COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload @@ -146,7 +146,7 @@ fn build_serai_service(prelude: &str, release: bool, features: &str, package: &s format!( r#" -FROM rust:1.89-slim-bookworm as builder +FROM rust:1.89-slim-bookworm AS builder COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload diff --git a/orchestration/src/mimalloc.rs b/orchestration/src/mimalloc.rs index cb400ca6..51f5bfe7 100644 --- a/orchestration/src/mimalloc.rs +++ b/orchestration/src/mimalloc.rs @@ -2,7 +2,7 @@ use crate::Os; pub fn mimalloc(os: Os) -> &'static str { const ALPINE_MIMALLOC: &str = r#" -FROM alpine:latest as mimalloc-alpine +FROM alpine:latest AS mimalloc-alpine RUN apk update && apk upgrade && apk --no-cache add gcc g++ libc-dev make cmake git RUN git clone https://github.com/microsoft/mimalloc && \ @@ -16,7 +16,7 @@ RUN git clone https://github.com/microsoft/mimalloc && \ "#; const DEBIAN_MIMALLOC: &str = r#" -FROM debian:bookworm-slim as mimalloc-debian +FROM debian:bookworm-slim AS mimalloc-debian RUN apt update && apt upgrade -y && apt install -y gcc g++ make cmake git RUN git clone https://github.com/microsoft/mimalloc && \ diff --git a/orchestration/src/networks/bitcoin.rs b/orchestration/src/networks/bitcoin.rs index 128858ac..df97d7f5 100644 --- a/orchestration/src/networks/bitcoin.rs +++ b/orchestration/src/networks/bitcoin.rs @@ -5,7 +5,7 @@ use crate::{Network, Os, mimalloc, os, write_dockerfile}; pub fn bitcoin(orchestration_path: &Path, network: Network) { #[rustfmt::skip] const DOWNLOAD_BITCOIN: &str = r#" -FROM alpine:latest as bitcoin +FROM alpine:latest AS bitcoin ENV BITCOIN_VERSION=27.1 diff --git a/orchestration/src/networks/ethereum/consensus/lighthouse.rs 
b/orchestration/src/networks/ethereum/consensus/lighthouse.rs index 3434117d..00df59ea 100644 --- a/orchestration/src/networks/ethereum/consensus/lighthouse.rs +++ b/orchestration/src/networks/ethereum/consensus/lighthouse.rs @@ -5,7 +5,7 @@ pub fn lighthouse(network: Network) -> (String, String, String) { #[rustfmt::skip] const DOWNLOAD_LIGHTHOUSE: &str = r#" -FROM alpine:latest as lighthouse +FROM alpine:latest AS lighthouse ENV LIGHTHOUSE_VERSION=5.1.3 diff --git a/orchestration/src/networks/ethereum/consensus/nimbus.rs b/orchestration/src/networks/ethereum/consensus/nimbus.rs index 94649e4f..325263c2 100644 --- a/orchestration/src/networks/ethereum/consensus/nimbus.rs +++ b/orchestration/src/networks/ethereum/consensus/nimbus.rs @@ -20,7 +20,7 @@ pub fn nimbus(network: Network) -> (String, String, String) { #[rustfmt::skip] let download_nimbus = format!(r#" -FROM alpine:latest as nimbus +FROM alpine:latest AS nimbus ENV NIMBUS_VERSION=24.3.0 ENV NIMBUS_COMMIT=dc19b082 diff --git a/orchestration/src/networks/ethereum/execution/reth.rs b/orchestration/src/networks/ethereum/execution/reth.rs index 65b096dd..aeef39d5 100644 --- a/orchestration/src/networks/ethereum/execution/reth.rs +++ b/orchestration/src/networks/ethereum/execution/reth.rs @@ -5,7 +5,7 @@ pub fn reth(network: Network) -> (String, String, String) { #[rustfmt::skip] const DOWNLOAD_RETH: &str = r#" -FROM alpine:latest as reth +FROM alpine:latest AS reth ENV RETH_VERSION=0.2.0-beta.6 diff --git a/orchestration/src/networks/monero.rs b/orchestration/src/networks/monero.rs index 50a8bec9..9b8f24da 100644 --- a/orchestration/src/networks/monero.rs +++ b/orchestration/src/networks/monero.rs @@ -22,7 +22,7 @@ fn monero_internal( #[rustfmt::skip] let download_monero = format!(r#" -FROM alpine:latest as monero +FROM alpine:latest AS monero RUN apk --no-cache add gnupg From 68c7acdbef67527fa3ea4e892a3189c01bbb8811 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 12:26:26 -0400 Subject: [PATCH 038/116] Attempt using rootless Docker in CI via the setup-docker-action Restores using ubuntu-latest. Basically, at some point in the last year the existing Docker e2e tests started failing. I'm unclear if this is an issue with the OS, the docker packages, or what. This just tries to find a solution. 
--- .github/actions/build-dependencies/action.yml | 25 ++++++++++++++++--- .github/workflows/coordinator-tests.yml | 2 +- .github/workflows/full-stack-tests.yml | 2 +- .github/workflows/message-queue-tests.yml | 2 +- .github/workflows/monero-tests.yaml | 4 +-- .github/workflows/networks-tests.yml | 2 +- .github/workflows/processor-tests.yml | 2 +- .github/workflows/reproducible-runtime.yml | 2 +- .github/workflows/tests.yml | 6 ++--- 9 files changed, 33 insertions(+), 14 deletions(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 0463aae3..da86d011 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -13,9 +13,6 @@ runs: sudo apt remove -y --allow-remove-essential -f shim-signed # This command would fail, due to shim-signed having unmet dependencies, hence its removal sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" - sudo apt autoremove -y - sudo apt clean - docker system prune -a --volumes if: runner.os == 'Linux' - name: Remove unused packages @@ -47,5 +44,27 @@ runs: svm install 0.8.25 svm use 0.8.25 + - name: Remove preinstalled Docker + shell: bash + run: | + docker system prune -a --volumes + sudo apt remove -y *docker* + if: runner.os == "Linux" + + - name: Update system dependencies + shell: bash + run: | + sudo apt update -y + sudo apt upgrade -y + sudo apt autoremove -y + sudo apt clean + if: runner.os == "Linux" + + - name: Install rootless Docker + uses: docker/setup-docker-action@b60f85385d03ac8acfca6d9996982511d8620a19 + with: + rootless: true + if: runner.os == "Linux" + # - name: Cache Rust # uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43 diff --git a/.github/workflows/coordinator-tests.yml b/.github/workflows/coordinator-tests.yml index c11e3b31..b956f752 100644 --- a/.github/workflows/coordinator-tests.yml +++ b/.github/workflows/coordinator-tests.yml @@ -29,7 +29,7 @@ on: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/full-stack-tests.yml b/.github/workflows/full-stack-tests.yml index 622f9446..7bcce866 100644 --- a/.github/workflows/full-stack-tests.yml +++ b/.github/workflows/full-stack-tests.yml @@ -11,7 +11,7 @@ on: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/message-queue-tests.yml b/.github/workflows/message-queue-tests.yml index 40198e8b..aa6f9328 100644 --- a/.github/workflows/message-queue-tests.yml +++ b/.github/workflows/message-queue-tests.yml @@ -25,7 +25,7 @@ on: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/monero-tests.yaml b/.github/workflows/monero-tests.yaml index 886d576e..a72a85a5 100644 --- a/.github/workflows/monero-tests.yaml +++ b/.github/workflows/monero-tests.yaml @@ -18,7 +18,7 @@ on: jobs: # Only run these once since they will be consistent regardless of any node unit-tests: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac @@ -43,7 +43,7 @@ jobs: # Doesn't run unit tests with features as the tests workflow will integration-tests: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest # Test against all supported protocol versions strategy: matrix: diff --git 
a/.github/workflows/networks-tests.yml b/.github/workflows/networks-tests.yml index a807282f..5966a6a8 100644 --- a/.github/workflows/networks-tests.yml +++ b/.github/workflows/networks-tests.yml @@ -19,7 +19,7 @@ on: jobs: test-networks: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/processor-tests.yml b/.github/workflows/processor-tests.yml index 3c15e694..5f6043eb 100644 --- a/.github/workflows/processor-tests.yml +++ b/.github/workflows/processor-tests.yml @@ -29,7 +29,7 @@ on: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/reproducible-runtime.yml b/.github/workflows/reproducible-runtime.yml index e8f082cc..2c418bd5 100644 --- a/.github/workflows/reproducible-runtime.yml +++ b/.github/workflows/reproducible-runtime.yml @@ -25,7 +25,7 @@ on: jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 615cc273..05c25972 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -27,7 +27,7 @@ on: jobs: test-infra: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac @@ -47,7 +47,7 @@ jobs: -p serai-docker-tests test-substrate: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac @@ -77,7 +77,7 @@ jobs: -p serai-node test-serai-client: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac From ecb7df85b0d81446751dd2a66ad291c36b69f5f9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 12:38:58 -0400 Subject: [PATCH 039/116] if: runner.os == 'Linux', with single quotes --- .github/actions/build-dependencies/action.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index da86d011..3187cc07 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -49,7 +49,7 @@ runs: run: | docker system prune -a --volumes sudo apt remove -y *docker* - if: runner.os == "Linux" + if: runner.os == 'Linux' - name: Update system dependencies shell: bash @@ -58,13 +58,13 @@ runs: sudo apt upgrade -y sudo apt autoremove -y sudo apt clean - if: runner.os == "Linux" + if: runner.os == 'Linux' - name: Install rootless Docker uses: docker/setup-docker-action@b60f85385d03ac8acfca6d9996982511d8620a19 with: rootless: true - if: runner.os == "Linux" + if: runner.os == 'Linux' # - name: Cache Rust # uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43 From 25324c3cd598c806c70798498613e8360cb94609 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 12:49:56 -0400 Subject: [PATCH 040/116] Add uidmap dependency for rootless Docker --- .github/actions/build-dependencies/action.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 3187cc07..f699d986 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -49,6 +49,8 @@ runs: run: | docker system prune -a --volumes 
sudo apt remove -y *docker* + # Install uidmap which will be required for the explicitly installed Docker + sudo apt install uidmap if: runner.os == 'Linux' - name: Update system dependencies From 54c9d19726dd5184b3c9869deab7eab06f22bbdf Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 13:34:25 -0400 Subject: [PATCH 041/116] Have docker install set host --- .github/actions/build-dependencies/action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index f699d986..94f119f4 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -66,6 +66,7 @@ runs: uses: docker/setup-docker-action@b60f85385d03ac8acfca6d9996982511d8620a19 with: rootless: true + set-host: true if: runner.os == 'Linux' # - name: Cache Rust From 8fcfa6d3d5ebd6b241c504cb9dafbbb0889610c1 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 14:06:27 -0400 Subject: [PATCH 042/116] Add dedicated error for when amounts aren't representable within a u64 Fixes the issue where _inputs_ could still overflow u64::MAX and cause a panic. --- networks/monero/wallet/src/send/mod.rs | 56 ++++++++++++++++++-------- processor/src/networks/monero.rs | 3 ++ 2 files changed, 42 insertions(+), 17 deletions(-) diff --git a/networks/monero/wallet/src/send/mod.rs b/networks/monero/wallet/src/send/mod.rs index f40de06c..e0c59e20 100644 --- a/networks/monero/wallet/src/send/mod.rs +++ b/networks/monero/wallet/src/send/mod.rs @@ -177,6 +177,17 @@ pub enum SendError { /// The created transaction was too large. #[cfg_attr(feature = "std", error("too large of a transaction"))] TooLargeTransaction, + /// The transactions' amounts could not be represented within a `u64`. + #[cfg_attr( + feature = "std", + error("transaction amounts exceed u64::MAX (in {in_amount}, out {out_amount})") + )] + AmountsUnrepresentable { + /// The amount in (via inputs). + in_amount: u128, + /// The amount which would be out (between outputs and the fee). + out_amount: u128, + }, /// This transaction could not pay for itself. 
#[cfg_attr( feature = "std", @@ -300,23 +311,34 @@ impl SignableTransaction { } // Make sure we have enough funds - let in_amount = self.inputs.iter().map(|input| input.commitment().amount).sum::(); - let payments_amount = self - .payments - .iter() - .filter_map(|payment| match payment { - InternalPayment::Payment(_, amount) => Some(*amount), - InternalPayment::Change(_) => None, - }) - .try_fold(0, u64::checked_add); - let payments_amount = payments_amount.ok_or(SendError::TooManyOutputs)?; - let (weight, necessary_fee) = self.weight_and_necessary_fee(); - if payments_amount.checked_add(necessary_fee).is_none_or(|total_out| in_amount < total_out) { - Err(SendError::NotEnoughFunds { - inputs: in_amount, - outputs: payments_amount, - necessary_fee: Some(necessary_fee), - })?; + let weight; + { + let in_amount: u128 = + self.inputs.iter().map(|input| u128::from(input.commitment().amount)).sum(); + let payments_amount: u128 = self + .payments + .iter() + .filter_map(|payment| match payment { + InternalPayment::Payment(_, amount) => Some(u128::from(*amount)), + InternalPayment::Change(_) => None, + }) + .sum(); + let necessary_fee; + (weight, necessary_fee) = self.weight_and_necessary_fee(); + let out_amount = payments_amount + u128::from(necessary_fee); + let in_out_amount = u64::try_from(in_amount) + .and_then(|in_amount| u64::try_from(out_amount).map(|out_amount| (in_amount, out_amount))); + let Ok((in_amount, out_amount)) = in_out_amount else { + Err(SendError::AmountsUnrepresentable { in_amount, out_amount })? + }; + if in_amount < out_amount { + Err(SendError::NotEnoughFunds { + inputs: in_amount, + outputs: u64::try_from(payments_amount) + .expect("total out fit within u64 but not part of total out"), + necessary_fee: Some(necessary_fee), + })?; + } } // The limit is half the no-penalty block size diff --git a/processor/src/networks/monero.rs b/processor/src/networks/monero.rs index 4e70c002..6813a76f 100644 --- a/processor/src/networks/monero.rs +++ b/processor/src/networks/monero.rs @@ -390,6 +390,8 @@ impl Monero { MakeSignableTransactionResult::SignableTransaction(signable) } })), + // AmountsUnrepresentable is unreachable on Monero without 100% of the supply before tail + // emission or fundamental corruption Err(e) => match e { SendError::UnsupportedRctType => { panic!("trying to use an RctType unsupported by monero-wallet") @@ -398,6 +400,7 @@ impl Monero { SendError::InvalidDecoyQuantity | SendError::NoOutputs | SendError::TooManyOutputs | + SendError::AmountsUnrepresentable { .. } | SendError::NoChange | SendError::TooMuchArbitraryData | SendError::TooLargeTransaction | From f2595c4939c10b20fb2bf6007d20162077735390 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 14:15:53 -0400 Subject: [PATCH 043/116] Tweak how subtrate-client tests waits to connect to the Monero node --- substrate/client/tests/common/mod.rs | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/substrate/client/tests/common/mod.rs b/substrate/client/tests/common/mod.rs index 7dda7d0a..797f4536 100644 --- a/substrate/client/tests/common/mod.rs +++ b/substrate/client/tests/common/mod.rs @@ -49,17 +49,24 @@ macro_rules! serai_test { test.provide_container(composition); test.run_async(|ops| async move { // Sleep until the Substrate RPC starts - let serai_rpc = ops.handle(handle).host_port(9944).unwrap(); - let serai_rpc = format!("http://{}:{}", serai_rpc.0, serai_rpc.1); - // Bound execution to 60 seconds - for _ in 0 .. 
60 { + let mut ticks = 0; + let serai_rpc = loop { + // Bound execution to 60 seconds + if ticks > 60 { + panic!("Serai node didn't start within 60 seconds"); + } tokio::time::sleep(core::time::Duration::from_secs(1)).await; + ticks += 1; + + let Ok(serai_rpc) = ops.handle(handle).host_port(9944) else { continue }; + let serai_rpc = format!("http://{}:{}", serai_rpc.0, serai_rpc.1); + let Ok(client) = Serai::new(serai_rpc.clone()).await else { continue }; if client.latest_finalized_block_hash().await.is_err() { continue; } - break; - } + break serai_rpc; + }; #[allow(clippy::redundant_closure_call)] $test(Serai::new(serai_rpc).await.unwrap()).await; }).await; From 35b113768b52b2cf4fa957764ab5416ae03f25fe Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 9 Aug 2025 14:32:14 -0400 Subject: [PATCH 044/116] Attempt downgrading docker from .28 to .27 --- .github/actions/build-dependencies/action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 94f119f4..a4afc66e 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -67,6 +67,7 @@ runs: with: rootless: true set-host: true + version: type=image,tag=27.5.1 if: runner.os == 'Linux' # - name: Cache Rust From 9f1c5268a5df34d6c74fcd08470458760f11d2b7 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 11:25:41 -0400 Subject: [PATCH 045/116] Attempt downgrading Docker from 27 to 26 --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index a4afc66e..97b3bffc 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -67,7 +67,7 @@ runs: with: rootless: true set-host: true - version: type=image,tag=27.5.1 + version: type=image,tag=26.1.4 if: runner.os == 'Linux' # - name: Cache Rust From 75c38560f40d6065933633a859110ed4af1a6df2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 11:34:09 -0400 Subject: [PATCH 046/116] Bookworm -> Bullseye, except for the runtime --- orchestration/src/main.rs | 4 ++-- orchestration/src/mimalloc.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/orchestration/src/main.rs b/orchestration/src/main.rs index 1bad77fb..64271d46 100644 --- a/orchestration/src/main.rs +++ b/orchestration/src/main.rs @@ -117,7 +117,7 @@ WORKDIR /home/{user} Os::Debian => format!( r#" -FROM debian:bookworm-slim AS image +FROM debian:bullseye-slim AS image COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload @@ -146,7 +146,7 @@ fn build_serai_service(prelude: &str, release: bool, features: &str, package: &s format!( r#" -FROM rust:1.89-slim-bookworm AS builder +FROM rust:1.89-slim-bullseye AS builder COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload diff --git a/orchestration/src/mimalloc.rs b/orchestration/src/mimalloc.rs index 51f5bfe7..0f412e12 100644 --- a/orchestration/src/mimalloc.rs +++ b/orchestration/src/mimalloc.rs @@ -16,7 +16,7 @@ RUN git clone https://github.com/microsoft/mimalloc && \ "#; const DEBIAN_MIMALLOC: &str = r#" -FROM debian:bookworm-slim AS mimalloc-debian +FROM debian:bullseye-slim AS mimalloc-debian RUN apt update && apt upgrade -y && apt install -y gcc g++ make cmake git RUN git clone 
https://github.com/microsoft/mimalloc && \ From 0780deb643e604ca80951d6fcfbfd3a2ce8b0608 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 11:37:32 -0400 Subject: [PATCH 047/116] Use three separate commands within the Bitcoin Dockerfile to download the release Attempts to debug which is failing, as right now, the command as a whole is within the CI. --- orchestration/src/networks/bitcoin.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/orchestration/src/networks/bitcoin.rs b/orchestration/src/networks/bitcoin.rs index df97d7f5..b43b4672 100644 --- a/orchestration/src/networks/bitcoin.rs +++ b/orchestration/src/networks/bitcoin.rs @@ -12,9 +12,9 @@ ENV BITCOIN_VERSION=27.1 RUN apk --no-cache add git gnupg # Download Bitcoin -RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz \ - && wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS \ - && wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS.asc +RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz +RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS +RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS.asc # Verify all sigs and check for a valid signature from laanwj -- 71A3 RUN git clone https://github.com/bitcoin-core/guix.sigs && \ From f51d77d26ac865996811dfb6f7c96fcfcc02f0e8 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 11:56:39 -0400 Subject: [PATCH 048/116] Fix tweaked Substrate connection code in serai-client tests --- substrate/client/tests/common/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrate/client/tests/common/mod.rs b/substrate/client/tests/common/mod.rs index 797f4536..560eb8ab 100644 --- a/substrate/client/tests/common/mod.rs +++ b/substrate/client/tests/common/mod.rs @@ -58,7 +58,7 @@ macro_rules! serai_test { tokio::time::sleep(core::time::Duration::from_secs(1)).await; ticks += 1; - let Ok(serai_rpc) = ops.handle(handle).host_port(9944) else { continue }; + let Some(serai_rpc) = ops.handle(handle).host_port(9944) else { continue }; let serai_rpc = format!("http://{}:{}", serai_rpc.0, serai_rpc.1); let Ok(client) = Serai::new(serai_rpc.clone()).await else { continue }; From f8aee9b3c8f090c90ba8f04dff4349eb0a26b43a Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 11:56:56 -0400 Subject: [PATCH 049/116] Add `overflow-checks = true` recommandation to monero-serai --- networks/monero/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/networks/monero/README.md b/networks/monero/README.md index 50146e2e..24fc1480 100644 --- a/networks/monero/README.md +++ b/networks/monero/README.md @@ -6,6 +6,9 @@ the Monero protocol. This library is usable under no-std when the `std` feature (on by default) is disabled. +Recommended usage of the library is with `overflow-checks = true`, even for +release builds. + ### Wallet Functionality monero-serai originally included wallet functionality. That has been moved to From b934e484ccb5e9509b3d505082922e03e1a968ab Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 12:12:54 -0400 Subject: [PATCH 050/116] Replace busybox wget with wget on alpine to attempt to resolve DNS issues See https://github.com/alpinelinux/docker-alpine/issues/155. 
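Stepping back to the wallet changes a few patches above: the new `AmountsUnrepresentable` error and the `overflow-checks = true` README recommendation both guard against the same failure mode, namely that summing untrusted `u64` amounts can silently wrap in release builds. The following is a minimal sketch of that widen-then-check pattern, not code from monero-serai; the function name `sum_amounts` and the sample values are illustrative only.

```rust
/// A minimal sketch (not monero-serai code) of summing untrusted `u64` amounts
/// without risking wrap-around: accumulate in `u128`, then check the total
/// still fits within a `u64` before using it.
fn sum_amounts(amounts: &[u64]) -> Result<u64, u128> {
  // The u128 accumulator cannot overflow here: even u64::MAX summed u64::MAX
  // times still fits within a u128.
  let total: u128 = amounts.iter().map(|a| u128::from(*a)).sum();
  // Reject totals which are not representable as a u64, instead of panicking
  // (with overflow-checks) or silently wrapping (without).
  u64::try_from(total).map_err(|_| total)
}

fn main() {
  assert_eq!(sum_amounts(&[1, 2, 3]), Ok(6));
  // Two near-max inputs exceed u64::MAX, so the sum is reported as an error.
  assert!(sum_amounts(&[u64::MAX, u64::MAX]).is_err());
}
```

The actual patch applies this idea to both the inputs and the payments-plus-fee total, mapping an unrepresentable sum to the new `SendError::AmountsUnrepresentable` variant rather than allowing a panic.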
--- orchestration/src/networks/bitcoin.rs | 2 +- orchestration/src/networks/ethereum/consensus/lighthouse.rs | 2 +- orchestration/src/networks/ethereum/consensus/nimbus.rs | 2 ++ orchestration/src/networks/ethereum/execution/reth.rs | 2 +- orchestration/src/networks/monero.rs | 2 +- 5 files changed, 6 insertions(+), 4 deletions(-) diff --git a/orchestration/src/networks/bitcoin.rs b/orchestration/src/networks/bitcoin.rs index b43b4672..e222f1a6 100644 --- a/orchestration/src/networks/bitcoin.rs +++ b/orchestration/src/networks/bitcoin.rs @@ -9,7 +9,7 @@ FROM alpine:latest AS bitcoin ENV BITCOIN_VERSION=27.1 -RUN apk --no-cache add git gnupg +RUN apk --no-cache add wget git gnupg # Download Bitcoin RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz diff --git a/orchestration/src/networks/ethereum/consensus/lighthouse.rs b/orchestration/src/networks/ethereum/consensus/lighthouse.rs index 00df59ea..f66ad7fa 100644 --- a/orchestration/src/networks/ethereum/consensus/lighthouse.rs +++ b/orchestration/src/networks/ethereum/consensus/lighthouse.rs @@ -9,7 +9,7 @@ FROM alpine:latest AS lighthouse ENV LIGHTHOUSE_VERSION=5.1.3 -RUN apk --no-cache add git gnupg +RUN apk --no-cache add wget git gnupg # Download lighthouse RUN wget https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz diff --git a/orchestration/src/networks/ethereum/consensus/nimbus.rs b/orchestration/src/networks/ethereum/consensus/nimbus.rs index 325263c2..fba955ab 100644 --- a/orchestration/src/networks/ethereum/consensus/nimbus.rs +++ b/orchestration/src/networks/ethereum/consensus/nimbus.rs @@ -25,6 +25,8 @@ FROM alpine:latest AS nimbus ENV NIMBUS_VERSION=24.3.0 ENV NIMBUS_COMMIT=dc19b082 +RUN apk --no-cache add wget + # Download nimbus RUN wget https://github.com/status-im/nimbus-eth2/releases/download/v${{NIMBUS_VERSION}}/nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz diff --git a/orchestration/src/networks/ethereum/execution/reth.rs b/orchestration/src/networks/ethereum/execution/reth.rs index aeef39d5..dca3a192 100644 --- a/orchestration/src/networks/ethereum/execution/reth.rs +++ b/orchestration/src/networks/ethereum/execution/reth.rs @@ -9,7 +9,7 @@ FROM alpine:latest AS reth ENV RETH_VERSION=0.2.0-beta.6 -RUN apk --no-cache add git gnupg +RUN apk --no-cache add wget git gnupg # Download reth RUN wget https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz diff --git a/orchestration/src/networks/monero.rs b/orchestration/src/networks/monero.rs index 9b8f24da..9180b5f7 100644 --- a/orchestration/src/networks/monero.rs +++ b/orchestration/src/networks/monero.rs @@ -24,7 +24,7 @@ fn monero_internal( let download_monero = format!(r#" FROM alpine:latest AS monero -RUN apk --no-cache add gnupg +RUN apk --no-cache add wget gnupg # Download Monero RUN wget https://downloads.getmonero.org/cli/monero-linux-{arch}-v{MONERO_VERSION}.tar.bz2 From 9ddad794b48bb32827e664793fb282bca589d7f3 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 12:23:07 -0400 Subject: [PATCH 051/116] Use `wget -4` for the same reason as the prior commit --- orchestration/src/networks/ethereum/consensus/lighthouse.rs | 4 ++-- orchestration/src/networks/ethereum/consensus/nimbus.rs | 2 +- orchestration/src/networks/ethereum/execution/reth.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 
deletions(-) diff --git a/orchestration/src/networks/ethereum/consensus/lighthouse.rs b/orchestration/src/networks/ethereum/consensus/lighthouse.rs index f66ad7fa..f40ac297 100644 --- a/orchestration/src/networks/ethereum/consensus/lighthouse.rs +++ b/orchestration/src/networks/ethereum/consensus/lighthouse.rs @@ -12,8 +12,8 @@ ENV LIGHTHOUSE_VERSION=5.1.3 RUN apk --no-cache add wget git gnupg # Download lighthouse -RUN wget https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz -RUN wget https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc +RUN wget -4 https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz +RUN wget -4 https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc # Verify the signature gpg --keyserver keyserver.ubuntu.com --recv-keys 15E66D941F697E28F49381F426416DC3F30674B0 diff --git a/orchestration/src/networks/ethereum/consensus/nimbus.rs b/orchestration/src/networks/ethereum/consensus/nimbus.rs index fba955ab..a29bf5fe 100644 --- a/orchestration/src/networks/ethereum/consensus/nimbus.rs +++ b/orchestration/src/networks/ethereum/consensus/nimbus.rs @@ -28,7 +28,7 @@ ENV NIMBUS_COMMIT=dc19b082 RUN apk --no-cache add wget # Download nimbus -RUN wget https://github.com/status-im/nimbus-eth2/releases/download/v${{NIMBUS_VERSION}}/nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz +RUN wget -4 https://github.com/status-im/nimbus-eth2/releases/download/v${{NIMBUS_VERSION}}/nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz # Extract nimbus RUN tar xvf nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz diff --git a/orchestration/src/networks/ethereum/execution/reth.rs b/orchestration/src/networks/ethereum/execution/reth.rs index dca3a192..91080b1d 100644 --- a/orchestration/src/networks/ethereum/execution/reth.rs +++ b/orchestration/src/networks/ethereum/execution/reth.rs @@ -12,8 +12,8 @@ ENV RETH_VERSION=0.2.0-beta.6 RUN apk --no-cache add wget git gnupg # Download reth -RUN wget https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz -RUN wget https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc +RUN wget -4 https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz +RUN wget -4 https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc # Verify the signature gpg --keyserver keyserver.ubuntu.com --recv-keys A3AE097C89093A124049DF1F5391A3C4100530B4 From 8d209c652e301cb9074f8a5a3e06abae75a56dc0 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 17:50:35 -0400 Subject: [PATCH 052/116] Add missing "-4" arguments to wget --- orchestration/src/networks/bitcoin.rs | 6 +++--- orchestration/src/networks/monero.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/orchestration/src/networks/bitcoin.rs b/orchestration/src/networks/bitcoin.rs index e222f1a6..9136c997 100644 --- a/orchestration/src/networks/bitcoin.rs +++ 
b/orchestration/src/networks/bitcoin.rs @@ -12,9 +12,9 @@ ENV BITCOIN_VERSION=27.1 RUN apk --no-cache add wget git gnupg # Download Bitcoin -RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz -RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS -RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS.asc +RUN wget -4 https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz +RUN wget -4 https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS +RUN wget -4 https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS.asc # Verify all sigs and check for a valid signature from laanwj -- 71A3 RUN git clone https://github.com/bitcoin-core/guix.sigs && \ diff --git a/orchestration/src/networks/monero.rs b/orchestration/src/networks/monero.rs index 9180b5f7..8103ff24 100644 --- a/orchestration/src/networks/monero.rs +++ b/orchestration/src/networks/monero.rs @@ -27,7 +27,7 @@ FROM alpine:latest AS monero RUN apk --no-cache add wget gnupg # Download Monero -RUN wget https://downloads.getmonero.org/cli/monero-linux-{arch}-v{MONERO_VERSION}.tar.bz2 +RUN wget -4 https://downloads.getmonero.org/cli/monero-linux-{arch}-v{MONERO_VERSION}.tar.bz2 # Verify Binary -- fingerprint from https://github.com/monero-project/monero-site/issues/1949 ADD orchestration/{}/networks/monero/hashes-v{MONERO_VERSION}.txt . From df3b60376ad021e5dd14970208d3f8a83fde6d50 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 18:36:44 -0400 Subject: [PATCH 053/116] Restore Debian 12 Bookworm over Debian 11 Bullseye --- orchestration/src/main.rs | 4 ++-- orchestration/src/mimalloc.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/orchestration/src/main.rs b/orchestration/src/main.rs index 64271d46..1bad77fb 100644 --- a/orchestration/src/main.rs +++ b/orchestration/src/main.rs @@ -117,7 +117,7 @@ WORKDIR /home/{user} Os::Debian => format!( r#" -FROM debian:bullseye-slim AS image +FROM debian:bookworm-slim AS image COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload @@ -146,7 +146,7 @@ fn build_serai_service(prelude: &str, release: bool, features: &str, package: &s format!( r#" -FROM rust:1.89-slim-bullseye AS builder +FROM rust:1.89-slim-bookworm AS builder COPY --from=mimalloc-debian libmimalloc.so /usr/lib RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload diff --git a/orchestration/src/mimalloc.rs b/orchestration/src/mimalloc.rs index 0f412e12..51f5bfe7 100644 --- a/orchestration/src/mimalloc.rs +++ b/orchestration/src/mimalloc.rs @@ -16,7 +16,7 @@ RUN git clone https://github.com/microsoft/mimalloc && \ "#; const DEBIAN_MIMALLOC: &str = r#" -FROM debian:bullseye-slim AS mimalloc-debian +FROM debian:bookworm-slim AS mimalloc-debian RUN apt update && apt upgrade -y && apt install -y gcc g++ make cmake git RUN git clone https://github.com/microsoft/mimalloc && \ From 354c408e3e7e717829da87f85780a33cd4a932eb Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 18:37:02 -0400 Subject: [PATCH 054/116] Stop using an older version of Docker --- .github/actions/build-dependencies/action.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 97b3bffc..94f119f4 100644 --- a/.github/actions/build-dependencies/action.yml +++ 
b/.github/actions/build-dependencies/action.yml @@ -67,7 +67,6 @@ runs: with: rootless: true set-host: true - version: type=image,tag=26.1.4 if: runner.os == 'Linux' # - name: Cache Rust From b9f554111dedf1b08968c7102e925bacdeea472b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 18:57:13 -0400 Subject: [PATCH 055/116] Attempt to use Docker 24 Long-shot premised on an old forum post on how downgrading to Docker 24 solved their instance of the error we face, though our conditions for it are presumably different. --- .github/actions/build-dependencies/action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 94f119f4..b65d6411 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -67,6 +67,7 @@ runs: with: rootless: true set-host: true + version: type=image,tag=24.0.6 if: runner.os == 'Linux' # - name: Cache Rust From b000740470fcd2c5042834d7ecac7d4274685ec3 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 20:13:24 -0400 Subject: [PATCH 056/116] Docker 25 since 24 doesn't have an active tag anymore --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index b65d6411..77651c06 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -67,7 +67,7 @@ runs: with: rootless: true set-host: true - version: type=image,tag=24.0.6 + version: type=image,tag=25.0.7 if: runner.os == 'Linux' # - name: Cache Rust From 8df5aa2e2d0ae3b587c3bce2ed08fc1a007af042 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 10 Aug 2025 20:51:56 -0400 Subject: [PATCH 057/116] Forward docker stderr to stdout in case stderr is being dropped for some reason --- substrate/client/tests/common/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrate/client/tests/common/mod.rs b/substrate/client/tests/common/mod.rs index 560eb8ab..6281309e 100644 --- a/substrate/client/tests/common/mod.rs +++ b/substrate/client/tests/common/mod.rs @@ -40,7 +40,7 @@ macro_rules! serai_test { .set_handle(handle) .set_start_policy(StartPolicy::Strict) .set_log_options(Some(LogOptions { - action: LogAction::Forward, + action: LogAction::ForwardToStdOut, policy: LogPolicy::Always, source: LogSource::Both, })); From bc81614894a0e676bde9b3e25194631a26972dc8 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 12 Aug 2025 00:38:26 -0400 Subject: [PATCH 058/116] Attempt Docker 24 again --- .github/actions/build-dependencies/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 77651c06..0c84bee4 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -67,7 +67,7 @@ runs: with: rootless: true set-host: true - version: type=image,tag=25.0.7 + version: type=archive,version=24.0.9 if: runner.os == 'Linux' # - name: Cache Rust From fd2d8b4f0a89d0d6b6302d3701053d5cb2864b24 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 12 Aug 2025 00:41:46 -0400 Subject: [PATCH 059/116] Use Rust 1.89 when installing bins via cargo, version pin svm-rs svm-rs just released a new version requiring 1.89 to compile. 
This process to not install _any_ software with 1.85 to minimize how many toolchains we have in use. --- .github/actions/build-dependencies/action.yml | 2 +- .github/workflows/daily-deny.yml | 2 +- .github/workflows/lint.yml | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 0c84bee4..366ce18c 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -40,7 +40,7 @@ runs: - name: Install solc shell: bash run: | - cargo install svm-rs + cargo +1.89 install svm-rs --version =0.5.18 svm install 0.8.25 svm use 0.8.25 diff --git a/.github/workflows/daily-deny.yml b/.github/workflows/daily-deny.yml index 6208e192..b11cdaf9 100644 --- a/.github/workflows/daily-deny.yml +++ b/.github/workflows/daily-deny.yml @@ -18,7 +18,7 @@ jobs: key: rust-advisory-db - name: Install cargo deny - run: cargo +1.85 install cargo-deny --version =0.18.3 + run: cargo +1.89 install cargo-deny --version =0.18.3 - name: Run cargo deny run: cargo deny -L error --all-features check --hide-inclusion-graph diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 8d74f5ac..c70360b5 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -52,7 +52,7 @@ jobs: key: rust-advisory-db - name: Install cargo deny - run: cargo +1.85 install cargo-deny --version =0.18.3 + run: cargo +1.89 install cargo-deny --version =0.18.3 - name: Run cargo deny run: cargo deny -L error --all-features check --hide-inclusion-graph @@ -79,5 +79,5 @@ jobs: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - name: Verify all dependencies are in use run: | - cargo +1.85 install cargo-machete --version =0.8.0 - cargo +1.85 machete + cargo +1.89 install cargo-machete --version =0.8.0 + cargo +1.89 machete From e3809b2ff1a0a8a21a5fb3c931f552fd54251f7d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 12 Aug 2025 01:26:40 -0400 Subject: [PATCH 060/116] Remove unnecessary edits to Docker config in an attempt to fix the CI --- .github/actions/build-dependencies/action.yml | 1 - substrate/client/tests/common/mod.rs | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 366ce18c..1c6c7f56 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -67,7 +67,6 @@ runs: with: rootless: true set-host: true - version: type=archive,version=24.0.9 if: runner.os == 'Linux' # - name: Cache Rust diff --git a/substrate/client/tests/common/mod.rs b/substrate/client/tests/common/mod.rs index 6281309e..560eb8ab 100644 --- a/substrate/client/tests/common/mod.rs +++ b/substrate/client/tests/common/mod.rs @@ -40,7 +40,7 @@ macro_rules! serai_test { .set_handle(handle) .set_start_policy(StartPolicy::Strict) .set_log_options(Some(LogOptions { - action: LogAction::ForwardToStdOut, + action: LogAction::Forward, policy: LogPolicy::Always, source: LogSource::Both, })); From a32b97be88a6f70bc3d0ed5e9aab6b115c32df29 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 10:55:05 -0400 Subject: [PATCH 061/116] Move to wasm32v1-none from wasm32-unknown-unknown Works towards fixing how the Substrate node Docker image no longer works. 
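Unlike `wasm32-unknown-unknown`, `wasm32v1-none` ships no standard library at all, so crates built for it must be `no_std` and pull in `alloc` explicitly; the diff below only touches toolchain, build-script, and workflow plumbing, which suggests the runtime code already fits that shape. Below is a minimal sketch, as a hypothetical crate rather than the Serai runtime itself, of code that compiles for such a libstd-less wasm target.

```rust
// A minimal sketch (hypothetical crate, not the Serai runtime) of the shape of
// code that builds for a libstd-less wasm target such as wasm32v1-none:
// `no_std`, with `alloc` pulled in explicitly for heap-allocated types.
#![no_std]

extern crate alloc;
use alloc::vec::Vec;

/// Doubles every value, saturating instead of overflowing.
pub fn double_all(values: &[u64]) -> Vec<u64> {
  values.iter().map(|v| v.saturating_mul(2)).collect()
}
```

With a toolchain that has the target installed (as the `rustup toolchain install ... -t wasm32v1-none` invocations below arrange), a library crate of this shape should build via `cargo build --target wasm32v1-none`; a panic handler is only needed once it is linked into a final wasm artifact.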
--- .github/workflows/lint.yml | 2 +- .github/workflows/pages.yml | 2 +- Cargo.lock | 404 +++++++++++++++++-------------- orchestration/runtime/Dockerfile | 2 +- orchestration/src/main.rs | 2 +- rust-toolchain.toml | 2 +- spec/Getting Started.md | 4 +- substrate/runtime/Cargo.toml | 2 +- substrate/runtime/build.rs | 10 +- 9 files changed, 233 insertions(+), 197 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c70360b5..eae3c59a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -26,7 +26,7 @@ jobs: uses: ./.github/actions/build-dependencies - name: Install nightly rust - run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy + run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c clippy - name: Run Clippy run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml index f196c450..069517c8 100644 --- a/.github/workflows/pages.yml +++ b/.github/workflows/pages.yml @@ -69,7 +69,7 @@ jobs: uses: ./.github/actions/build-dependencies - name: Buld Rust docs run: | - rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c rust-docs + rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c rust-docs RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --all-features mv target/doc docs/_site/rust diff --git a/Cargo.lock b/Cargo.lock index 8d80e5d8..73cd8128 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -207,7 +207,7 @@ dependencies = [ "alloy-sol-types", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", "tracing", ] @@ -229,7 +229,7 @@ dependencies = [ "async-trait", "auto_impl", "futures-utils-wasm", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -257,7 +257,7 @@ dependencies = [ "rand", "serde_json", "tempfile", - "thiserror", + "thiserror 1.0.64", "tracing", "url", ] @@ -315,7 +315,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", ] @@ -339,7 +339,7 @@ checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -403,7 +403,7 @@ dependencies = [ "auto_impl", "elliptic-curve", "k256", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -428,7 +428,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -445,7 +445,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "syn-solidity", "tiny-keccak", ] @@ -463,7 +463,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.79", + "syn 2.0.87", "syn-solidity", ] @@ -501,7 +501,7 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", "tokio", "tower 0.5.1", "tracing", @@ -766,7 +766,7 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror", + "thiserror 1.0.64", "time", ] @@ -853,7 +853,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -864,7 +864,7 @@ checksum = 
"721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -899,7 +899,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1010,7 +1010,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1075,7 +1075,7 @@ dependencies = [ "serde_json", "simple-request", "std-shims", - "thiserror", + "thiserror 1.0.64", "tokio", "zeroize", ] @@ -1246,7 +1246,7 @@ dependencies = [ "serde_json", "serde_repr", "serde_urlencoded", - "thiserror", + "thiserror 1.0.64", "tokio", "tokio-util", "tower-service", @@ -1285,7 +1285,7 @@ dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "syn_derive", ] @@ -1408,16 +1408,16 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.17.0" +version = "0.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7daec1a2a2129eeba1644b220b4647ec537b0b5d4bfd6876fcc5a540056b592" +checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" dependencies = [ "camino", "cargo-platform", "semver 1.0.23", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -1589,7 +1589,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1605,7 +1605,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" dependencies = [ "termcolor", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] @@ -1623,6 +1623,19 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "console" +version = "0.15.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width 0.2.1", + "windows-sys 0.59.0", +] + [[package]] name = "const-hex" version = "1.13.1" @@ -1917,7 +1930,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1944,7 +1957,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1961,7 +1974,7 @@ checksum = "98532a60dedaebc4848cb2cba5023337cc9ea3af16a5b062633fabfd9f18fb60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2093,7 +2106,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2113,7 +2126,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "unicode-xid", ] @@ -2193,7 +2206,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2209,7 +2222,7 @@ dependencies = [ "rand_core", "schnorr-signatures", "std-shims", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -2228,7 +2241,7 @@ dependencies = [ "multiexp", "rand_core", "rustversion", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -2250,7 +2263,7 @@ dependencies = [ 
"serde", "serde_json", "strum 0.26.3", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", ] @@ -2380,6 +2393,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "enum-as-inner" version = "0.5.1" @@ -2401,7 +2420,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2457,7 +2476,7 @@ dependencies = [ "k256", "modular-frost", "rand_core", - "thiserror", + "thiserror 1.0.64", "tokio", ] @@ -2507,7 +2526,7 @@ dependencies = [ "fs-err", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2583,18 +2602,6 @@ dependencies = [ "log", ] -[[package]] -name = "filetime" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" -dependencies = [ - "cfg-if", - "libc", - "libredox", - "windows-sys 0.59.0", -] - [[package]] name = "finality-grandpa" version = "0.16.2" @@ -2782,7 +2789,7 @@ dependencies = [ "proc-macro-warning", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2794,7 +2801,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2804,7 +2811,7 @@ source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf77 dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2963,7 +2970,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -3806,7 +3813,7 @@ dependencies = [ "serde", "serde_json", "soketto 0.7.1", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", ] @@ -3856,7 +3863,7 @@ dependencies = [ "beef", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", "tracing", ] @@ -4003,7 +4010,7 @@ dependencies = [ "multiaddr", "pin-project", "rw-stream-sink", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -4053,7 +4060,7 @@ dependencies = [ "rand", "rw-stream-sink", "smallvec", - "thiserror", + "thiserror 1.0.64", "unsigned-varint", "void", ] @@ -4125,7 +4132,7 @@ dependencies = [ "quick-protobuf", "quick-protobuf-codec", "smallvec", - "thiserror", + "thiserror 1.0.64", "void", ] @@ -4142,7 +4149,7 @@ dependencies = [ "quick-protobuf", "rand", "sha2", - "thiserror", + "thiserror 1.0.64", "tracing", "zeroize", ] @@ -4170,7 +4177,7 @@ dependencies = [ "rand", "sha2", "smallvec", - "thiserror", + "thiserror 1.0.64", "uint", "unsigned-varint", "void", @@ -4235,7 +4242,7 @@ dependencies = [ "sha2", "snow", "static_assertions", - "thiserror", + "thiserror 1.0.64", "x25519-dalek", "zeroize", ] @@ -4278,7 +4285,7 @@ dependencies = [ "ring 0.16.20", "rustls 0.21.12", "socket2 0.5.7", - "thiserror", + "thiserror 1.0.64", "tokio", ] @@ -4333,7 +4340,7 @@ dependencies = [ "proc-macro-warning", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4367,7 +4374,7 @@ dependencies = [ "ring 0.16.20", "rustls 0.21.12", "rustls-webpki 0.101.7", - "thiserror", + "thiserror 1.0.64", "x509-parser", "yasna", ] @@ -4418,7 +4425,7 @@ dependencies = [ "pin-project-lite", "rw-stream-sink", "soketto 0.8.0", - "thiserror", + "thiserror 1.0.64", "url", "webpki-roots", ] @@ -4432,7 +4439,7 @@ dependencies = [ "futures", "libp2p-core", "log", 
- "thiserror", + "thiserror 1.0.64", "yamux", ] @@ -4444,7 +4451,6 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", - "redox_syscall", ] [[package]] @@ -4597,7 +4603,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4611,7 +4617,7 @@ dependencies = [ "macro_magic_core_macros", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4622,7 +4628,7 @@ checksum = "d710e1214dffbab3b5dacb21475dde7d6ed84c69ff722b3a47a782668d44fbac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4633,7 +4639,7 @@ checksum = "b8fb85ec1620619edf2984a7693497d4ec88a9665d8b87e942856884c92dbf2a" dependencies = [ "macro_magic_core", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4817,7 +4823,7 @@ dependencies = [ [[package]] name = "modular-frost" -version = "0.8.1" +version = "0.9.0" dependencies = [ "ciphersuite", "dalek-ff-group", @@ -4832,7 +4838,7 @@ dependencies = [ "schnorr-signatures", "serde_json", "subtle", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -4849,7 +4855,7 @@ dependencies = [ "serde", "serde_json", "std-shims", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -4876,7 +4882,7 @@ dependencies = [ "monero-primitives", "rand_core", "std-shims", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -4896,7 +4902,7 @@ dependencies = [ "rand_core", "std-shims", "subtle", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -4931,7 +4937,7 @@ dependencies = [ "monero-io", "monero-primitives", "std-shims", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -4959,7 +4965,7 @@ dependencies = [ "serde", "serde_json", "std-shims", - "thiserror", + "thiserror 1.0.64", "zeroize", ] @@ -5033,7 +5039,7 @@ dependencies = [ "serde", "serde_json", "std-shims", - "thiserror", + "thiserror 1.0.64", "tokio", "zeroize", ] @@ -5211,7 +5217,7 @@ checksum = "254a5372af8fc138e36684761d3c0cdb758a4410e938babcff1c860ce14ddbfc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -5258,7 +5264,7 @@ dependencies = [ "anyhow", "byteorder", "paste", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -5272,7 +5278,7 @@ dependencies = [ "log", "netlink-packet-core", "netlink-sys", - "thiserror", + "thiserror 1.0.64", "tokio", ] @@ -5424,7 +5430,7 @@ checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -5812,7 +5818,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" dependencies = [ "memchr", - "thiserror", + "thiserror 1.0.64", "ucd-trie", ] @@ -5843,7 +5849,7 @@ checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -6051,7 +6057,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -6062,7 +6068,7 @@ checksum = "3d1eaa7fa0aa1929ffdf7eeb6eac234dde6268914a14ad44d23521ab6a9b258e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -6085,7 +6091,7 @@ dependencies = [ "lazy_static", "memchr", "parking_lot 0.12.3", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -6108,7 +6114,7 @@ checksum = 
"440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -6218,7 +6224,7 @@ dependencies = [ "asynchronous-codec", "bytes", "quick-protobuf", - "thiserror", + "thiserror 1.0.64", "unsigned-varint", ] @@ -6235,7 +6241,7 @@ dependencies = [ "quinn-udp", "rustc-hash 1.1.0", "rustls 0.21.12", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", ] @@ -6252,7 +6258,7 @@ dependencies = [ "rustc-hash 1.1.0", "rustls 0.21.12", "slab", - "thiserror", + "thiserror 1.0.64", "tinyvec", "tracing", ] @@ -6399,7 +6405,7 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom", "libredox", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -6419,7 +6425,7 @@ checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -6587,7 +6593,7 @@ dependencies = [ "netlink-packet-route", "netlink-proto", "nix", - "thiserror", + "thiserror 1.0.64", "tokio", ] @@ -6831,7 +6837,7 @@ dependencies = [ "log", "sp-core", "sp-wasm-interface", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -6859,7 +6865,7 @@ dependencies = [ "sp-keystore", "sp-runtime", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -6927,7 +6933,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -6964,7 +6970,7 @@ dependencies = [ "sp-panic-handler", "sp-runtime", "sp-version", - "thiserror", + "thiserror 1.0.64", "tiny-bip39 1.0.2", "tokio", ] @@ -7042,7 +7048,7 @@ dependencies = [ "sp-runtime", "sp-state-machine", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7078,7 +7084,7 @@ dependencies = [ "sp-keystore", "sp-runtime", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7132,7 +7138,7 @@ dependencies = [ "sp-keystore", "sp-runtime", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7186,9 +7192,9 @@ version = "0.10.0-dev" source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ "sc-allocator", - "sp-maybe-compressed-blob", + "sp-maybe-compressed-blob 4.1.0-dev", "sp-wasm-interface", - "thiserror", + "thiserror 1.0.64", "wasm-instrument", ] @@ -7236,7 +7242,7 @@ dependencies = [ "sp-application-crypto", "sp-core", "sp-keystore", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7274,7 +7280,7 @@ dependencies = [ "sp-core", "sp-runtime", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", "unsigned-varint", "void", "wasm-timer", @@ -7297,7 +7303,7 @@ dependencies = [ "sc-network", "sp-blockchain", "sp-runtime", - "thiserror", + "thiserror 1.0.64", "unsigned-varint", ] @@ -7355,7 +7361,7 @@ dependencies = [ "sp-blockchain", "sp-core", "sp-runtime", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7389,7 +7395,7 @@ dependencies = [ "sp-core", "sp-runtime", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7496,7 +7502,7 @@ dependencies = [ "sp-rpc", "sp-runtime", "sp-version", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7536,7 +7542,7 @@ dependencies = [ "sp-core", "sp-runtime", "sp-version", - "thiserror", + "thiserror 1.0.64", "tokio-stream", ] @@ -7597,7 +7603,7 @@ dependencies = [ "static_init", "substrate-prometheus-endpoint", 
"tempfile", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", "tracing-futures", @@ -7648,7 +7654,7 @@ dependencies = [ "sc-utils", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", "wasm-timer", ] @@ -7674,7 +7680,7 @@ dependencies = [ "sp-rpc", "sp-runtime", "sp-tracing", - "thiserror", + "thiserror 1.0.64", "tracing", "tracing-log", "tracing-subscriber 0.2.25", @@ -7688,7 +7694,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -7714,7 +7720,7 @@ dependencies = [ "sp-tracing", "sp-transaction-pool", "substrate-prometheus-endpoint", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -7730,7 +7736,7 @@ dependencies = [ "sp-blockchain", "sp-core", "sp-runtime", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -8017,7 +8023,7 @@ dependencies = [ "simple-request", "sp-core", "sp-runtime", - "thiserror", + "thiserror 1.0.64", "tokio", "zeroize", ] @@ -8456,7 +8462,7 @@ dependencies = [ "serai-processor-messages", "serde_json", "sp-application-crypto", - "thiserror", + "thiserror 1.0.64", "tokio", "zalloc", "zeroize", @@ -8653,7 +8659,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -8676,7 +8682,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -8952,7 +8958,7 @@ dependencies = [ "sp-std", "sp-trie", "sp-version", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -8966,7 +8972,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9034,7 +9040,7 @@ dependencies = [ "sp-database", "sp-runtime", "sp-state-machine", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -9048,7 +9054,7 @@ dependencies = [ "sp-inherents", "sp-runtime", "sp-state-machine", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -9137,7 +9143,7 @@ dependencies = [ "sp-storage", "ss58-registry", "substrate-bip39", - "thiserror", + "thiserror 1.0.64", "tiny-bip39 1.0.2", "tracing", "zeroize", @@ -9162,7 +9168,7 @@ source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf77 dependencies = [ "quote", "sp-core-hashing", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9181,7 +9187,7 @@ source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf77 dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9206,7 +9212,7 @@ dependencies = [ "scale-info", "sp-runtime", "sp-std", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -9251,7 +9257,7 @@ dependencies = [ "parking_lot 0.12.3", "sp-core", "sp-externalities", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -9259,10 +9265,19 @@ name = "sp-maybe-compressed-blob" version = "4.1.0-dev" source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" dependencies = [ - "thiserror", + "thiserror 1.0.64", "zstd 0.12.4", ] +[[package]] +name = "sp-maybe-compressed-blob" +version = "11.0.0" +source = "git+https://github.com/serai-dex/polkadot-sdk?branch=serai-next#ebfc5f338a9752403fa07e88dd072b1d6de7acfc" +dependencies = [ + "thiserror 2.0.14", + "zstd 0.13.2", +] + [[package]] name = "sp-metadata-ir" version = "0.1.0" @@ -9353,7 +9368,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9401,7 
+9416,7 @@ dependencies = [ "sp-panic-handler", "sp-std", "sp-trie", - "thiserror", + "thiserror 1.0.64", "tracing", "trie-db", ] @@ -9434,7 +9449,7 @@ dependencies = [ "sp-inherents", "sp-runtime", "sp-std", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -9475,7 +9490,7 @@ dependencies = [ "schnellru", "sp-core", "sp-std", - "thiserror", + "thiserror 1.0.64", "tracing", "trie-db", "trie-root", @@ -9495,7 +9510,7 @@ dependencies = [ "sp-runtime", "sp-std", "sp-version-proc-macro", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -9506,7 +9521,7 @@ dependencies = [ "parity-scale-codec", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9622,7 +9637,7 @@ dependencies = [ [[package]] name = "std-shims" -version = "0.1.1" +version = "0.1.2" dependencies = [ "hashbrown 0.14.5", "spin 0.9.8", @@ -9694,7 +9709,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9707,7 +9722,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9754,24 +9769,25 @@ dependencies = [ "hyper 0.14.30", "log", "prometheus", - "thiserror", + "thiserror 1.0.64", "tokio", ] [[package]] name = "substrate-wasm-builder" -version = "5.0.0-dev" -source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f" +version = "25.0.0" +source = "git+https://github.com/serai-dex/polkadot-sdk?branch=serai-next#ebfc5f338a9752403fa07e88dd072b1d6de7acfc" dependencies = [ - "anstyle", "build-helper", "cargo_metadata", - "filetime", + "console", + "jobserver", "parity-wasm", - "sp-maybe-compressed-blob", - "strum 0.25.0", + "shlex", + "sp-maybe-compressed-blob 11.0.0", + "strum 0.26.3", "tempfile", - "toml 0.7.8", + "toml 0.8.19", "walkdir", "wasm-opt", ] @@ -9795,9 +9811,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.79" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", @@ -9813,7 +9829,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9825,7 +9841,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -9904,7 +9920,7 @@ dependencies = [ "parity-scale-codec", "patchable-async-sleep", "serai-db", - "thiserror", + "thiserror 1.0.64", "tokio", ] @@ -9929,7 +9945,16 @@ version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.64", +] + +[[package]] +name = "thiserror" +version = "2.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b0949c3a6c842cbde3f1686d6eea5a010516deb7085f79db747562d4102f41e" +dependencies = [ + "thiserror-impl 2.0.14", ] [[package]] @@ -9940,7 +9965,18 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5b44b4ab9c2fdd0e0512e6bece8388e214c0749f5862b114cc5b7a25daf227" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", ] [[package]] @@ 
-10011,7 +10047,7 @@ dependencies = [ "rand", "rustc-hash 1.1.0", "sha2", - "thiserror", + "thiserror 1.0.64", "unicode-normalization", "wasm-bindgen", "zeroize", @@ -10067,7 +10103,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -10118,14 +10154,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.8" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.19.15", + "toml_edit 0.22.20", ] [[package]] @@ -10144,8 +10180,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ "indexmap 2.5.0", - "serde", - "serde_spanned", "toml_datetime", "winnow 0.5.40", ] @@ -10157,6 +10191,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ "indexmap 2.5.0", + "serde", + "serde_spanned", "toml_datetime", "winnow 0.6.20", ] @@ -10236,7 +10272,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -10340,7 +10376,7 @@ dependencies = [ "serai-db", "subtle", "tendermint-machine", - "thiserror", + "thiserror 1.0.64", "tokio", "zeroize", ] @@ -10386,7 +10422,7 @@ dependencies = [ "rand", "smallvec", "socket2 0.4.10", - "thiserror", + "thiserror 1.0.64", "tinyvec", "tokio", "tracing", @@ -10411,7 +10447,7 @@ dependencies = [ "once_cell", "rand", "smallvec", - "thiserror", + "thiserror 1.0.64", "tinyvec", "tokio", "tracing", @@ -10433,7 +10469,7 @@ dependencies = [ "rand", "resolv-conf", "smallvec", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", "trust-dns-proto 0.23.2", @@ -10520,6 +10556,12 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" +[[package]] +name = "unicode-width" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -10663,7 +10705,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "wasm-bindgen-shared", ] @@ -10697,7 +10739,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10728,25 +10770,25 @@ dependencies = [ [[package]] name = "wasm-opt" -version = "0.114.2" +version = "0.116.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "effbef3bd1dde18acb401f73e740a6f3d4a1bc651e9773bddc512fe4d8d68f67" +checksum = "2fd87a4c135535ffed86123b6fb0f0a5a0bc89e50416c942c5f0662c645f679c" dependencies = [ "anyhow", "libc", "strum 0.24.1", "strum_macros 0.24.3", "tempfile", - "thiserror", + "thiserror 1.0.64", "wasm-opt-cxx-sys", "wasm-opt-sys", ] [[package]] name = "wasm-opt-cxx-sys" -version = "0.114.2" +version = "0.116.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c09e24eb283919ace2ed5733bda4842a59ce4c8de110ef5c6d98859513d17047" +checksum = "8c57b28207aa724318fcec6575fe74803c23f6f266fce10cbc9f3f116762f12e" dependencies = [ "anyhow", "cxx", @@ -10756,9 +10798,9 @@ dependencies = [ [[package]] name = "wasm-opt-sys" -version = "0.114.2" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f2f817bed2e8d65eb779fa37317e74de15585751f903c9118342d1970703a4" +checksum = "8a1cce564dc768dacbdb718fc29df2dba80bd21cb47d8f77ae7e3d95ceb98cbe" dependencies = [ "anyhow", "cc", @@ -10869,7 +10911,7 @@ dependencies = [ "log", "object 0.31.1", "target-lexicon", - "thiserror", + "thiserror 1.0.64", "wasmparser", "wasmtime-cranelift-shared", "wasmtime-environ", @@ -10906,7 +10948,7 @@ dependencies = [ "object 0.31.1", "serde", "target-lexicon", - "thiserror", + "thiserror 1.0.64", "wasmparser", "wasmtime-types", ] @@ -10994,7 +11036,7 @@ checksum = "77943729d4b46141538e8d0b6168915dc5f88575ecdfea26753fd3ba8bab244a" dependencies = [ "cranelift-entity", "serde", - "thiserror", + "thiserror 1.0.64", "wasmparser", ] @@ -11006,7 +11048,7 @@ checksum = "ca7af9bb3ee875c4907835e607a275d10b04d15623d3aebe01afe8fbd3f85050" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -11342,7 +11384,7 @@ dependencies = [ "nom", "oid-registry", "rusticata-macros", - "thiserror", + "thiserror 1.0.64", "time", ] @@ -11411,7 +11453,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -11431,7 +11473,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] diff --git a/orchestration/runtime/Dockerfile b/orchestration/runtime/Dockerfile index 145b4cab..8716d1af 100644 --- a/orchestration/runtime/Dockerfile +++ b/orchestration/runtime/Dockerfile @@ -11,7 +11,7 @@ RUN rm -rf /etc/apt/sources.list.d/debian.sources && \ RUN apt update -y && apt upgrade -y && apt install -y clang # Add the wasm toolchain -RUN rustup target add wasm32-unknown-unknown +RUN rustup target add wasm32v1-none FROM deterministic diff --git a/orchestration/src/main.rs b/orchestration/src/main.rs index 1bad77fb..9f0bacad 100644 --- a/orchestration/src/main.rs +++ b/orchestration/src/main.rs @@ -160,7 +160,7 @@ RUN apt install -y pkg-config clang RUN apt install -y make protobuf-compiler # Add the wasm toolchain -RUN rustup target add wasm32-unknown-unknown +RUN rustup target add wasm32v1-none {prelude} diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 8303f09b..7ed4c04e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] channel = "1.82" -targets = ["wasm32-unknown-unknown"] +targets = ["wasm32v1-none"] profile = "minimal" components = ["rust-src", "rustfmt", "clippy"] diff --git a/spec/Getting Started.md b/spec/Getting Started.md index c2530b2a..06a5cfb7 100644 --- a/spec/Getting Started.md +++ b/spec/Getting Started.md @@ -27,9 +27,9 @@ brew install rustup ``` rustup update rustup toolchain install stable -rustup target add wasm32-unknown-unknown +rustup target add wasm32v1-none rustup toolchain install nightly -rustup target add wasm32-unknown-unknown --toolchain nightly +rustup target add wasm32v1-none --toolchain nightly ``` ### Install Solidity diff --git a/substrate/runtime/Cargo.toml 
b/substrate/runtime/Cargo.toml index 9cd0f5ab..97e7d42c 100644 --- a/substrate/runtime/Cargo.toml +++ b/substrate/runtime/Cargo.toml @@ -76,7 +76,7 @@ frame-system-rpc-runtime-api = { git = "https://github.com/serai-dex/substrate", pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/serai-dex/substrate", default-features = false } [build-dependencies] -substrate-wasm-builder = { git = "https://github.com/serai-dex/substrate" } +substrate-wasm-builder = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "serai-next" } [features] std = [ diff --git a/substrate/runtime/build.rs b/substrate/runtime/build.rs index e9a5dbc7..9aafd17d 100644 --- a/substrate/runtime/build.rs +++ b/substrate/runtime/build.rs @@ -1,10 +1,4 @@ -use substrate_wasm_builder::WasmBuilder; - fn main() { - WasmBuilder::new() - .with_current_project() - .disable_runtime_version_section_check() - .export_heap_base() - .import_memory() - .build() + #[cfg(feature = "std")] + substrate_wasm_builder::WasmBuilder::build_using_defaults(); } From 92d9e908cbb748da8fab3705ace2a224511e9164 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 10:56:10 -0400 Subject: [PATCH 062/116] Version bumps for packages that needed to be published for monero-oxide --- common/std-shims/Cargo.toml | 4 ++-- crypto/frost/Cargo.toml | 4 ++-- crypto/schnorrkel/Cargo.toml | 2 +- networks/bitcoin/Cargo.toml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/common/std-shims/Cargo.toml b/common/std-shims/Cargo.toml index 534a4216..ef746a64 100644 --- a/common/std-shims/Cargo.toml +++ b/common/std-shims/Cargo.toml @@ -1,13 +1,13 @@ [package] name = "std-shims" -version = "0.1.1" +version = "0.1.2" description = "A series of std shims to make alloc more feasible" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/common/std-shims" authors = ["Luke Parker "] keywords = ["nostd", "no_std", "alloc", "io"] edition = "2021" -rust-version = "1.70" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 7c32b6f0..29a974f2 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -1,13 +1,13 @@ [package] name = "modular-frost" -version = "0.8.1" +version = "0.9.0" description = "Modular implementation of FROST over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost" authors = ["Luke Parker "] keywords = ["frost", "multisig", "threshold"] edition = "2021" -rust-version = "1.79" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/schnorrkel/Cargo.toml b/crypto/schnorrkel/Cargo.toml index 2508bef0..47717af5 100644 --- a/crypto/schnorrkel/Cargo.toml +++ b/crypto/schnorrkel/Cargo.toml @@ -26,7 +26,7 @@ group = "0.13" ciphersuite = { path = "../ciphersuite", version = "^0.4.1", features = ["std", "ristretto"] } schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1" } -frost = { path = "../frost", package = "modular-frost", version = "^0.8.1", features = ["ristretto"] } +frost = { path = "../frost", package = "modular-frost", version = "^0.9.0", features = ["ristretto"] } schnorrkel = { version = "0.11" } diff --git a/networks/bitcoin/Cargo.toml b/networks/bitcoin/Cargo.toml index 5ab44cc6..02f834d8 100644 --- a/networks/bitcoin/Cargo.toml +++ b/networks/bitcoin/Cargo.toml @@ -26,7 +26,7 @@ rand_core = { version = "0.6", default-features = false } bitcoin = { version = "0.32", 
default-features = false } k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] } -frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true } +frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.9", default-features = false, features = ["secp256k1"], optional = true } hex = { version = "0.4", default-features = false, optional = true } serde = { version = "1", default-features = false, features = ["derive"], optional = true } From 971951a1a66014fce5a943b4c78fc24c63187dbb Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 10:56:28 -0400 Subject: [PATCH 063/116] Add overflow-checks even on release, per good practice --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index 1aa7602f..06ea12c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -130,6 +130,7 @@ monero-serai = { opt-level = 3 } [profile.release] panic = "unwind" +overflow-checks = true [patch.crates-io] # https://github.com/rust-lang-nursery/lazy-static.rs/issues/201 From 0c2f2979a9fcee0618507cdb8dcb833fc959eff7 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 11:10:54 -0400 Subject: [PATCH 064/116] Remove monero-serai, migrating to monero-oxide --- .github/workflows/monero-tests.yaml | 72 - .github/workflows/networks-tests.yml | 13 - Cargo.lock | 86 +- Cargo.toml | 23 +- deny.toml | 1 + networks/ethereum/README.md | 13 +- networks/monero/Cargo.toml | 56 - networks/monero/LICENSE | 21 - networks/monero/README.md | 31 - networks/monero/generators/Cargo.toml | 46 - networks/monero/generators/LICENSE | 21 - networks/monero/generators/README.md | 13 - .../monero/generators/src/hash_to_point.rs | 71 - networks/monero/generators/src/lib.rs | 89 -- networks/monero/generators/src/tests/mod.rs | 36 - .../monero/generators/src/tests/tests.txt | 628 -------- networks/monero/io/Cargo.toml | 25 - networks/monero/io/LICENSE | 21 - networks/monero/io/README.md | 11 - networks/monero/io/src/lib.rs | 248 ---- networks/monero/primitives/Cargo.toml | 44 - networks/monero/primitives/LICENSE | 21 - networks/monero/primitives/README.md | 11 - networks/monero/primitives/src/lib.rs | 262 ---- networks/monero/primitives/src/tests.rs | 32 - .../monero/primitives/src/unreduced_scalar.rs | 145 -- networks/monero/ringct/borromean/Cargo.toml | 41 - networks/monero/ringct/borromean/LICENSE | 21 - networks/monero/ringct/borromean/README.md | 12 - networks/monero/ringct/borromean/src/lib.rs | 112 -- .../monero/ringct/bulletproofs/Cargo.toml | 55 - networks/monero/ringct/bulletproofs/LICENSE | 21 - networks/monero/ringct/bulletproofs/README.md | 14 - networks/monero/ringct/bulletproofs/build.rs | 84 -- .../ringct/bulletproofs/src/batch_verifier.rs | 106 -- .../monero/ringct/bulletproofs/src/core.rs | 74 - .../monero/ringct/bulletproofs/src/lib.rs | 311 ---- .../src/original/inner_product.rs | 307 ---- .../ringct/bulletproofs/src/original/mod.rs | 342 ----- .../src/plus/aggregate_range_proof.rs | 264 ---- .../ringct/bulletproofs/src/plus/mod.rs | 84 -- .../bulletproofs/src/plus/transcript.rs | 17 - .../src/plus/weighted_inner_product.rs | 409 ----- .../ringct/bulletproofs/src/point_vector.rs | 59 - .../ringct/bulletproofs/src/scalar_vector.rs | 138 -- .../ringct/bulletproofs/src/tests/mod.rs | 56 - .../src/tests/original/inner_product.rs | 75 - .../bulletproofs/src/tests/original/mod.rs | 62 - .../src/tests/plus/aggregate_range_proof.rs | 28 - 
.../ringct/bulletproofs/src/tests/plus/mod.rs | 4 - .../src/tests/plus/weighted_inner_product.rs | 82 -- networks/monero/ringct/clsag/Cargo.toml | 65 - networks/monero/ringct/clsag/LICENSE | 21 - networks/monero/ringct/clsag/README.md | 15 - networks/monero/ringct/clsag/src/lib.rs | 434 ------ networks/monero/ringct/clsag/src/multisig.rs | 389 ----- networks/monero/ringct/clsag/src/tests.rs | 131 -- networks/monero/ringct/mlsag/Cargo.toml | 45 - networks/monero/ringct/mlsag/LICENSE | 21 - networks/monero/ringct/mlsag/README.md | 11 - networks/monero/ringct/mlsag/src/lib.rs | 242 --- networks/monero/rpc/Cargo.toml | 47 - networks/monero/rpc/LICENSE | 21 - networks/monero/rpc/README.md | 11 - networks/monero/rpc/simple-request/Cargo.toml | 30 - networks/monero/rpc/simple-request/LICENSE | 21 - networks/monero/rpc/simple-request/README.md | 3 - networks/monero/rpc/simple-request/src/lib.rs | 278 ---- .../monero/rpc/simple-request/tests/tests.rs | 144 -- networks/monero/rpc/src/lib.rs | 1310 ----------------- networks/monero/src/block.rs | 165 --- networks/monero/src/lib.rs | 39 - networks/monero/src/merkle.rs | 55 - networks/monero/src/ring_signatures.rs | 101 -- networks/monero/src/ringct.rs | 478 ------ networks/monero/src/tests/mod.rs | 1 - networks/monero/src/tests/transaction.rs | 287 ---- .../monero/src/tests/vectors/clsag_tx.json | 78 - .../monero/src/tests/vectors/ring_data.json | 134 -- .../src/tests/vectors/transactions.json | 324 ---- networks/monero/src/transaction.rs | 635 -------- networks/monero/tests/tests.rs | 3 - networks/monero/verify-chain/Cargo.toml | 32 - networks/monero/verify-chain/LICENSE | 21 - networks/monero/verify-chain/README.md | 7 - networks/monero/verify-chain/src/main.rs | 284 ---- networks/monero/wallet/Cargo.toml | 81 - networks/monero/wallet/LICENSE | 21 - networks/monero/wallet/README.md | 58 - networks/monero/wallet/address/Cargo.toml | 49 - networks/monero/wallet/address/LICENSE | 21 - networks/monero/wallet/address/README.md | 11 - .../monero/wallet/address/src/base58check.rs | 107 -- networks/monero/wallet/address/src/lib.rs | 505 ------- networks/monero/wallet/address/src/tests.rs | 205 --- .../src/vectors/featured_addresses.json | 230 --- networks/monero/wallet/src/decoys.rs | 331 ----- networks/monero/wallet/src/extra.rs | 304 ---- networks/monero/wallet/src/lib.rs | 163 -- networks/monero/wallet/src/output.rs | 376 ----- networks/monero/wallet/src/scan.rs | 381 ----- .../monero/wallet/src/send/eventuality.rs | 137 -- networks/monero/wallet/src/send/mod.rs | 610 -------- networks/monero/wallet/src/send/multisig.rs | 304 ---- networks/monero/wallet/src/send/tx.rs | 338 ----- networks/monero/wallet/src/send/tx_keys.rs | 281 ---- networks/monero/wallet/src/tests/extra.rs | 202 --- networks/monero/wallet/src/tests/mod.rs | 2 - networks/monero/wallet/src/tests/scan.rs | 168 --- networks/monero/wallet/src/view_pair.rs | 144 -- networks/monero/wallet/tests/add_data.rs | 82 -- networks/monero/wallet/tests/decoys.rs | 163 -- networks/monero/wallet/tests/eventuality.rs | 80 - .../monero/wallet/tests/runner/builder.rs | 82 -- networks/monero/wallet/tests/runner/mod.rs | 361 ----- networks/monero/wallet/tests/scan.rs | 160 -- networks/monero/wallet/tests/send.rs | 401 ----- .../wallet/tests/wallet2_compatibility.rs | 361 ----- processor/Cargo.toml | 4 +- substrate/client/Cargo.toml | 2 +- tests/full-stack/Cargo.toml | 4 +- tests/no-std/Cargo.toml | 2 - tests/no-std/src/lib.rs | 2 - tests/processor/Cargo.toml | 4 +- 124 files changed, 55 insertions(+), 17383 
deletions(-) delete mode 100644 .github/workflows/monero-tests.yaml delete mode 100644 networks/monero/Cargo.toml delete mode 100644 networks/monero/LICENSE delete mode 100644 networks/monero/README.md delete mode 100644 networks/monero/generators/Cargo.toml delete mode 100644 networks/monero/generators/LICENSE delete mode 100644 networks/monero/generators/README.md delete mode 100644 networks/monero/generators/src/hash_to_point.rs delete mode 100644 networks/monero/generators/src/lib.rs delete mode 100644 networks/monero/generators/src/tests/mod.rs delete mode 100644 networks/monero/generators/src/tests/tests.txt delete mode 100644 networks/monero/io/Cargo.toml delete mode 100644 networks/monero/io/LICENSE delete mode 100644 networks/monero/io/README.md delete mode 100644 networks/monero/io/src/lib.rs delete mode 100644 networks/monero/primitives/Cargo.toml delete mode 100644 networks/monero/primitives/LICENSE delete mode 100644 networks/monero/primitives/README.md delete mode 100644 networks/monero/primitives/src/lib.rs delete mode 100644 networks/monero/primitives/src/tests.rs delete mode 100644 networks/monero/primitives/src/unreduced_scalar.rs delete mode 100644 networks/monero/ringct/borromean/Cargo.toml delete mode 100644 networks/monero/ringct/borromean/LICENSE delete mode 100644 networks/monero/ringct/borromean/README.md delete mode 100644 networks/monero/ringct/borromean/src/lib.rs delete mode 100644 networks/monero/ringct/bulletproofs/Cargo.toml delete mode 100644 networks/monero/ringct/bulletproofs/LICENSE delete mode 100644 networks/monero/ringct/bulletproofs/README.md delete mode 100644 networks/monero/ringct/bulletproofs/build.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/batch_verifier.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/core.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/lib.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/original/inner_product.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/original/mod.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/plus/mod.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/plus/transcript.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/point_vector.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/scalar_vector.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/tests/mod.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/tests/original/inner_product.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/tests/original/mod.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/tests/plus/aggregate_range_proof.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/tests/plus/mod.rs delete mode 100644 networks/monero/ringct/bulletproofs/src/tests/plus/weighted_inner_product.rs delete mode 100644 networks/monero/ringct/clsag/Cargo.toml delete mode 100644 networks/monero/ringct/clsag/LICENSE delete mode 100644 networks/monero/ringct/clsag/README.md delete mode 100644 networks/monero/ringct/clsag/src/lib.rs delete mode 100644 networks/monero/ringct/clsag/src/multisig.rs delete mode 100644 networks/monero/ringct/clsag/src/tests.rs delete mode 100644 networks/monero/ringct/mlsag/Cargo.toml delete mode 100644 networks/monero/ringct/mlsag/LICENSE delete mode 100644 
networks/monero/ringct/mlsag/README.md delete mode 100644 networks/monero/ringct/mlsag/src/lib.rs delete mode 100644 networks/monero/rpc/Cargo.toml delete mode 100644 networks/monero/rpc/LICENSE delete mode 100644 networks/monero/rpc/README.md delete mode 100644 networks/monero/rpc/simple-request/Cargo.toml delete mode 100644 networks/monero/rpc/simple-request/LICENSE delete mode 100644 networks/monero/rpc/simple-request/README.md delete mode 100644 networks/monero/rpc/simple-request/src/lib.rs delete mode 100644 networks/monero/rpc/simple-request/tests/tests.rs delete mode 100644 networks/monero/rpc/src/lib.rs delete mode 100644 networks/monero/src/block.rs delete mode 100644 networks/monero/src/lib.rs delete mode 100644 networks/monero/src/merkle.rs delete mode 100644 networks/monero/src/ring_signatures.rs delete mode 100644 networks/monero/src/ringct.rs delete mode 100644 networks/monero/src/tests/mod.rs delete mode 100644 networks/monero/src/tests/transaction.rs delete mode 100644 networks/monero/src/tests/vectors/clsag_tx.json delete mode 100644 networks/monero/src/tests/vectors/ring_data.json delete mode 100644 networks/monero/src/tests/vectors/transactions.json delete mode 100644 networks/monero/src/transaction.rs delete mode 100644 networks/monero/tests/tests.rs delete mode 100644 networks/monero/verify-chain/Cargo.toml delete mode 100644 networks/monero/verify-chain/LICENSE delete mode 100644 networks/monero/verify-chain/README.md delete mode 100644 networks/monero/verify-chain/src/main.rs delete mode 100644 networks/monero/wallet/Cargo.toml delete mode 100644 networks/monero/wallet/LICENSE delete mode 100644 networks/monero/wallet/README.md delete mode 100644 networks/monero/wallet/address/Cargo.toml delete mode 100644 networks/monero/wallet/address/LICENSE delete mode 100644 networks/monero/wallet/address/README.md delete mode 100644 networks/monero/wallet/address/src/base58check.rs delete mode 100644 networks/monero/wallet/address/src/lib.rs delete mode 100644 networks/monero/wallet/address/src/tests.rs delete mode 100644 networks/monero/wallet/address/src/vectors/featured_addresses.json delete mode 100644 networks/monero/wallet/src/decoys.rs delete mode 100644 networks/monero/wallet/src/extra.rs delete mode 100644 networks/monero/wallet/src/lib.rs delete mode 100644 networks/monero/wallet/src/output.rs delete mode 100644 networks/monero/wallet/src/scan.rs delete mode 100644 networks/monero/wallet/src/send/eventuality.rs delete mode 100644 networks/monero/wallet/src/send/mod.rs delete mode 100644 networks/monero/wallet/src/send/multisig.rs delete mode 100644 networks/monero/wallet/src/send/tx.rs delete mode 100644 networks/monero/wallet/src/send/tx_keys.rs delete mode 100644 networks/monero/wallet/src/tests/extra.rs delete mode 100644 networks/monero/wallet/src/tests/mod.rs delete mode 100644 networks/monero/wallet/src/tests/scan.rs delete mode 100644 networks/monero/wallet/src/view_pair.rs delete mode 100644 networks/monero/wallet/tests/add_data.rs delete mode 100644 networks/monero/wallet/tests/decoys.rs delete mode 100644 networks/monero/wallet/tests/eventuality.rs delete mode 100644 networks/monero/wallet/tests/runner/builder.rs delete mode 100644 networks/monero/wallet/tests/runner/mod.rs delete mode 100644 networks/monero/wallet/tests/scan.rs delete mode 100644 networks/monero/wallet/tests/send.rs delete mode 100644 networks/monero/wallet/tests/wallet2_compatibility.rs diff --git a/.github/workflows/monero-tests.yaml b/.github/workflows/monero-tests.yaml deleted file mode 
100644 index a72a85a5..00000000 --- a/.github/workflows/monero-tests.yaml +++ /dev/null @@ -1,72 +0,0 @@ -name: Monero Tests - -on: - push: - branches: - - develop - paths: - - "networks/monero/**" - - "processor/**" - - pull_request: - paths: - - "networks/monero/**" - - "processor/**" - - workflow_dispatch: - -jobs: - # Only run these once since they will be consistent regardless of any node - unit-tests: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - - - name: Test Dependencies - uses: ./.github/actions/test-dependencies - - - name: Run Unit Tests Without Features - run: | - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib - - # Doesn't run unit tests with features as the tests workflow will - - integration-tests: - runs-on: ubuntu-latest - # Test against all supported protocol versions - strategy: - matrix: - version: [v0.17.3.2, v0.18.3.4] - - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac - - - name: Test Dependencies - uses: ./.github/actions/test-dependencies - with: - monero-version: ${{ matrix.version }} - - - name: Run Integration Tests Without Features - run: | - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*' - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*' - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*' - - - name: Run Integration Tests - # Don't run if the the tests workflow also will - if: ${{ matrix.version != 'v0.18.3.4' }} - run: | - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*' - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*' - GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*' diff --git a/.github/workflows/networks-tests.yml b/.github/workflows/networks-tests.yml index 5966a6a8..224770d1 100644 --- a/.github/workflows/networks-tests.yml +++ b/.github/workflows/networks-tests.yml @@ -33,16 +33,3 @@ jobs: -p alloy-simple-request-transport \ -p ethereum-serai \ -p serai-ethereum-relayer \ - -p monero-io \ - -p monero-generators \ - -p monero-primitives \ - -p monero-mlsag \ - -p monero-clsag \ - -p monero-borromean \ - -p monero-bulletproofs \ - -p monero-serai \ - -p monero-rpc \ - -p monero-simple-request-rpc \ - -p monero-address \ - -p monero-wallet \ - -p monero-serai-verify-chain diff --git a/Cargo.lock b/Cargo.lock index 73cd8128..799988d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4845,15 +4845,11 @@ dependencies = [ [[package]] name = "monero-address" version = "0.1.0" +source = 
"git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", - "hex", - "hex-literal", "monero-io", "monero-primitives", - "rand_core", - "serde", - "serde_json", "std-shims", "thiserror 1.0.64", "zeroize", @@ -4862,6 +4858,7 @@ dependencies = [ [[package]] name = "monero-borromean" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "monero-generators", @@ -4874,9 +4871,9 @@ dependencies = [ [[package]] name = "monero-bulletproofs" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", - "hex-literal", "monero-generators", "monero-io", "monero-primitives", @@ -4889,6 +4886,7 @@ dependencies = [ [[package]] name = "monero-clsag" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "dalek-ff-group", @@ -4909,11 +4907,11 @@ dependencies = [ [[package]] name = "monero-generators" version = "0.4.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "dalek-ff-group", "group", - "hex", "monero-io", "sha3", "std-shims", @@ -4923,6 +4921,7 @@ dependencies = [ [[package]] name = "monero-io" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "std-shims", @@ -4931,6 +4930,7 @@ dependencies = [ [[package]] name = "monero-mlsag" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "monero-generators", @@ -4942,11 +4942,29 @@ dependencies = [ ] [[package]] -name = "monero-primitives" -version = "0.1.0" +name = "monero-oxide" +version = "0.1.4-alpha" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +dependencies = [ + "curve25519-dalek", + "hex-literal", + "monero-borromean", + "monero-bulletproofs", + "monero-clsag", + "monero-generators", + "monero-io", + "monero-mlsag", + "monero-primitives", + "std-shims", + "zeroize", +] + +[[package]] +name = "monero-primitives" +version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", - "hex", "monero-generators", "monero-io", "sha3", @@ -4957,11 +4975,12 @@ dependencies = [ [[package]] name = "monero-rpc" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "hex", "monero-address", - "monero-serai", + "monero-oxide", "serde", "serde_json", "std-shims", @@ -4969,48 +4988,13 @@ dependencies = [ "zeroize", ] -[[package]] -name = "monero-serai" -version = "0.1.4-alpha" -dependencies = [ - "curve25519-dalek", - 
"hex", - "hex-literal", - "monero-borromean", - "monero-bulletproofs", - "monero-clsag", - "monero-generators", - "monero-io", - "monero-mlsag", - "monero-primitives", - "serde", - "serde_json", - "std-shims", - "zeroize", -] - -[[package]] -name = "monero-serai-verify-chain" -version = "0.1.0" -dependencies = [ - "curve25519-dalek", - "hex", - "monero-rpc", - "monero-serai", - "monero-simple-request-rpc", - "rand_core", - "serde", - "serde_json", - "tokio", -] - [[package]] name = "monero-simple-request-rpc" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "digest_auth", "hex", - "monero-address", "monero-rpc", "simple-request", "tokio", @@ -5020,6 +5004,7 @@ dependencies = [ [[package]] name = "monero-wallet" version = "0.1.0" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" dependencies = [ "curve25519-dalek", "dalek-ff-group", @@ -5029,18 +5014,14 @@ dependencies = [ "modular-frost", "monero-address", "monero-clsag", + "monero-oxide", "monero-rpc", - "monero-serai", - "monero-simple-request-rpc", "rand", "rand_chacha", "rand_core", "rand_distr", - "serde", - "serde_json", "std-shims", "thiserror 1.0.64", - "tokio", "zeroize", ] @@ -8346,7 +8327,6 @@ dependencies = [ "dleq", "flexible-transcript", "minimal-ed448", - "monero-wallet", "multiexp", "schnorr-signatures", ] diff --git a/Cargo.toml b/Cargo.toml index 06ea12c6..e0911b41 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,20 +43,6 @@ members = [ "networks/ethereum", "networks/ethereum/relayer", - "networks/monero/io", - "networks/monero/generators", - "networks/monero/primitives", - "networks/monero/ringct/mlsag", - "networks/monero/ringct/clsag", - "networks/monero/ringct/borromean", - "networks/monero/ringct/bulletproofs", - "networks/monero", - "networks/monero/rpc", - "networks/monero/rpc/simple-request", - "networks/monero/wallet/address", - "networks/monero/wallet", - "networks/monero/verify-chain", - "message-queue", "processor/messages", @@ -126,13 +112,20 @@ minimal-ed448 = { opt-level = 3 } multiexp = { opt-level = 3 } -monero-serai = { opt-level = 3 } +monero-oxide = { opt-level = 3 } [profile.release] panic = "unwind" overflow-checks = true [patch.crates-io] +# Dependencies from monero-oxide which originate from within our own tree +std-shims = { path = "common/std-shims" } +simple-request = { path = "common/request" } +dalek-ff-group = { path = "crypto/dalek-ff-group" } +flexible-transcript = { path = "crypto/transcript" } +modular-frost = { path = "crypto/frost" } + # https://github.com/rust-lang-nursery/lazy-static.rs/issues/201 lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" } diff --git a/deny.toml b/deny.toml index c5fe2808..881761c8 100644 --- a/deny.toml +++ b/deny.toml @@ -103,6 +103,7 @@ unknown-git = "deny" allow-registry = ["https://github.com/rust-lang/crates.io-index"] allow-git = [ "https://github.com/rust-lang-nursery/lazy-static.rs", + "https://github.com/monero-oxide/monero-oxide", "https://github.com/serai-dex/substrate-bip39", "https://github.com/serai-dex/substrate", ] diff --git a/networks/ethereum/README.md b/networks/ethereum/README.md index 0090b26b..75cd7881 100644 --- a/networks/ethereum/README.md +++ b/networks/ethereum/README.md @@ -1,13 +1,12 @@ # Ethereum -This package contains 
Ethereum-related functionality, specifically deploying and -interacting with Serai contracts. +This package contains Ethereum-related functionality, specifically deploying +and interacting with Serai contracts. -While `monero-serai` and `bitcoin-serai` are general purpose libraries, -`ethereum-serai` is Serai specific. If any of the utilities are generally -desired, please fork and maintain your own copy to ensure the desired -functionality is preserved, or open an issue to request we make this library -general purpose. +While `bitcoin-serai` is a general purpose library, `ethereum-serai` is Serai +specific. If any of the utilities are generally desired, please fork and +maintain your own copy to ensure the desired functionality is preserved, or +open an issue to request we make this library general purpose. ### Dependencies diff --git a/networks/monero/Cargo.toml b/networks/monero/Cargo.toml deleted file mode 100644 index 3d43df6e..00000000 --- a/networks/monero/Cargo.toml +++ /dev/null @@ -1,56 +0,0 @@ -[package] -name = "monero-serai" -version = "0.1.4-alpha" -description = "A modern Monero transaction library" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -monero-io = { path = "io", version = "0.1", default-features = false } -monero-generators = { path = "generators", version = "0.4", default-features = false } -monero-primitives = { path = "primitives", version = "0.1", default-features = false } -monero-mlsag = { path = "ringct/mlsag", version = "0.1", default-features = false } -monero-clsag = { path = "ringct/clsag", version = "0.1", default-features = false } -monero-borromean = { path = "ringct/borromean", version = "0.1", default-features = false } -monero-bulletproofs = { path = "ringct/bulletproofs", version = "0.1", default-features = false } - -hex-literal = "0.4" - -[dev-dependencies] -hex = { version = "0.4", default-features = false, features = ["std"] } -serde = { version = "1", default-features = false, features = ["std", "derive"] } -serde_json = { version = "1", default-features = false, features = ["std"] } - -[features] -std = [ - "std-shims/std", - - "zeroize/std", - - "monero-io/std", - "monero-generators/std", - "monero-primitives/std", - "monero-mlsag/std", - "monero-clsag/std", - "monero-borromean/std", - "monero-bulletproofs/std", -] - -compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"] -default = ["std", "compile-time-generators"] diff --git a/networks/monero/LICENSE b/networks/monero/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies 
of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/README.md b/networks/monero/README.md deleted file mode 100644 index 24fc1480..00000000 --- a/networks/monero/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# monero-serai - -A modern Monero transaction library. It provides a modern, Rust-friendly view of -the Monero protocol. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -Recommended usage of the library is with `overflow-checks = true`, even for -release builds. - -### Wallet Functionality - -monero-serai originally included wallet functionality. That has been moved to -monero-wallet. - -### Purpose and Support - -monero-serai was written for Serai, a decentralized exchange aiming to support -Monero. Despite this, monero-serai is intended to be a widely usable library, -accurate to Monero. monero-serai guarantees the functionality needed for Serai, -yet does not include any functionality specific to Serai. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). -- `compile-time-generators` (on by default): Derives the generators at - compile-time so they don't need to be derived at runtime. This is recommended - if program size doesn't need to be kept minimal. -- `multisig`: Enables the `multisig` feature for all dependencies. 
diff --git a/networks/monero/generators/Cargo.toml b/networks/monero/generators/Cargo.toml deleted file mode 100644 index af8cbcd9..00000000 --- a/networks/monero/generators/Cargo.toml +++ /dev/null @@ -1,46 +0,0 @@ -[package] -name = "monero-generators" -version = "0.4.0" -description = "Monero's hash to point function and generators" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/generators" -authors = ["Luke Parker "] -edition = "2021" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false } - -subtle = { version = "^2.4", default-features = false } - -sha3 = { version = "0.10", default-features = false } -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -group = { version = "0.13", default-features = false } -dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false } - -monero-io = { path = "../io", version = "0.1", default-features = false } - -[dev-dependencies] -hex = "0.4" - -[features] -std = [ - "std-shims/std", - - "subtle/std", - - "sha3/std", - - "group/alloc", - "dalek-ff-group/std", - - "monero-io/std" -] -default = ["std"] diff --git a/networks/monero/generators/LICENSE b/networks/monero/generators/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/generators/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/generators/README.md b/networks/monero/generators/README.md deleted file mode 100644 index e9ac925b..00000000 --- a/networks/monero/generators/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Monero Generators - -Generators used by Monero in both its Pedersen commitments and Bulletproofs(+). -An implementation of Monero's `hash_to_ec` is included, as needed to generate -the generators. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). 
diff --git a/networks/monero/generators/src/hash_to_point.rs b/networks/monero/generators/src/hash_to_point.rs deleted file mode 100644 index 136fea84..00000000 --- a/networks/monero/generators/src/hash_to_point.rs +++ /dev/null @@ -1,71 +0,0 @@ -use subtle::ConditionallySelectable; - -use curve25519_dalek::edwards::EdwardsPoint; - -use group::ff::{Field, PrimeField}; -use dalek_ff_group::FieldElement; - -use monero_io::decompress_point; - -use crate::keccak256; - -/// Monero's `hash_to_ec` function. -pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint { - #[allow(non_snake_case)] - let A = FieldElement::from(486662u64); - - let v = FieldElement::from_square(keccak256(&bytes)).double(); - let w = v + FieldElement::ONE; - let x = w.square() + (-A.square() * v); - - // This isn't the complete X, yet its initial value - // We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X - // While inefficient, it solves API boundaries and reduces the amount of work done here - #[allow(non_snake_case)] - let X = { - let u = w; - let v = x; - let v3 = v * v * v; - let uv3 = u * v3; - let v7 = v3 * v3 * v; - let uv7 = u * v7; - uv3 * - uv7.pow( - (-FieldElement::from(5u8)) * - FieldElement::from(8u8).invert().expect("eight was coprime with the prime 2^{255}-19"), - ) - }; - let x = X.square() * x; - - let y = w - x; - let non_zero_0 = !y.is_zero(); - let y_if_non_zero_0 = w + x; - let sign = non_zero_0 & (!y_if_non_zero_0.is_zero()); - - let mut z = -A; - z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign); - #[allow(non_snake_case)] - let Z = z + w; - #[allow(non_snake_case)] - let mut Y = z - w; - - /* - If sign, `z = -486662`, else, `z = -486662 * v` - `w = v + 1` - - We need `z + w \ne 0`, which would require `z \cong -w \mod 2^{255}-19`. This requires: - - If `sign`, `v \mod 2^{255}-19 \ne 486661`. - - If `!sign`, `(v + 1) \mod 2^{255}-19 \ne (v * 486662) \mod 2^{255}-19` which is equivalent to - `(v * 486661) \mod 2^{255}-19 \ne 1`. - - In summary, if `sign`, `v` must not `486661`, and if `!sign`, `v` must not be the - multiplicative inverse of `486661`. Since `v` is the output of a hash function, this should - have negligible probability. Additionally, since the definition of `sign` is dependent on `v`, - it may be truly impossible to reach. - */ - Y *= Z.invert().expect("if sign, v was 486661. if !sign, v was 486661^{-1}"); - let mut bytes = Y.to_repr(); - bytes[31] |= sign.unwrap_u8() << 7; - - decompress_point(bytes).expect("point from hash-to-curve wasn't on-curve").mul_by_cofactor() -} diff --git a/networks/monero/generators/src/lib.rs b/networks/monero/generators/src/lib.rs deleted file mode 100644 index 6256eecf..00000000 --- a/networks/monero/generators/src/lib.rs +++ /dev/null @@ -1,89 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use std_shims::{sync::LazyLock, vec::Vec}; - -use sha3::{Digest, Keccak256}; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, edwards::EdwardsPoint}; - -use monero_io::{write_varint, decompress_point}; - -mod hash_to_point; -pub use hash_to_point::hash_to_point; - -#[cfg(test)] -mod tests; - -fn keccak256(data: &[u8]) -> [u8; 32] { - Keccak256::digest(data).into() -} - -/// Monero's `H` generator. -/// -/// Contrary to convention (`G` for values, `H` for randomness), `H` is used by Monero for amounts -/// within Pedersen commitments. 
-#[allow(non_snake_case)] -pub static H: LazyLock = LazyLock::new(|| { - decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes())) - .expect("known on-curve point wasn't on-curve") - .mul_by_cofactor() -}); - -static H_POW_2_CELL: LazyLock<[EdwardsPoint; 64]> = LazyLock::new(|| { - let mut res = [*H; 64]; - for i in 1 .. 64 { - res[i] = res[i - 1] + res[i - 1]; - } - res -}); -/// Monero's `H` generator, multiplied by 2**i for i in 1 ..= 64. -/// -/// This table is useful when working with amounts, which are u64s. -#[allow(non_snake_case)] -pub fn H_pow_2() -> &'static [EdwardsPoint; 64] { - &H_POW_2_CELL -} - -/// The maximum amount of commitments provable for within a single range proof. -pub const MAX_COMMITMENTS: usize = 16; -/// The amount of bits a value within a commitment may use. -pub const COMMITMENT_BITS: usize = 64; - -/// Container struct for Bulletproofs(+) generators. -#[allow(non_snake_case)] -pub struct Generators { - /// The G (bold) vector of generators. - pub G: Vec, - /// The H (bold) vector of generators. - pub H: Vec, -} - -/// Generate generators as needed for Bulletproofs(+), as Monero does. -/// -/// Consumers should not call this function ad-hoc, yet call it within a build script or use a -/// once-initialized static. -pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators { - // The maximum amount of bits used within a single range proof. - const MAX_MN: usize = MAX_COMMITMENTS * COMMITMENT_BITS; - - let mut preimage = H.compress().to_bytes().to_vec(); - preimage.extend(dst); - - let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) }; - for i in 0 .. MAX_MN { - // We generate a pair of generators per iteration - let i = 2 * i; - - let mut even = preimage.clone(); - write_varint(&i, &mut even).expect("write failed but doesn't fail"); - res.H.push(hash_to_point(keccak256(&even))); - - let mut odd = preimage.clone(); - write_varint(&(i + 1), &mut odd).expect("write failed but doesn't fail"); - res.G.push(hash_to_point(keccak256(&odd))); - } - res -} diff --git a/networks/monero/generators/src/tests/mod.rs b/networks/monero/generators/src/tests/mod.rs deleted file mode 100644 index 3ab9449f..00000000 --- a/networks/monero/generators/src/tests/mod.rs +++ /dev/null @@ -1,36 +0,0 @@ -use crate::{decompress_point, hash_to_point}; - -#[test] -fn test_vectors() { - // tests.txt file copied from monero repo - // https://github.com/monero-project/monero/ - // blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt - let reader = include_str!("./tests.txt"); - - for line in reader.lines() { - let mut words = line.split_whitespace(); - let command = words.next().unwrap(); - - match command { - "check_key" => { - let key = words.next().unwrap(); - let expected = match words.next().unwrap() { - "true" => true, - "false" => false, - _ => unreachable!("invalid result"), - }; - - let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap()); - assert_eq!(actual.is_some(), expected); - } - "hash_to_ec" => { - let bytes = words.next().unwrap(); - let expected = words.next().unwrap(); - - let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap()); - assert_eq!(hex::encode(actual.compress().to_bytes()), expected); - } - _ => unreachable!("unknown command"), - } - } -} diff --git a/networks/monero/generators/src/tests/tests.txt b/networks/monero/generators/src/tests/tests.txt deleted file mode 100644 index 6cc27a74..00000000 --- a/networks/monero/generators/src/tests/tests.txt 
+++ /dev/null @@ -1,628 +0,0 @@ -check_key c2cb3cf3840aa9893e00ec77093d3d44dba7da840b51c48462072d58d8efd183 false -check_key bd85a61bae0c101d826cbed54b1290f941d26e70607a07fc6f0ad611eb8f70a6 true -check_key 328f81cad4eba24ab2bad7c0e56b1e2e7346e625bcb06ae649aef3ffa0b8bef3 false -check_key 6016a5463b9e5a58c3410d3f892b76278883473c3f0b69459172d3de49e85abe true -check_key 4c71282b2add07cdc6898a2622553f1ca4eb851e5cb121181628be5f3814c5b1 false -check_key 69393c25c3b50e177f81f20f852dd604e768eb30052e23108b3cfa1a73f2736e true -check_key 3d5a89b676cb84c2be3428d20a660dc6a37cae13912e127888a5132e8bac2163 true -check_key 78cd665deb28cebc6208f307734c56fccdf5fa7e2933fadfcdd2b6246e9ae95c false -check_key e03b2414e260580f86ee294cd4c636a5b153e617f704e81dad248fbf715b2ee4 true -check_key 28c3503ce82d7cdc8e0d96c4553bcf0352bbcfc73925495dbe541e7e1df105fc false -check_key 06855c3c3e0d03fec354059bda319b39916bdc10b6581e3f41b335ee7b014fd5 false -check_key 556381485df0d7d5a268ab5ecfb2984b060acc63471183fcf538bf273b0c0cb5 true -check_key c7f76d82ac64b1e7fdc32761ff00d6f0f7ada4cf223aa5a11187e3a02e1d5319 true -check_key cfa85d8bdb6f633fcf031adee3a299ac42eeb6bd707744049f652f6322f5aa47 true -check_key 91e9b63ced2b08979fee713365464cc3417c4f238f9bdd3396efbb3c58e195ee true -check_key 7b56e76fe94bd30b3b2f2c4ba5fe4c504821753a8965eb1cbcf8896e2d6aba19 true -check_key 7338df494bc416cf5edcc02069e067f39cb269ce67bd9faba956021ce3b3de3a false -check_key f9a1f27b1618342a558379f4815fa5039a8fe9d98a09f45c1af857ba99231dc1 false -check_key b2a1f37718180d4448a7fcb5f788048b1a7132dde1cfd25f0b9b01776a21c687 true -check_key 0d3a0f9443a8b24510ad1e76a8117cca03bce416edfe35e3c2a2c2712454f8dc false -check_key d8d3d806a76f120c4027dc9c9d741ad32e06861b9cfbc4ce39289c04e251bb3c false -check_key 1e9e3ba7bc536cd113606842835d1f05b4b9e65875742f3a35bfb2d63164b5d5 true -check_key 5c52d0087997a2cdf1d01ed0560d94b4bfd328cb741cb9a8d46ff50374b35a57 true -check_key bb669d4d7ffc4b91a14defedcdbd96b330108b01adc63aa685e2165284c0033b false -check_key d2709ae751a0a6fd796c98456fa95a7b64b75a3434f1caa3496eeaf5c14109b4 true -check_key e0c238cba781684e655b10a7d4af04ab7ff2e7022182d7ed2279d6adf36b3e7a false -check_key 34ebb4bf871572cee5c6935716fab8c8ec28feef4f039763d8f039b84a50bf4c false -check_key 4730d4f38ec3f3b83e32e6335d2506df4ee39858848842c5a0184417fcc639e4 true -check_key d42cf7fdf5e17e0a8a7f88505a2b7a3d297113bd93d3c20fa87e11509ec905a2 true -check_key b757c95059cefabb0080d3a8ebca82e46efecfd29881be3121857f9d915e388c false -check_key bbe777aaf04d02b96c0632f4b1c6f35f1c7bcbc5f22af192f92c077709a2b50b false -check_key 73518522aabd28566f858c33fccb34b7a4de0e283f6f783f625604ee647afad9 true -check_key f230622c4a8f6e516590466bd10f86b64fbef61695f6a054d37604e0b024d5af false -check_key bc6b9a8379fd6c369f7c3bd9ddce58db6b78f27a41d798bb865c3920824d0943 false -check_key 45a4f87c25898cd6be105fa1602b85c4d862782adaac8b85c996c4a2bcd8af47 true -check_key eb4ad3561d21c4311affbd7cc2c7ff5fd509f72f88ba67dc097a75c31fdbd990 false -check_key 2f34f4630c09a23b7ecc19f02b4190a26df69e07e13de8069ae5ff80d23762fc true -check_key 2ea4e4fb5085eb5c8adee0d5ab7d35c67d74d343bd816cd13924536cffc2527c true -check_key 5d35467ee6705a0d35818aa9ae94e4603c3e5500bfc4cf4c4f77a7160a597aa6 true -check_key 8ff42bc76796e20c99b6e879369bd4b46a256db1366416291de9166e39d5a093 true -check_key 0262ba718850df6c621e8a24cd9e4831c047e38818a89e15c7a06a489a4558e1 false -check_key 58b29b2ba238b534b08fb46f05f430e61cb77dc251b0bb50afec1b6061fd9247 false -check_key 153170e3dc2b0e1b368fc0d0e31053e872f094cdace9a2846367f0d9245a109b false -check_key 
40419d309d07522d493bb047ca9b5fb6c401aae226eefae6fd395f5bb9114200 true -check_key 713068818d256ef69c78cd6082492013fbd48de3c9e7e076415dd0a692994504 true -check_key a7218ee08e50781b0c87312d5e0031467e863c10081668e3792d96cbcee4e474 true -check_key 356ce516b00e674ef1729c75b0a68090e7265cef675bbf32bf809495b67e9342 false -check_key 52a5c053293675e3efd2c585047002ea6d77931cbf38f541b9070d319dc0d237 false -check_key 77c0080bf157e069b18c4c604cc9505c5ec6f0f9930e087592d70507ca1b5534 false -check_key e733bc41f880a4cfb1ca6f397916504130807289cacfca10b15f5b8d058ed1bf false -check_key c4f1d3c884908a574ecea8be10e02277de35ef84a1d10f105f2be996f285161f true -check_key aed677f7f69e146aa0863606ac580fc0bbdc22a88c4b4386abaa4bdfff66bcc9 false -check_key 6ad0edf59769599af8caa986f502afc67aecbebb8107aaf5e7d3ae51d5cf8dd8 false -check_key 64a0a70e99be1f775c222ee9cd6f1bee6f632cb9417899af398ff9aff70661c6 true -check_key c63afaa03bb5c4ed7bc77aac175dbfb73f904440b2e3056a65850ac1bd261332 false -check_key a4e89cd2471c26951513b1cfbdcf053a86575e095af52495276aa56ede8ce344 false -check_key 2ce935d97f7c3ddb973de685d20f58ee39938fe557216328045ec2b83f3132be true -check_key 3e3d38b1fca93c1559ac030d586616354c668aa76245a09e3fa6de55ac730973 true -check_key 8b81b9681f76a4254007fd07ed1ded25fc675973ccb23afd06074805194733a4 false -check_key 26d1c15dfc371489439e29bcef2afcf7ed01fac24960fdc2e7c20847a8067588 true -check_key 85c1199b5a4591fc4cc36d23660648c1b9cfbb0e9c47199fa3eea33299a3dcec false -check_key 60830ba5449c1f04ac54675dfc7cac7510106c4b7549852551f8fe65971123e2 false -check_key 3e43c28c024597b3b836e4bc16905047cbf6e841b80e0b8cd6a325049070c2a5 false -check_key 474792c16a0032343a6f28f4cb564747c3b1ea0b6a6b9a42f7c71d7cc3dd3b44 true -check_key c8ec5e67cb5786673085191881950a3ca20dde88f46851b01dd91c695cfbad16 true -check_key 861c4b24b24a87b8559e0bb665f84dcc506c147a909f335ae4573b92299f042f false -check_key 2c9e0fe3e4983d79f86c8c36928528f1bc90d94352ce427032cdef6906d84d0b true -check_key 9293742822c2dff63fdc1bf6645c864fd527cea2ddba6d4f3048d202fc340c9a true -check_key 3956422ad380ef19cb9fe360ef09cc7aaec7163eea4114392a7a0b2e2671914e true -check_key 5ae8e72cadda85e525922fec11bd53a261cf26ee230fe85a1187f831b1b2c258 false -check_key 973feca43a0baf450c30ace5dc19015e19400f0898316e28d9f3c631da31f99a true -check_key dd946c91a2077f45c5c16939e53859d9beabaf065e7b1b993d5e5cd385f8716e true -check_key b3928f2d67e47f6bd6da81f72e64908d8ff391af5689f0202c4c6fec7666ffe8 true -check_key 313382e82083697d7f9d256c3b3800b099b56c3ef33cacdccbd40a65622e25fc false -check_key 7d65380c12144802d39ed9306eed79fe165854273700437c0b4b50559800c058 true -check_key 4db5c20a49422fd27739c9ca80e2271a8a125dfcead22cb8f035d0e1b7b163be true -check_key dd76a9f565ef0e44d1531349ec4c5f7c3c387c2f5823e693b4952f4b0b70808c true -check_key 66430bf628eae23918c3ed17b42138db1f98c24819e55fc4a07452d0c85603eb true -check_key 9f0b677830c3f089c27daf724bb10be848537f8285de83ab0292d35afb617f77 false -check_key cbf98287391fb00b1e68ad64e9fb10198025864c099b8b9334d840457e673874 true -check_key a42552e9446e49a83aed9e3370506671216b2d1471392293b8fc2b81c81a73ee false -check_key fb3de55ac81a923d506a514602d65d004ec9d13e8b47e82d73af06da73006673 false -check_key e17abb78e58a4b72ff4ad7387b290f2811be880b394b8bcaae7748ac09930169 false -check_key 9ffbda7ace69753761cdb5eb01f75433efa5cdb6a4f1b664874182c6a95adcba true -check_key 507123c979179ea0a3f7f67fb485f71c8636ec4ec70aa47b92f3c707e7541a54 false -check_key f1d0b156571994ef578c61cb6545d34f834eb30e4357539a5633c862d4dffa91 false -check_key 
3de62311ec14f9ee95828c190b2dc3f03059d6119e8dfccb7323efc640e07c75 false -check_key 5e50bb48bc9f6dd11d52c1f0d10d8ae5674d7a4af89cbbce178dafc8a562e5fe false -check_key 20b2c16497be101995391ceefb979814b0ea76f1ed5b6987985bcdcd17b36a81 false -check_key d63bff73b914ce791c840e99bfae0d47afdb99c2375e33c8f149d0df03d97873 false -check_key 3f24b3d94b5ddd244e4c4e67a6d9f533f0396ca30454aa0ca799f21328b81d47 true -check_key 6a44c016f09225a6d2e830290719d33eb29b53b553eea7737ed3a6e297b2e7d2 true -check_key ff0f34df0c76c207b8340be2009db72f730c69c2bbfeea2013105eaccf1d1f8e true -check_key 4baf559869fe4e915e219c3c8d9a2330fc91e542a5a2a7311d4d59fee996f807 true -check_key 1632207dfef26e97d13b0d0035ea9468fc5a8a89b0990fce77bb143c9d7f3b67 true -check_key fcb3dee3993d1a47630f29410903dd03706bd5e81c5802e6f1b9095cbdb404d3 true -check_key fb527092b9809e3d27d7588c7ef89915a769b99c1e03e7f72bbead9ed837daae false -check_key 902b118d27d40ab9cbd55edd375801ce302cdb59e09c8659a3ea1401918d8bba false -check_key 4d6fbf25ca51e263a700f1abf84f758dde3d11b632e908b3093d64fe2e70ea0a true -check_key f4c3211ec70affc1c9a94a6589460ee8360dad5f8c679152f16994038532e3fc true -check_key c2b3d73ac14956d7fdf12fa92235af1bb09e1566a6a6ffd0025682c750abdd69 false -check_key b7e68c12207d2e2104fb2ca224829b6fccc1c0e2154e8a931e3c837a945f4430 false -check_key 56ca0ca227708f1099bda1463db9559541c8c11ffad7b3d95c717471f25a01bf true -check_key 3eef3a46833e4d851671182a682e344e36bea7211a001f3b8af1093a9c83f1b2 true -check_key bd1f4a4f26cab7c1cbc0e17049b90854d6d28d2d55181e1b5f7a8045fcdfa06e true -check_key 8537b01c87e7c184d9555e8d93363dcd9b60a8acc94cd3e41eb7525fd3e1d35a false -check_key 68ace49179d549bad391d98ab2cc8afee65f98ce14955c3c1b16e850fabec231 true -check_key f9922f8a660e7c3e4f3735a817d18b72f59166a0be2d99795f953cf233a27e24 true -check_key 036b6be3da26e80508d5a5a6a5999a1fe0db1ac4e9ade8f1ea2eaf2ea9b1a70e true -check_key 5e595e886ce16b5ea31f53bcb619f16c8437276618c595739fece6339731feb0 false -check_key 4ee2cebae3476ed2eeb7efef9d20958538b3642f938403302682a04115c0f8ed false -check_key 519eedbd0da8676063ce7d5a605b3fc27afeecded857afa24b894ad248c87b5d false -check_key ce2b627c0accf4a3105796680c37792b30c6337d2d4fea11678282455ff82ff7 false -check_key aa26ed99071a8416215e8e7ded784aa7c2b303aab67e66f7539905d7e922eb4d false -check_key 435ae49c9ca26758aa103bdcca8d51393b1906fe27a61c5245361e554f335ec2 true -check_key 42568af395bd30024f6ccc95205c0e11a6ad1a7ee100f0ec46fcdf0af88e91fb false -check_key 0b4a78d1fde56181445f04ca4780f0725daa9c375b496fab6c037d6b2c2275db true -check_key 2f82d2a3c8ce801e1ad334f9e074a4fbf76ffac4080a7331dc1359c2b4f674a4 false -check_key 24297d8832d733ed052dd102d4c40e813f702006f325644ccf0cb2c31f77953f false -check_key 5231a53f6bea7c75b273bde4a9f673044ed87796f20e0909978f29d98fc8d4f0 true -check_key 94b5affcf78be5cf62765c32a0794bc06b4900e8a47ddba0e166ec20cec05935 true -check_key c14b4d846ea52ffbbb36aa62f059453af3cfae306280dada185d2d385ef8f317 true -check_key cceb34fddf01a6182deb79c6000a998742d4800d23d1d8472e3f43cd61f94508 true -check_key 1faffa33407fba1634d4136cf9447896776c16293b033c6794f06774b514744c true -check_key faaac98f644a2b77fb09ba0ebf5fcddf3ff55f6604c0e9e77f0278063e25113a true -check_key 09e8525b00bea395978279ca979247a76f38f86dce4465eb76c140a7f904c109 true -check_key 2d797fc725e7fb6d3b412694e7386040effe4823cdf01f6ec7edea4bc0e77e20 false -check_key bbb74dabee651a65f46bca472df6a8a749cc4ba5ca35078df5f6d27a772f922a false -check_key 77513ca00f3866607c3eff5c2c011beffa775c0022c5a4e7de1120a27e6687fd true -check_key 
10064c14ace2a998fc2843eeeb62884fe3f7ab331ca70613d6a978f44d9868eb false -check_key 026ae84beb5e54c62629a7b63702e85044e38cadfc9a1fcabee6099ba185005c false -check_key aef91536292b7ba34a3e787fb019523c2fa7a0d56fca069cc82ccb6b02a45b14 false -check_key 147bb1a82c623c722540feaad82b7adf4b85c6ec0cbcef3ca52906f3e85617ac true -check_key fc9fb281a0847d58dc9340ef35ef02f7d20671142f12bdd1bfb324ab61d03911 false -check_key b739801b9455ac617ca4a7190e2806669f638d4b2f9288171afb55e1542c8d71 false -check_key 494cc1e2ee997eb1eb051f83c4c89968116714ddf74e460d4fa1c6e7c72e3eb3 true -check_key ed2fbdf2b727ed9284db90ec900a942224787a880bc41d95c4bc4cf136260fd7 true -check_key 02843d3e6fc6835ad03983670a592361a26948eb3e31648d572416a944d4909e true -check_key c14fea556a7e1b6b6c3d4e2e38a4e7e95d834220ff0140d3f7f561a34e460801 true -check_key 5f8f82a35452d0b0d09ffb40a1154641916c31e161ad1a6ab8cfddc2004efdf6 false -check_key 7b93d72429fab07b49956007eba335bb8c5629fbf9e7a601eaa030f196934a56 true -check_key 6a63ed96d2e46c2874beaf82344065d94b1e5c04406997f94caf4ccd97cfbab9 false -check_key c915f409e1e0f776d1f440aa6969cfec97559ef864b07d8c0d7c1163871b4603 true -check_key d06bc33630fc94303c2c369481308f805f5ce53c40141160aa4a1f072967617e false -check_key 1aafb14ca15043c2589bcd32c7c5f29479216a1980e127e9536729faf1c40266 true -check_key 58c115624a20f4b0c152ccd048c54a28a938556863ab8521b154d3165d3649cd false -check_key 9001ba086e8aa8a67e128f36d700cc641071556306db7ec9b8ac12a6256b27b7 false -check_key 898c468541634fb0def11f82c781341fce0def7b15695af4e642e397218c730c true -check_key 47ea6539e65b7b611b0e1ae9ee170adf7c31581ca9f78796d8ebbcc5cd74b712 false -check_key 0c60952a64eeac446652f5d3c136fd36966cf66310c15ee6ab2ecbf981461257 false -check_key 682264c4686dc7736b6e46bdc8ab231239bc5dac3f5cb9681a1e97a527945e8e true -check_key 276006845ca0ea4238b231434e20ad8b8b2a36876effbe1d1e3ffb1f14973397 true -check_key eecd3a49e55e32446f86c045dce123ef6fe2e5c57db1d850644b3c56ec689fce true -check_key a4dced63589118db3d5aebf6b5670e71250f07485ca4bb6dddf9cce3e4c227a1 false -check_key b8ade608ba43d55db7ab481da88b74a9be513fca651c03e04d30cc79f50e0276 false -check_key 0d91de88d007a03fe782f904808b036ff63dec6b73ce080c55231afd4ed261c3 true -check_key 87c59becb52dd16501edadbb0e06b0406d69541c4d46115351e79951a8dd9c28 true -check_key 9aee723be2265171fe10a86d1d3e9cf5a4e46178e859db83f86d1c6db104a247 false -check_key 509d34ae5bf56db011845b8cdf0cc7729ed602fce765e9564cb433b4d4421a43 false -check_key 06e766d9a6640558767c2aab29f73199130bfdc07fd858a73e6ae8e7b7ba23ba false -check_key 801c4fe5ab3e7cf13f7aa2ca3bc57cc8eba587d21f8bc4cd40b1e98db7aec8d9 false -check_key d85ad63aeb7d2faa22e5c9b87cd27f45b01e6d0fdc4c3ddf105584ac0a021465 false -check_key a7ca13051eb2baeb5befa5e236e482e0bb71803ad06a6eae3ae48742393329d2 true -check_key 5a9ba3ec20f116173d933bf5cf35c320ed3751432f3ab453e4a6c51c1d243257 false -check_key a4091add8a6710c03285a422d6e67863a48b818f61c62e989b1e9b2ace240a87 false -check_key bdee0c6442e6808f25bb18e21b19032cf93a55a5f5c6426fba2227a41c748684 true -check_key d4aeb6cdad9667ec3b65c7fbc5bfd1b82bba1939c6bb448a86e40aec42be5f25 false -check_key 73525b30a77f1212f7e339ec11f48c453e476f3669e6e70bebabc2fe9e37c160 true -check_key 45501f2dc4d0a3131f9e0fe37a51c14869ab610abd8bf0158111617924953629 false -check_key 07d0e4c592aa3676adf81cca31a95d50c8c269d995a78cde27b2a9a7a93083a6 false -check_key a1797d6178c18add443d22fdbf45ca5e49ead2f78b70bdf1500f570ee90adca5 true -check_key 0961e82e6e7855d7b7bf96777e14ae729f91c5bbd20f805bd7daac5ccbec4bab false -check_key 
57f5ba0ad36e997a4fb585cd2fc81b9cc5418db702c4d1e366639bb432d37c73 true -check_key 82b005be61580856841e042ee8be74ae4ca66bb6733478e81ca1e56213de5c05 false -check_key d7733dcae1874c93e9a2bd46385f720801f913744d60479930dad7d56c767cdc false -check_key b8b8b698609ac3f1bd8f4965151b43b362e6c5e3d1c1feae312c1d43976d59ab true -check_key 4bba7815a9a1b86a5b80b17ac0b514e2faa7a24024f269b330e5b7032ae8c04e true -check_key 0f70da8f8266b58acda259935ef1a947c923f8698622c5503520ff31162e877b false -check_key 233eaa3db80f314c6c895d1328a658a9175158fa2483ed216670c288a04b27bc false -check_key a889f124fabfd7a1e2d176f485be0cbd8b3eeaafeee4f40e99e2a56befb665be true -check_key 2b7b8abc198b11cf7efa21bc63ec436f790fe1f9b8c044440f183ab291af61d6 true -check_key 2491804714f7938cf501fb2adf07597b4899b919cabbaab49518b8f8767fdc6a true -check_key 52744a54fcb00dc930a5d7c2bc866cbfc1e75dd38b38021fd792bb0ca9f43164 true -check_key e42cbf70b81ba318419104dffbb0cdc3b7e7d4698e422206b753a4e2e6fc69bb false -check_key 2faff73e4fed62965f3dbf2e6446b5fea0364666cc8c9450b6ed63bbb6f5f0e7 true -check_key 8b963928d75be661c3c18ddd4f4d1f37ebc095ce1edc13fe8b23784c8f416dfd false -check_key b1162f952808434e4d2562ffda98bd311613d655d8cf85dc86e0a6c59f7158bc true -check_key 5a69adcd9e4f5b0020467e968d85877cb3aa04fa86088d4499b57ca65a665836 true -check_key 61ab47da432c829d0bc9d4fdb59520b135428eec665ad509678188b81c7adf49 false -check_key 154bb547f22f65a87c0c3f56294f5791d04a3c14c8125d256aeed8ec54c4a06e true -check_key 0a78197861c30fd3547b5f2eabd96d3ac22ac0632f03b7afd9d5d2bfc2db352f true -check_key 8bdeadcca1f1f8a4a67b01ed2f10ef31aba7b034e8d1df3a69fe9aebf32454e0 false -check_key f4b17dfca559be7d5cea500ac01e834624fed9befae3af746b39073d5f63190d true -check_key 622c52821e16ddc63b58f3ec2b959fe8c6ea6b1a596d9a58fd81178963f41c01 true -check_key 07bedd5d55c937ef5e23a56c6e58f31adb91224d985285d7fef39ede3a9efb17 false -check_key 5179bf3b7458648e57dc20f003c6bbfd55e8cd7c0a6e90df6ef8e8183b46f99d true -check_key 683c80c3f304f10fdd53a84813b5c25b1627ebd14eb29b258b41cd14396ef41f true -check_key c266244ed597c438170875fe7874f81258a830105ca1108131e6b8fea95eb8ba true -check_key 0c1cdc693df29c2d1e66b2ce3747e34a30287d5eb6c302495634ec856593fe8e true -check_key 28950f508f6a0d4c20ab5e4d55b80565a6a539092e72b7eb0ed9fa5017ecef88 false -check_key 8328a2a5fcfc4433b1c283539a8943e6eb8cc16c59f29dedc3af2c77cfd56f25 true -check_key 5d0f82319676d4d3636ff5dc2a38ea5ec8aeaac4835fdcab983ab35d76b7967b false -check_key cafcc75e94a014115f25c23aaae86e67352f928f468d4312b92240ff0f3a4481 false -check_key 3e5fdd8072574218f389d018e959669e8ca4ef20b114ea7dce7bfb32339f9f42 true -check_key 591763e3390a78ccb529ceea3d3a97165878b179ad2edaa166fd3c78ec69d391 true -check_key 7a0a196935bf79dc2b1c3050e8f2bf0665f7773fc07511b828ec1c4b1451d317 false -check_key 9cf0c034162131fbaa94a608f58546d0acbcc2e67b62a0b2be2ce75fc8c25b9a false -check_key e3840846e3d32644d45654b96def09a5d6968caca9048c13fcaab7ae8851c316 false -check_key a4e330253739af588d70fbda23543f6df7d76d894a486d169e5fedf7ed32d2e2 false -check_key cfb41db7091223865f7ecbdda92b9a6fb08887827831451de5bcb3165395d95d true -check_key 3d10bd023cef8ae30229fdbfa7446a3c218423d00f330857ff6adde080749015 false -check_key 4403b53b8d4112bb1727bb8b5fd63d1f79f107705ffe17867704e70a61875328 false -check_key 121ef0813a9f76b7a9c045058557c5072de6a102f06a9b103ead6af079420c29 true -check_key 386204cf473caf3854351dda55844a41162eb9ce4740e1e31cfef037b41bc56e false -check_key eb5872300dc658161df469364283e4658f37f6a1349976f8973bd6b5d1d57a39 true -check_key 
b8f32188f0fc62eeb38a561ff7b7f3c94440e6d366a05ef7636958bc97834d02 false -check_key a817f129a8292df79eef8531736fdebb2e985304653e7ef286574d0703b40fb4 false -check_key 2c06595bc103447b9c20a71cd358c704cb43b0b34c23fb768e6730ac9494f39e true -check_key dd84bc4c366ced4f65c50c26beb8a9bc26c88b7d4a77effbb0f7af1b28e25734 false -check_key 76b4d33810eed637f90d49a530ac5415df97cafdac6f17eda1ba7eb9a14e5886 true -check_key 926ce5161c4c92d90ec4efc58e5f449a2c385766c42d2e60af16b7362097aef5 false -check_key 20c661f1e95e94a745eb9ec7a4fa719eff2f64052968e448d4734f90952aefee false -check_key 671b50abbd119c756010416e15fcdcc9a8e92eed0f67cbca240c3a9154db55c0 false -check_key df7aeee8458433e5c68253b8ef006a1c74ce3aef8951056f1fa918a8eb855213 false -check_key 70c81a38b92849cf547e3d5a6570d78e5228d4eaf9c8fdd15959edc9eb750daf false -check_key 55a512100b72d4ae0cfc16c75566fcaa3a7bb9116840db1559c71fd0e961cc36 false -check_key dbfbec4d0d2433a794ad40dc0aea965b6582875805c9a7351b47377403296acd true -check_key 0a7fe09eb9342214f98b38964f72ae3c787c19e5d7e256af9216f108f88b00a3 true -check_key a82e54681475f53ced9730ee9e3a607e341014d9403f5a42f3dbdbe8fc52e842 true -check_key 4d1f90059f7895a3f89abf16162e8d69b399c417f515ccb43b83144bbe8105f6 true -check_key 94e5c5b8486b1f2ff4e98ddf3b9295787eb252ba9b408ca4d7724595861da834 false -check_key d16e3e8dfa6d33d1d2db21c651006ccddbf4ce2e556594de5a22ae433e774ae6 false -check_key a1b203ec5e36098a3af08d6077068fec57eab3a754cbb5f8192983f37191c2df false -check_key 5378bb3ec8b4e49849bd7477356ed86f40757dd1ea3cee1e5183c7e7be4c3406 false -check_key 541a4162edeb57130295441dc1cb604072d7323b6c7dffa02ea5e4fed1d2ee9e true -check_key d8e86e189edcc4b5c262c26004691edd7bd909090997f886b00ed4b6af64d547 false -check_key 18a8731d1983d1df2ce2703b4c85e7357b6356634ac1412e6c2ac33ad35f8364 false -check_key b21212eac1eb11e811022514c5041233c4a07083a5b20acd7d632a938dc627de true -check_key 50efcfac1a55e9829d89334513d6d921abeb237594174015d154512054e4f9d1 true -check_key 9c44e8bcba31ddb4e67808422e42062540742ebd73439da0ba7837bf26649ec4 true -check_key b068a4f90d5bd78fd350daa129de35e5297b0ad6be9c85c7a6f129e3760a1482 false -check_key e9df93932f0096fcf2055564457c6dc685051673a4a6cd87779924be5c4abead true -check_key eddab2fc52dac8ed12914d1eb5b0da9978662c4d35b388d64ddf8f065606acaf true -check_key 54d3e6b3f2143d9083b4c98e4c22d98f99d274228050b2dc11695bf86631e89f true -check_key 6da1d5ef1827de8bbf886623561b058032e196d17f983cbc52199b31b2acc75b true -check_key e2a2df18e2235ebd743c9714e334f415d4ca4baf7ad1b335fb45021353d5117f true -check_key f34cb7d6e861c8bfe6e15ac19de68e74ccc9b345a7b751a10a5c7f85a99dfeb6 false -check_key f36e2f5967eb56244f9e4981a831f4d19c805e31983662641fe384e68176604a true -check_key c7e2dc9e8aa6f9c23d379e0f5e3057a69b931b886bbb74ded9f660c06d457463 true -check_key b97324364941e06f2ab4f5153a368f9b07c524a89e246720099042ad9e8c1c5b false -check_key eff75c70d425f5bba0eef426e116a4697e54feefac870660d9cf24c685078d75 false -check_key 161f3cd1a5873788755437e399136bcbf51ff5534700b3a8064f822995a15d24 false -check_key 63d6d3d2c21e88b06c9ff856809572024d86c85d85d6d62a52105c0672d92e66 false -check_key 1dc19b610b293de602f43dca6c204ce304702e6dc15d2a9337da55961bd26834 false -check_key 28a16d02405f509e1cfef5236c0c5f73c3bcadcd23c8eff377253941f82769db true -check_key 682d9cc3b65d149b8c2e54d6e20101e12b7cf96be90c9458e7a69699ec0c8ed7 false -check_key 0000000000000000000000000000000000000000000000000000000000000000 true -check_key 0000000000000000000000000000000000000000000000000000000000000080 true -check_key 
0100000000000000000000000000000000000000000000000000000000000000 true -check_key 0100000000000000000000000000000000000000000000000000000000000080 false -check_key 0200000000000000000000000000000000000000000000000000000000000000 false -check_key 0200000000000000000000000000000000000000000000000000000000000080 false -check_key 0300000000000000000000000000000000000000000000000000000000000000 true -check_key 0300000000000000000000000000000000000000000000000000000000000080 true -check_key 0400000000000000000000000000000000000000000000000000000000000000 true -check_key 0400000000000000000000000000000000000000000000000000000000000080 true -check_key 0500000000000000000000000000000000000000000000000000000000000000 true -check_key 0500000000000000000000000000000000000000000000000000000000000080 true -check_key 0600000000000000000000000000000000000000000000000000000000000000 true -check_key 0600000000000000000000000000000000000000000000000000000000000080 true -check_key 0700000000000000000000000000000000000000000000000000000000000000 false -check_key 0700000000000000000000000000000000000000000000000000000000000080 false -check_key 0800000000000000000000000000000000000000000000000000000000000000 false -check_key 0800000000000000000000000000000000000000000000000000000000000080 false -check_key 0900000000000000000000000000000000000000000000000000000000000000 true -check_key 0900000000000000000000000000000000000000000000000000000000000080 true -check_key 0a00000000000000000000000000000000000000000000000000000000000000 true -check_key 0a00000000000000000000000000000000000000000000000000000000000080 true -check_key 0b00000000000000000000000000000000000000000000000000000000000000 false -check_key 0b00000000000000000000000000000000000000000000000000000000000080 false -check_key 0c00000000000000000000000000000000000000000000000000000000000000 false -check_key 0c00000000000000000000000000000000000000000000000000000000000080 false -check_key 0d00000000000000000000000000000000000000000000000000000000000000 false -check_key 0d00000000000000000000000000000000000000000000000000000000000080 false -check_key 0e00000000000000000000000000000000000000000000000000000000000000 true -check_key 0e00000000000000000000000000000000000000000000000000000000000080 true -check_key 0f00000000000000000000000000000000000000000000000000000000000000 true -check_key 0f00000000000000000000000000000000000000000000000000000000000080 true -check_key 1000000000000000000000000000000000000000000000000000000000000000 true -check_key 1000000000000000000000000000000000000000000000000000000000000080 true -check_key 1100000000000000000000000000000000000000000000000000000000000000 false -check_key 1100000000000000000000000000000000000000000000000000000000000080 false -check_key 1200000000000000000000000000000000000000000000000000000000000000 true -check_key 1200000000000000000000000000000000000000000000000000000000000080 true -check_key 1300000000000000000000000000000000000000000000000000000000000000 true -check_key 1300000000000000000000000000000000000000000000000000000000000080 true -check_key daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key dcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key 
dcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key ddffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key ddffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key deffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key deffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key dfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key dfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key e0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key e0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key e1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key e1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key e2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key e2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key e3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key e3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key e4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key e4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key e5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key e5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key e6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key e6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key e7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key e7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key e8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key e8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key e9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key e9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key eaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key eaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true -check_key ebffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key ebffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true -check_key ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key 
f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key f9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key f9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key faffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key faffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key fbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key fbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key fcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key fcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key fdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key fdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key feffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key feffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -check_key ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false -check_key ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false -hash_to_ec da66e9ba613919dec28ef367a125bb310d6d83fb9052e71034164b6dc4f392d0 52b3f38753b4e13b74624862e253072cf12f745d43fcfafbe8c217701a6e5875 -hash_to_ec a7fbdeeccb597c2d5fdaf2ea2e10cbfcd26b5740903e7f6d46bcbf9a90384fc6 f055ba2d0d9828ce2e203d9896bfda494d7830e7e3a27fa27d5eaa825a79a19c -hash_to_ec ed6e6579368caba2cc4851672972e949c0ee586fee4d6d6a9476d4a908f64070 da3ceda9a2ef6316bf9272566e6dffd785ac71f57855c0202f422bbb86af4ec0 -hash_to_ec 9ae78e5620f1c4e6b29d03da006869465b3b16dae87ab0a51f4e1b74bc8aa48b 72d8720da66f797f55fbb7fa538af0b4a4f5930c8289c991472c37dc5ec16853 -hash_to_ec ab49eb4834d24db7f479753217b763f70604ecb79ed37e6c788528720f424e5b 45914ba926a1a22c8146459c7f050a51ef5f560f5b74bae436b93a379866e6b8 -hash_to_ec 5b79158ef2341180b8327b976efddbf364620b7e88d2e0707fa56f3b902c34b3 eac991dcbba39cb3bd166906ab48e2c3c3f4cd289a05e1c188486d348ede7c2e -hash_to_ec f21daa7896c81d3a7a2e9df721035d3c3902fe546c9d739d0c334ed894fb1d21 a6bedc5ffcc867d0c13a88a03360c8c83a9e4ddf339851bd3768c53a124378ec -hash_to_ec 3dae79aaca1abe6aecea7b0d38646c6b013d40053c7cdde2bed094497d925d2b 1a442546a35860a4ab697a36b158ded8e001bbfe20aef1c63e2840e87485c613 -hash_to_ec 3d219463a55c24ac6f55706a6e46ade3fcd1edc87bade7b967129372036aca63 
b252922ab64e32968735b8ade861445aa8dc02b763bd249bff121d10829f7c52 -hash_to_ec bc5db69aced2b3197398eaf7cf60fd782379874b5ca27cb21bd23692c3c885cc ae072a43f78a0f29dc9822ae5e70865bbd151236a6d7fe4ae3e8f8961e19b0e5 -hash_to_ec 98a6ed760b225976f8ada0579540e35da643089656695b5d0b8c7265a37e2342 6a99dbfa8ead6228910498cc3ff3fb18cb8627c5735e4b8657da846c16d2dcad -hash_to_ec e9cdc9fd9425a4a2389a5d60f76a2d839f0afbf66330f079a88fe23d73eae930 8aa518d091928668f3ca40e71e14b2698f6cae097b8120d7f6ae9afba8fd3d60 -hash_to_ec a50c026c0af2f9f9884c2e9b8464724ac83bef546fec2c86b7de0880980d24fb b07433f8df39da2453a1e13fd413123a158feae602d822b724d42ef6c8e443bf -hash_to_ec bf180e20d160fa23ccfa6993febe22b920160efc5a9614245f1a3a360076e87a 9d6454ff69779ce978ea5fb3be88576dc8feaedf151e93b70065f92505f2e800 -hash_to_ec b2b64dfeb1d58c6afbf5a56d8c0c42012175ebb4b7df30f26a67b66be8c34614 0523b22e7f220c939b604a15780abc5816709b91b81d9ee1541d44bd2586bbd8 -hash_to_ec 463fc877f4279740020d10652c950f088ebdebeae34aa7a366c92c9c8773f63a daa5fa72e70c4d3af407b8f2f3364708029b2d4863bbdde54bd67bd08db0fcad -hash_to_ec 721842f3809982e7b96a806ae1f162d98ae6911d476307ad1e4f24522fd26f55 4397c300a8cfcb42e7cc310bc975dc975ec2d191eaa7e0462998eb2830c34126 -hash_to_ec 384da8d9b83972af8cbefc2da5efc744037c8ef40efa4b3bacc3238a6232963d 3c80f107e6868f73ef600ab9229a3f4bbe24f4adce52e6ab3a66d5d510e0670d -hash_to_ec e26f8adef5b6fe5bb01466bff0455ca23fda07e200133697b3b6430ca3332bde e262a58bcc1f8baf1980e00d5d40ba00803690174d14fb4c0f608429ce3df773 -hash_to_ec 6e275b4ea4f085a5d3151aa08cf16a8c60b078e70be7ce5dac75b5d7b0eebe7c cb21b5a7744b4fcdc92ead4be0b04bcb9145e7bb4b06eff3bb2f0fe429b85108 -hash_to_ec a0dde4561ad9daa796d9cd8a3c34fd41687cee76d128bf2e2252466e3ef3b068 79a2eb06bb7647f5d0aae5da7cf2e2b2d2ce890f25f2b1f81bfc5fef8c87a7d3 -hash_to_ec dbaf63830e037b4c329969d1d85e58cb6c4f56014fd08eb38219bd20031ae27c 079c93ae27cd98075a487fd3f7457ad2fb57cdf12ec8651fedd944d765d07549 -hash_to_ec 1e87ba8a9acf96948bc199ae55c83ab3277be152c6d0b1d68a07955768d81171 5c6339f834116791f9ea22fcc3970346aaeddacf13fbd0a7d4005fbd469492ca -hash_to_ec 5a544088e63ddf5b9f444ed75a75bc9315c4c50439522f06b4823ecaf5e8a08d e95ca0730d57c6469be3a0f3c94382f8490257e2e546de86c650bdbc6482eaee -hash_to_ec e4e06d92ebb036a5e4bb547dbaa43fd70db3929eef2702649455c86d7e59aa46 e26210ff8ee28e24ef2613df40aa8a874b5e3c1d07ae14acc59220615aa334dc -hash_to_ec 5793b8b32dcc0f204501647f2976493c4f8f1fa5132315226f99f29a5a6fdfce 656e390086906d99852c9696e831f62cb56fc8f85f9a5c936c327f23c7faf4fe -hash_to_ec 84f56fa4d7f12e0efd48b1f7c81c15d6e3843ebb419f4a27ec97028d4f9da19e 0cbd4f0cd288e1e071cce800877de6aef97b63fff867424a4f2b2bab25602608 -hash_to_ec 242683ddf0a9fc55f6585de3aa64ea17c9c544896ff7677cd82c98f833bdf2ca 38c36d52314549213df7c7201ab7749a4724cbea92812f583bb48cabc20816ad -hash_to_ec a93ee320dc030aa382168c2eb6d75fce6e5a63a81f15632d514c6de8a7cfa5ee bd0a2facaa95bc95215a94be21996e46f789ee8beb38e75a1173b75fc686c505 -hash_to_ec e36136601d84475d25c3f14efe030363d646658937a8a8a19a812d5e6deb5944 2fb93d78fae299c9f6b22346acfb829796ee7a47ec71db5456d8201bec6c35a3 -hash_to_ec ba4b67d3d387c66baa4a32ec8b1db7681087e85076e71bab10036388c3aeb011 cc01329ce56f963bf444a124751c45b2c779ccb6dea16ca05251baca246b5401 -hash_to_ec 3fbc91896a2585154d6f7094c5ab9c487e29a27951c226eec1235f618e44946b 7d983acbb901bf5497d0708392e5e742ec8c8036cbb0d03403e9929da8cc85a7 -hash_to_ec a2da289fed650e9901f69a5f33535eb47c6bd07798633cbf6c00ce3172df76ac dca8a4d30ec2d657fefd0dba9c1c5fd45a79f665048b3cf72ac2c3b7363da1ac -hash_to_ec 99025d2d493f768e273ed66cacd3a5b392761e6bd158ca09c8fba84631ea1534 
7ef5af79ab155ab7e1770a47fcd7f194aca43d79ec6e303c7ce18c6a20279b04 -hash_to_ec 3cf1d01d0b70fb31f2a2f979c1bae812381430f474247d0b018167f2a2cd9a9f 7c53d799ec938a21bb305a6b5ca0a7a355fa9a68b01d289c4f22b36ce3738f95 -hash_to_ec 639c421b49636b2a1f8416c5d6e64425fe51e3b52584c265502379189895668e 0b47216ae5e6e03667143a6cf8894d9d73e3152c64fb455631d81a424410e871 -hash_to_ec 4ccf2c973348b7cc4b14f846f9bfcdcb959b7429accf6dede96248946841d990 7fd41f5b97ba42ed03947dd953f8e69770c92cc34b16236edad7ab3c78cbbb2e -hash_to_ec f76ae09fff537f8919fd1a43ff9b8922b6a77e9e30791c82cf2c4b8acb51363e 8e2c6bf86461ad2c230c496ee3896da33c11cc020fd4c70faa3645b329049234 -hash_to_ec 98932da7450f15db6c1eef78359904915c31c2aa7572366ec8855180edb81e3a 86180adddfac0b4d1fb41d58e98445dde1da605b380d392e9386bd445f1d821c -hash_to_ec ab26a1660988ec7aba91fc01f7aa9a157bbc12927f5b197062b922a5c0c7f8dd 2c44a43eda0d0aad055f18333e761f2f2ec11c585ec7339081c19266af918e4f -hash_to_ec 4465d0c1b4930cc718252efd87d11d04162d2a321b9b850c4a19a6acdfca24f4 b03806287d804188a4d679a0ecee66f399d7bdc3bd1494f9b2b0772bbb5a034f -hash_to_ec 0f2a7867864ed00e5c40082df0a0b031c89fa5f978d9beb2fde75153f51cfb75 5c471e1b118ef9d76c93aec70e0578f46e8db1d55affd447c1f64c0ad9a5caa5 -hash_to_ec 5c2808c07d8175f332cae050ce13bec4254870d76abff68faf34b0b8d3ad5000 eeff1d9a5aa428b7aecc575e63dde17294072eb246568493e1ed88ce5c95b779 -hash_to_ec 36300a21601fad00d00da45e27b36c11923b857f97e50303bd01f21998eaef95 b33b077871e6f5dad8ff6bc621c1b6dedcf700777d996c8c02d73f7297108b7e -hash_to_ec 9e1afb76d6c480816d2cedd7f2ab08a36c309efaa3764dcdb51bad6049683805 4cd96ba7b543b1a224b8670bf20b3733e3910711d32456d3e58e920215788adf -hash_to_ec 685f152704664495459b76c81567a4b571e8b307dd0e3c9b08ee95651a006047 80dd6b637580cb3be76025867f1525852b65a7a66066993fda3af7eb187dc1a5 -hash_to_ec 0b216444391a1163c14f7b27f9135e9747978c0e426dce1fa65c657f3e9146be 021259695a6854a4a03e8c74d09ab9630a401bfca06172a733fe122f01af90b4 -hash_to_ec cfcb35e98f71226c3558eaa9cf620db5ae207ece081ab13ddea4b1f122850a5a 46763d2742e2cdffe80bb3d056f4d3a1565aa83f19aab0a1f89e54ad81ae0814 -hash_to_ec 07e7292da8cdcdb58ee30c3fa16f1d609e9b3b1110dd6fa9b2cc18f4103a1c12 fe949ca251ac66f13a8925ae624a09cdbf6696d3c110442338d37700536e8ec7 -hash_to_ec 813bc7e3749e658190cf2a4e358bc07a6671f262e2c4eef9f44c66066a72e6a7 6b92fbda984bd0e6f4af7a5e04c2b66b6f0f9d197a9694362a8556e5b7439f8a -hash_to_ec 89c50a1e5497156e0fae20d99f5e33e330362b962c9ca00eaf084fe91aaec71d ef36cb75eb95fb761a8fa8c376e9c4447bcd61421250f7a711bd289e6ed78a9b -hash_to_ec d9bd9ff2dd807eb25de7c5de865dbc43cce2466389cedbc92b90aab0eb014f81 30104771ff961cd1861cd053689feab888c57b8a4a2e3989646ea7dea40f3c04 -hash_to_ec b8c837501b6ca3e118db9848717c847c062bf0ebeca5a7c211726c1426878af5 19a1e204b4a32ce9cccf5d96a541eb76a78789dceaf4fe69964e58ff96c29b63 -hash_to_ec 84376c5350a42c07ac9f96e8d5c35a8c7f62c639a1834b09e4331b5962ecace8 ba1e4437d5048bd1294eadc502092eafc470b99fde82649e84a52225e68e88f2 -hash_to_ec a3345e4a4cfc369bf0e7d11f49aed0d2a6ded00e3ff8c7605db9a919cf730640 0d318705c16e943c0fdcde134aaf6e4ccce9f3d9161d001861656fc7ea77a0b1 -hash_to_ec 3c994dfb9c71e4f401e65fd552dc9f49885f88b8b3588e24e1d2e9b8870ffab1 984157de5d7c2c4b43b2bffea171809165d7bb442baea88e83b27f839ebdb939 -hash_to_ec 153674c1c1b18a646f564af77c5bd7de452dc3f3e1e2326bfe9c57745b69ec5c e9a4a1e225ae472d1b3168c99f8ba1943ad2ed84ef29598f3f96314f22db9ef2 -hash_to_ec 2d46a705d4fe5d8b5a1f4e9ef46d9e06467450eb357b6d39faa000995314e871 b9d1aec540bf6a9c0e1b325ab87d4fbe66b1df48986dde3cb62e66e136eba107 -hash_to_ec 6764c3767f16ec8faecc62f9f76735f76b11d7556aeb61066aeaeaad4fc9042f 
3a5c68fb94b023488fb5940e07d1005e7c18328e7a84f673ccd536c07560a57b -hash_to_ec c99c6ee5804d4b13a445bc03eaa07a6ef5bcb2fff0f71678dd3bd66b822f8be8 a9e1ce91deed4136e6e53e143d1c0af106abde9d77c066c78ebbf5d227f9dde0 -hash_to_ec 3009182e1efac085c7eba24a7d9ef28ace98ebafa72211e73a41c935c37e6768 e55431a4c89d38bd95f8092cdf6e44d164ad5855677aba17ec262abc8c217c86 -hash_to_ec e7153acd114a7636a207be0b67fa86fee56dd318f2808a81e35dd13d4251b2d0 ff2b98d257e4d4ff7379e8871441ca7d26e73f78f3f5afcf421d78c9799ba677 -hash_to_ec 6378586744b721c5003976e3e18351c49cd28154c821bc45338892e5efedd197 3d765fb7bb4e165a3fa6ea00b5b5e22250f3861f0db0099626d9a9020443dda2 -hash_to_ec 5be49aba389b7e3ad6def3ba3c7dbec0a11a3c36fc9d441130ef370b8a8d29c2 2d61faf38062dc98ae1aaafec05e90a925c9769df5b8b8f7090d9e91b2a11151 -hash_to_ec f7bc382178d38e1b9a1a995bd8347c1283d8a2e8d150379faa53fd125e903d2b 544c815da65c3c5994b0ac7d6455578d03a2bc7cf558b788bcdb3430e231635a -hash_to_ec c28b5c4b6662eebb3ec358600644849ebeb59d827ed589c161d900ca18715fa8 a2d64db3c0e0353c257aadf9abc12ac779654d364f348b9f8e429aa7571203db -hash_to_ec 3a4792e5df9b2416a785739b9cf4e0d68aef600fa756a399cc949dd1fff5033a 4b54591bd79c30640b700dfb7f20158f692f467b6af70bd8a4e739c14a66c86a -hash_to_ec 002e70f25e1ceaf35cc14b2c6975a4c777b284a695550541e6f5424b962c19f5 73987e9342e338eb57a7a9e03bd33144db37c1091e952a10bd243c5bb295c18a -hash_to_ec 7eb671319f212c9cae0975571b6af109124724ba182937a9066546c92bdeff0c 49b46da3be0df1d141d2a323d5af82202afa2947a95b9f3df47722337f0d5798 -hash_to_ec ca093712559c8edd5c51689e2ddcb8641c2960e5d9c8b03a44926bb798a0c8dc b9ef9cf0f8e4a3d123db565afafb1102338bfb75498444ac0a25c5ed70d615da -hash_to_ec cfea0a08a72777ff3aa7be0d8934587fa4127cd49a1a938232815dc3fd8b23ac b4de604b3d712f1ef578195fb0e53c865d41e2dfe425202c6cfe6f10e4404eb5 -hash_to_ec aa0122ae258d6db21a26a31c0c92d8a0e3fdb46594aed41d561e069687dedcd6 5247eaec346de1c6cddf0ab04c12cd1d85cdb6d3a2fba2a5f9a5fe461abef5eb -hash_to_ec b3941734f4d3ba34ccaf03c4c737ac5a1e036eb74309300ce44d73aca24fef08 535938985c936e3780c61fe29a4121d6cb89a05080b6c2147031ea0c2b5b9829 -hash_to_ec 8c2ee1041a2743b30dcbf413cc9232099b9268f82a5a21a09b63e7aff750882f 6ad0d4b3a65b522dfad0e9ac814b1fb939bc4910bd780943c72f57f362754cca -hash_to_ec 4b6829a2a2d46c8f0d0c23db0f735fcf976524bf39ccb623b919dd3b28ad5193 2e0097d7f92993bc45ba06baf4ca63d64899d86760adc4eb5eeefb4a78561050 -hash_to_ec 9c1407cb6bba11e7b4c1d274d772f074f410d6fe9a1ee7a22cddf379257877d9 692261c7d6a9a7031c67d033f6d82a68ef3c27bd51a5666e55972238769821cd -hash_to_ec 638c42e4997abf8a4a9bffd040e31bd695d590cde8afbd7efd16ffdbae63bf66 793024c8ce196a2419f761dde8734734af6bd9eb772b30cc78f2cb89598dce97 -hash_to_ec 1fb60d79600de151a1cf8a2334deb5828632cbd91cb5b3d45ae06e08187ae23d ff2542cde5bc2562e69471a31cfc3d0c26e2f6ccc1891a633b07a3968e42521c -hash_to_ec d2fdbbae4e38a1b734151c3df52540feb2d3ff74edfef2f740e49a5c363406ee 344c83ba6ff4e38b257077623d298d2f2b52002645021241bc9389f81b29ad12 -hash_to_ec 836c27a6ddfe1a24aba3d6022dff6dfe970f142d8b4ac6afb8efcba5a051942f b8af481d33726b3f875268282d621e4c63f891a09f920b8f2f49080f3a507387 -hash_to_ec 46281153ddcdf2e79d459693b6fe318c1969538dd59a750b790bfff6e9481abf 8eaf534919ab6573ba4e0fbde0e370ae01eae0763335177aa429f61c4295e9d4 -hash_to_ec d57b789e050bf3db462b79a997dac76aa048d4be05f133c66edee56afd3dbe66 0c5a294cb2cbb6d9d1c0a1d57d938278f674867f612ed89dcbe4533449f1a131 -hash_to_ec 548d524d03ac22da18ff4201ce8dbee83ad9af54ee4e26791d26ed2ab8f9bfc7 c6609d9e7d9fd982dec8a166ff4fb6f7d195b413aad2df85f73d555349134f3b -hash_to_ec cc920690422e307357f573b87a6e0e65f432c6ec12a604eb718b66ba18897a56 
6f11c466d1c72fccd81e51d9bda03b6e8d6a395e1d931b2a84e392dc9a3efa18 -hash_to_ec c7fb8a51f5fcd8824fc0875d4eb57ab4917cb97090a6e2288f852f2bb449edd9 45543fea6eed461016e48598b521f18ff70178afea18032b188deea3e56052fc -hash_to_ec c681bb1b829e24b1c52cb890036b89f0029d261c6a15e5b2c684ee7dfe91e746 263006fe2c6b08f1ab29cdf442472c298e2faf225bbf5c32399d3745cd3904bd -hash_to_ec e06411c542312fdd305e17e46be14c63bab5836dc8751da06164b1ae22d4e20f 901871be7a7ff5aecade2acff869846f3c50de69307ac155f2aa3a74d5472ef2 -hash_to_ec 9c725a2acb80fa712f9781da510e5163b1b30f4e1c064c26b5185e537f0614ea 02420d49257846eb39fddd196d3171679f6be21d9adac667786b65a6e90f57b1 -hash_to_ec 22792772820feafa85c5cb3fa8f876105251bef08617d389619697f47dff54f2 a3ad444e7811693687f3925e7c315ae55d08d9f4b0a29876bc2a891ab941c1c3 -hash_to_ec 0587b790121395d0f4f39093d10b4817f58a1e80621a24eea22b3c127d6ac5a2 86c417c695c64c7becaad0d59ddbb2bca4cb2b409a21253d680aac1a08617095 -hash_to_ec fa0b5f28399bef0cd87bfe6b8a2b69e9c5506fb4bacd22deba8049615a5db526 ede0ea240036ff75d075258a053f3ce5d6f77925d358dbe33c06509fc9b12111 -hash_to_ec 62a3274fc0bed109d5057b865c2ba6b6a5a417cb90a3425674102fcd457ede2d ff7e46751bb4dcd1e800a8feab7cf6771f42dc0cfed7084c23b8a5d255a6f34e -hash_to_ec a6fcd4aecaaaf281563b9b7cd6fbc7b1829654f644f4165942669a2ef632b2bf 28f136be0eb957a5b36f8ec294399c9f73ad3a3c9bb953ad191758ced554a233 -hash_to_ec 01baa4c06d6676c9b286cda76ed949fd80a408b3309500ba84a5bb7e3dce58e2 a943d1afa2efce284740e7db21ea02db70b124808be2ff80cbf9b9cb96c7b73e -hash_to_ec dd9aff9c006ba514cef8fae665657bc9813fe2715467cf479643ea4c4e365d6d 68de2f7d49de4004286ce0989a06a686b15d0f463a02ffd448a18914e1ddf713 -hash_to_ec 3df3513d5e539161761ce7992ab9935f649bc934bed0da3c5e1095344b733bb9 e9c2dd747d7b2482474325943cd850102b8093164678362c7621993a790e2a8a -hash_to_ec 7680cfb244dc8ef37c671fff176be1a3dad00e5d283f93145d0cbee74cca2df4 a0fd8c3cca16a130eaa5864cbe8152b7adfbf09e8cf72244b2fc8364c3b20bf4 -hash_to_ec 8a547c38bd6b219ea0d612d4a155eba9c56034a1405dcf4b608de787f37e0fd8 76bf0dc40fd0a5508c5e091d8bb7eccfa28b331e72c6a0d4ac0e05a3d651850b -hash_to_ec dd93901621f58465e9791012afa76908f1e80ad80e52b809dc7fc32bb004f0a8 09a0b7ecfe8058b1e9ee01c9b523826867ca97a32efad29ac8ceebca67a4ea00 -hash_to_ec b643010220f1f4ee6c7565f6e1b3dc84c18274ede363ac36b6af3707e69a1542 233c9ff8de59e5f96c2f91892a71d9d93fa7316319f30d1615f10ac1e01f9285 -hash_to_ec c2637b2299dfc1fd7e953e39a582bafd19e6e7fff3642978eb092b900dbfea80 339587ba1c05e2cba44196a4be1fd218b772199e2c61c3c0ff21dcd54b570c43 -hash_to_ec 1f36d3a7e7c468eb000937de138809e381ad2e23414cbbaac49b7f33533ed486 7e5b0a96051c77237a027a79764c2763487af88121c7774645e97827fb744888 -hash_to_ec 8c142a55f60b2edbe03335b7f90aa2bd63e567048a65d61c70cb28779c5200af d3d6d5563b3d81c8c91cf9806bb13b2850fb7c162c610fd2f5b83c464add8182 -hash_to_ec 99e7b98293c9de1f81aff1376485a990014b8b176521b2a68cdbde6300190398 119cbc01a1d9b9fb4759031d3a70685aebea0f01bc5ee082ce824265fd21b3b4 -hash_to_ec 9753bd38be072b51490290be6207ca4545e3541bdf194e0850ae0a9f9e64b8ba 1ad3aa759863153606fa6570f0e1290baded4c8c1f2ba0f67c1911bfc8ccd7a0 -hash_to_ec 322703864ceee19b7f17cec2a822f310f0c4da3ff98b0be61a6fd30ac4db649c 89d9e7a5947e1cde874e4030de278070aae363063cd3592ce5411821474f0816 -hash_to_ec c1acd01e1e535fad273a8b757d981470f43dd7d95af732901fbba16b6e245761 57e80445248111150da5e63c706b4abbf3eef2cc508bd0347ff6b81e8c59f5bc -hash_to_ec 492473559f181bbe78f60215bc6d3a5168435ea2fc0a508372d6f5ca126e9767 df3965f137cf6f60c56ebd7c8f246281fd6dc92ce23a37e9f846f8452c884e01 -hash_to_ec afa9d6e0e2fb972ee806beb450c2c0165e58234b0676a4ec0ca19b6e710d7c35 
669a57e69dd2845a5e50ed8e5d8423ac9ae792a43c7738554d6c5e765a7b088a -hash_to_ec 094de050bdadef3b7dbaeeca29381c667e63e71220970149d97b95db8f4db61b 0cf5d03530c5e97850d0964c6a394de9cde1e8e498f8c0e173c518242c07f99a -hash_to_ec 2ce583724bc699ad800b33176a1d983512fe3cb3afa65d99224b23dae223efb7 e1548fd563c75ae5b5366dbab4cb73c54e7d5e087c9e5453125ff8fbe6c83a5c -hash_to_ec 8064974b976ff5ef6adaade6196ab69cda6970cd74f7f5899181805f691ad970 98ae63c47331a4ac433cb2f17230c525982d89d21e2838515a36ec5744ec2d15 -hash_to_ec 384911047de609c6ae8438c745897357989363885cef2381a8a00a090cf04a58 4692ec3a0a03263620841c108538d584322fdd24d221a74bf1e1f407f83828af -hash_to_ec 0e1b1ced5ae997ef9c10b72cfc6d8c36d7433c01fc04f4083447f87243282528 6ee443ab0637702b7340bd4a908b9e2e63df0cc423c409fb320eb3f383118b80 -hash_to_ec 5a7aea70c85c040af6ff3384bcaa63ec45c015b55b44fffa37ab982a00dc57c5 2df2e20137cefd166c767646ecd2e386d28f405aebe43d739aa55beba04ed407 -hash_to_ec 3e878a3567487f20f7c98ea0488a40b87f1ba99e50bbfe9f00a423f927cbd898 697c7e60e4bf8c429ba7ac22b11a4b248d7465fc6abe597ec6d1e1c973330688 -hash_to_ec c0bb08350d8a4bb6bf8745f6440e9bd254653102a81c79d6528da2810da758e4 396a872ac9147a69b27223bf4ec4198345b26576b3690f233b832395f2598235 -hash_to_ec 6c3026a9284053a4ddb754818f9ae306ffa96eb7003bd03826eeccc9a0cf656e bef73da51d3ba9972a33d1afb7d263094b66ab6dbe3988161b08c17f8c69c2d5 -hash_to_ec f80b7d8f5a80d321af3a42130db199d9edcb8f5a82507d8bfca6d002d65458b6 aa59c167ea60ee024421bfbd00adbb3cbfc20e16bd3c9b172a6bef4d47ca7f57 -hash_to_ec bc0ffc24615aa02fafef447f17e7b776489cd2cc909f71e8344e01cad9f1610d 5c4195cc8dc3518143f06a9c228ae59ec9a6425a8fab89bfc638ad997cf35220 -hash_to_ec b15fad558737229f8816fcba8fbef805bd420c03e392d118c69bdf01890c4924 f5810477e37554728837f097e1b170d1d8c95351c7fff8abbbfc624e1a50c1b9 -hash_to_ec ec8c1f10d8e9da9cf0d57c4a1f2c402771bed7970109f3cf21ad32111f1f198f a697e0a3f09827b0cf3a4ffb6386388feda80d30ffffcbd54443dafcba162b28 -hash_to_ec a989647bf0d70fdb7533b8c303a2a07f5e42e26a45ffc4e48cff5ba88643a201 450fd73e636f94d0d232600dd39031386b0e2ecde4105124fc451341da9803db -hash_to_ec 7159971b03c365480d91d625a0fadc8e3a632c518acf0dbec87dd659da70e168 377bc43c038ac46cf6565aa0a6d6bf39968c0c1142755dba3141eeebf0acdf5d -hash_to_ec e39089a64fedac4b2c25e36312b33f79d02bf75a883f450f910915b8560a3b06 77efa7db1be020e77596f550de45626824a8268095d56a0991696b211cb329cc -hash_to_ec 2056b3c6347611bb0929dad00ec932a4d9bec0f06b2d57f17e01ffa1528a719e b6072c2be2ce928e8cbbb87e8eb7e06975c0f93b309dd3b6a29edaad2b56f99b -hash_to_ec 2c026793146e81b889fc741d62e06c341ce263560d57cd46d0376f5b29174489 8f1f64b67762aa784969e954c196a2c6610addc3604aa3291eb0b80304dfe9ef -hash_to_ec be6026d6704379c489fa7749832b58bdb1a9685a5ffb68c438537f2f76e0011f 0072569a4090a9ad383a205bb092196c9de871c22506e3bb63d6b9d1b2357c96 -hash_to_ec f4db802d5c6b7d7b53663b03d988b4cd0c7cad6c26612c5307754a93ebdc9710 f21bc9be4cb28761f6fe1d0a555ad5e9748375a2e9faea25a1df75cc8d273e18 -hash_to_ec c27d79a564c56b00956a55090481e85fbc837fd5fb5e8311ecb436e300c07e3a 1b1891e6abec74621501450cd68bb1eeaa5b2fffff4ec441a55d1235ff3a0842 -hash_to_ec a1e2f93c717cad32af386efa624198973df5a710963dd19d4c3ac40032a3a286 69c60571e3f9f63d2bfb359386ae3b8cd9e49a2e9127753002866e85c0443573 -hash_to_ec 76920d7b1763474bc94a16433c3c28241a9acdee3ff2b2cb0e6757ba415310aa c1b409169f102b696fc7fa1aa9c48631e58e08b5132b6aadf43407627bb1b499 -hash_to_ec 57ac654b29fa227c181fff2121491fcb283af6cbe932c8199c946862c0e90cb2 a204e8d327ea93b0b1bd74a78ffc370b20cea6455e209f2bc258114baa16d728 -hash_to_ec 88e66cfaef6432b759c50efce885097d1752252b479dac5ed822fa6c85d56427 
6fb84790d3749a5c1088209ee3823848d9c19bf1524215c44031143dd8080d70 -hash_to_ec c1e55da929c4f8f793696fc77ff4e1c317c34852d98403bfd15dd388ee7df0df 2f41e76f15c5b480665bd84067e3b543b85ce6de02be9da7a550b5e1ead94d34 -hash_to_ec 29e9ace5aa3c5a572b13f4b62b738a764d90c8c293ccb062ad798acbab7c5ef4 bce791aba1edc2a66079628fd838799489ab16b0a475ce7fe62e24cc56fe131c -hash_to_ec f25b2340689dadacaa9a0ef08aee8447d80b982e8a1ea42cf0500a1b9d85b37d f7f53aa117e6772a9abc452b3931b0a99405ac45147e7c550ac9fcf7ffe377b5 -hash_to_ec 0cb6c47fc8478063b33f5aed615a05bcc84d782c497b6cc8e76ec1fa11edbfdb 7a0b58b03147e7c9be1d98de49ead2ce738d0071b0af8ca03cc92ceb26fc2246 -hash_to_ec 7bd7287d7c4b596fe46fe57a6982c959653487bea843a77dd47d40986200d576 343084618c58284c64a5ff076f891be64885dc2ac73fa1567f7b39fde6b91542 -hash_to_ec e4984bf330708152254fb18ecef12d546afd24898a3cf00fba866957b6ee1b82 c70e88b061656181fbd6ff12aca578fb66de5553c756ea4698a248b177185bc6 -hash_to_ec cefd6c3cb9754ea632d6aea140af017de5ea12e5184f868936b74d9aa349d603 4b476502a8a483aadd50667f262f95351901628dd3a2aac1a5a41c4ea03f1647 -hash_to_ec da5d0f33344ee7f3345204badf183491b9452b84bccc907602c7bad43e5cf43e 9561b9e61241625e028361494d4fa5cd78df4c7219fa64c8fede6d8421b8904a -hash_to_ec d6f0a4f8c770a1274a76fd7ae4e5faf7779249263e1aaecc6f815cf376f5c302 cd5c55820be10f0d38feb81363ede3716a9168601a0dd1ce3109aab81367d698 -hash_to_ec b6bf32491d12a41c275d8518fc534d9a0d17aade509e7e8b8409a95c86167307 4aae534abbd67a9a8f2974154606c0e9be8932e920c7a5e931b46a92859acf82 -hash_to_ec 0f930beaad041f9cefd867bc194027dd651fb3c9bda5944ececdba8a7136b6d3 521708f8149891b418d0920369569a9d578029c78f8e41c68a0bb68d3ad5df60 -hash_to_ec 49b1fe0f97be74b81e0b047027b3e9f726fa5e90a67dafa877309397291c06c5 0852e59dfae5ec32cce606c119376597bce5cd4d04879d329f74e3ec66414cd3 -hash_to_ec 4d57647d03f2cfbd4782fcc933e0683b52d35fc8d37283e6c7de522ddfa7e698 cbeb9ebfbbc49ec81fac3b7b063fecac1bb40ea686d3ffb08f82b291715cd87f -hash_to_ec 4ea3238c06fc9346c7421ff85bc0244b893860b94bc437378472814d09b2e99f a1fbae941adc344031bbdf53385dfdc012311490a4eb5e9a2749a21b27ce917a -hash_to_ec 0cd3609f5c78b318cb853d189b73b1ee2d00edd4e5fce2812027daa3fcb1fed1 0c7a7241b16e3c47d41f5abbf205797bd4b63fc425a7120cb2a4bf324e08ae74 -hash_to_ec d74ab71428e36943c9868f70d3243469babd27988a1666a06f499a5741a52e3e 65b7c259f3b4547c082b2a7669b2b363668c4d87ac14e80471317b03b34e5216 -hash_to_ec f6b151998365e7d69bcbce383dd2e8b5bf93b8b72f029ff942588208c1619591 6ce840ce5dfbca238665c1e6eddb8b045aa85c69b5976fc55ab57e66d3d0a791 -hash_to_ec 207751de234b2bd7ec20bdd8326210c23aa68f04875c94ad7e256a96520f25d6 fc8f79ab3af317c38bfb88f40fb84422995a0479cfa6b03fa6df7f4e5f2813fb -hash_to_ec 62291e2873f38c0a234b77d1964205f3f91905c261d3c06f81051a9b0cb787cb 076d1d767457518e6777cb3bd4df22c8a19eb617e4bbccd1b0bd37522d6597a5 -hash_to_ec 4b060df2d2854036751d00190ee821cb0066d256d4172539fdfa6fbd1cdfe1f9 59866e927c69e7de5df00dc46c0d2a1ddf799d901128ff040cebb8fd61b95da4 -hash_to_ec ac8daf73f9c609bb36bce4fdeec1e50be5f22de38c3904fabcf758f0fc180bc7 7d8dc4e956363b652468a5fecafd7c08d48a2297e93b8edcb38e595fdd5a1fde -hash_to_ec fef7b6563fd27f3aab1d659806b26b8f2ec38bc8feefad50288383c001d1c20f e6e42547f12df431439d45103d2c5a583248f44554a98a3a433cf8c38b11805d -hash_to_ec 40a3d6871c76ecc6bb7b28324478733e196cc11d062dd4c9265cf31be5cf5a97 8c55a3811c241a020b1be202a58d5defbc4c8945d73b132570b47dd7c019ccf0 -hash_to_ec 0cd71e7e562b2b47f4bc8640caf20e69d3a62f10231b4c7a372c9691cff9ac3c fb8e4e3de479b3bf1f4f13b4ed5507df1e80bd9250567b9d021b03339d6e7197 -hash_to_ec 40a4e62800a99b7a26e0b507ffb29592e5bdba25284dc473048f24b27d25b40a 
90ae131d29ee4a71cd764ab26f1ca4e6d09a40db98f8692b345c3a0e130dc860 -hash_to_ec 1ddf35193cf52860bfe3e41060a7f44281241c6ae49cd541d24c1aca679b7501 3b4f50013895c522776ced456329c4e727de03575f6b99ae7d238a9f70862121 -hash_to_ec 014e0fa8ce9d5df262b9a1765725fde354a855de8aef3fc23684e05dd1ba8d34 3857f57776a3cb68721bcb7f1533a5f9fb416a1dc8824d719399b63a142d24de -hash_to_ec 09987979b0e98d1d5355df8a8698b8f54d3a037d12745c0a4317fe519c3df9cc 32a181e2b754aeced214c73ac459c97d99e63317be3eb923344c64a396173bca -hash_to_ec 51e9e8ec4413e92dbaaba067824c32b018487a8d16412ed310507b4741e18eed 0356b209156b4993fd5d5630308298429a1b0021c19bedecb7719ac607cfa644 -hash_to_ec 14d91313dfe46e353310e6a4a23ee15d7a4e1f431700a444be8520e6043d08d9 6f345f4018b5d178d9f61894d9f46ac09ff639483727b0d113943507cee88cfd -hash_to_ec 0d5af9ace87382acfffb9ab1a34b6e921881aa015d4f6d9c73171b2b0a97600d a8dbf36c85bebe6a7b3733e70cd3cd9ed0eb282ca470f344e5fcf9fe959f2e6e -hash_to_ec 996690caac7328b19d20ed28eb0003d675b1a9ff79055ab530e3bf170eb22a94 14340d7d935cffce74b8b2f325c9d92ce0238b51807ef2c1512935bb843194ce -hash_to_ec ad839c4b4c278c8ebe16ff137a558255a1f74646aa87c6cd99e994c7bb97ce8a d4f2da327ffded913b50577be0e583db2b237b5ca74da648e9b985c247073b76 -hash_to_ec 26fc2eeeee983e1300d72362fdff42edf08038e4eee277a6e2dbd1bd8c9d6560 3468b8269728c2c0bfc2e53b1575415124798bc0f59b60ea2f14967fc0ca19ce -hash_to_ec db33cecaf4ee6f0ceba338cc5fabfb7462cd952a9c9007357ff3f0ca8336f8bc 0bab38f58686d0ff770f770a297971510bc83e2ff2dfead34823d1c4d67f11af -hash_to_ec a0ee84b3c646526fb8787d26dcd9b7fe9dc713c8a6c1a4ea640465a9f36a64df 4d7a638f6759d3ec45339cd1300e1239cca5f0f658ca3cd29bc9bdb32f44faf0 -hash_to_ec 6a702e7899fcf3988e2b6b55654c22e54f43d3fa29de19177bdff5b2295fe27f 145d5748d6054fb586568e276f6925aef593a5b9c8249ad3dbef510af99b4307 -hash_to_ec 30ce0fd4f1fac8b62d613b8ee4a66deef6eb7094bd8466531050b837460f6971 f3aa850d593ba7cef01389f7e1916e57617f1d75cd42f64ce8f5f272384b148c -hash_to_ec 3aa31d4ad7046ad13d83eb11c9a6e90eb8483a374a77a9a7b2a7cc0978fefa76 2fe0827dc080d9c1e7ec475a78aa7ae3c86d1a35f4c3f25f4a1f7299cacf018a -hash_to_ec 8562a5a91e763b98014523ebb6e49120979098f89c31df1fde9eb3a49a15b20f ae223bf85e2009a9daf5fd8a14685e2e1e625fc88818b2fd437dd7e109a48f59 -hash_to_ec ccf9c313a47b8dbf7ce42c94b785818bc24134d95b6d22acc53c1ec2be29cf27 3e79fce6fe5aa14251b6560df4b76e811d7739eec097f27052c4403a283be71d -hash_to_ec d1e33cd6f8918618d5fb6d67ad8de939db8beaec4f115551eac64479b739b773 613fffcbe1bf48bb2d7bfd64fd97790a06025f8f2429edddb9ac145707847ecf -hash_to_ec 81eaeced34dd44e448d5dafa5715225e4956c90911c964a96ff7aa5b86b969bc 8f81177495d120a1357380164d677509b167f2958eb8b962b616c3951d426d8c -hash_to_ec 2bc001a29f8eab1c7377de69957ba365fb5bdaf9c2c220889709af920dfe27d3 9bcb3010038f366fa4c280eed6e914a23bfc402594d0b83d0e66730a465a565b -hash_to_ec 6feeb703c05e86c58d9fc5623f1af8657ecd1e75a14d18c4eedb642a8a393d16 6544628ba67ed0e14854961739c4d467fcf49d6361e39d32ea73dabeae51e6c3 -hash_to_ec e8ff145a7c26897f2c1639edd333a5412f87752f110079f581ccdc87fcce208c d4b5a6e06069c7e012e32119f8eda08ff04a8dfa784e1cf1bced455a4d41d905 -hash_to_ec 80488131dcb2018527908dbf8cdf4b823ef0806dc1d360f4da671004ef7ff74d 9984a79d9fd4f317768b442161116eef84e2ca49e938642b268fd64312d59a27 -hash_to_ec d8c4ca60446849a784d1462aa26a3b93073ff6841cb2da3ef52ab9785b00b1fd da5ec1562e7de2382d35728312f4eea3608d4dba775c1c108de510e1ce97d059 -hash_to_ec 68645728dfc6b9358dfb426493238ba38f24a2f46a3e89edb47d212549939cb7 d3253aa7235113dcc1b577d3bb80be34f528398815a653dbdbacbcbdfd5887a1 -hash_to_ec 4e8eb97ba2d1046e1b42e67530a61441e31c84e5e5e448d8e8dbe75d104eaccb 
de94f73e83222aa0e39b559d4fef70387b0815b9b2f6beff5da67262d8f0eb3e -hash_to_ec 104ff03122ffdf59b22b8c0fe3d8f2ef67d02328e4d5181916d3d2a92f9a0bb7 1517ccf69c0328327e1cf581f16944ff66bc91c37e1cd68a99525415e00b7c9f -hash_to_ec 80f23aae7356ae9a2f9f7504495a731214d26f870fb7df68fdc00b233494156f 7aef046b0a70f84e8d239aa95e192b5a3fffa0fae5090c91273e8996beca9e38 -hash_to_ec 2424b33235955a737ebddbf1c6c59cd8778af74da3bd3e658447666a2ab2f557 d19e2be8d482950fbdae429618da7a9daedb8c5944dea19cd1b6b274e792231b -hash_to_ec 0adc839d2b8f099e4341a4763b074c06318d6bcbd1ec558d20a9820c4a426463 cea5da12a84e5c20011726d9224a9930bec30f9571762dd7ca857b86bd37d056 -hash_to_ec 46c84d53951f1ba23c46a23d5d96bf019c559aa5d2d79e4535cfcdb36f38ce25 2a913a01a6f7dd78a43cdd5354d1160d9a5f0d824c489a892c80eba798a77567 -hash_to_ec 99bdaaf68555ccdc93d97c3a0fb4c126a1aa8b1202194a1a753401a6cae21055 1f645efe173577a092f2d847cc966e28ba3b36397fe84c96dfa4724ed4fcfdf9 -hash_to_ec c540ff78f1e063ad26ffa69febb8818c9f2a325072c566091ad816e40fe39af4 de7a762262c91ab4beccc0713233cb91163aec43e34de0dbcfad0c431e8a9722 -hash_to_ec de8b1ff8978cd5e02681521542b7b6c3c2f8f4602065059f83594809d04e3dda 290601e75207085bff3e016746e55a80310a76dea9ef566c24181079c76da11c -hash_to_ec d555994c8a022e52602d2a8bdd01fc1bfa6b9ab6734ff72a1bd5f937de4627f8 5f6794e874f48c4b362d0a24207374c2d274e28de86351afc6ddb95d8cc2fd62 -hash_to_ec 19db72f703fe6f1b73f21b6ba133ae6b111ae8cc496d3aa32e02411e34c0d8d7 42f159f43d2d62b8cf8a47d5f1340c5cf070e9860fc60de647c55d50fe9f5607 -hash_to_ec 23a87a258c2a5d1353aa2d5946f9e5749b92f85e3c58e1d177c3b6c3dcac809c e5685016f79d5e87d1fecb3e2a0fe64e4875f7accd2f6649d7f6b16317549cb1 -hash_to_ec 43e1738d7d1b5b565f5fc78e81480f7edf9a4dc18f104fc4be95135b98931b17 650f5b682e45f2d0c5d5e8bcfd9e0cda7d9071b55ecbfaf5e3b59941cd7479f2 -hash_to_ec a9d644de0804edf62dee613efa2547e510990a9b7a987ebe55ec74c23873a878 52ad329f88499a4f110e6a6cba1f820012d8db6ccb8f6495ab1e3eb5a24786e1 -hash_to_ec 11f2b5d89a0350d7c8727becf0f4dd19bd90f8c94ff207132ab13282dd9b94e6 b798a47bb98dc2a8f99deaf64d27638e33a0d504c5d2fbee477a2bc9b89e2838 -hash_to_ec 5e206e3190b3b715d125f1a11fff424fb33e36e534c99ddde2a3517068b7dcc4 2738e9571c96b2ddf93cb5f4a72b1ea78d3731d9555b830494513c0683c950ca -hash_to_ec efc3d65a43d4f10795c7265a76671348f80173e0f507c812f7ae76793b99c529 cf4434d18ce8167b51f117fe930860143c46e1739a8db1fba73b6b0de830d707 -hash_to_ec 81f00469788aad6631cf75b585ae06d43ec81c20479925a2009afac9687dff60 c335b5889b36ba4b4175bb0d986807e8eedb6f6b7329b70b922e2ab729c4202a -hash_to_ec 9ef5ff329b525ee8f5c3ac38e1dba7cb19985617341d356707c67ff273aed02d bef9f9e051ba0e24d1fdf72099cf43ecdd250d047fb329855b5372d5c422db9e -hash_to_ec 3fa1401bd63132cf8b385c0fa65f0715ba1fe6161e41d59f8033ae2b22f63fa1 8289a1cb3c2dae48879bb8913fafe2d196cc2fdab5f2a77607910efd33eae6df -hash_to_ec 6559836fd0081fa38a3f8d8408b564e5698b9797cf5e15f7f12a7d2c84511989 28d405a6687d2ecc90c1c66bf0454d58f3fa38835743075e1db58c658e15a104 -hash_to_ec 8e0882d45f0e4c2fb2839d3be86ff699d4b2242f5b25ac5a3c2f65297c7d2032 2771fdcf9135a62007adb5f0004d8222f0e42f819c81710aa4dc3ab2042bebf3 -hash_to_ec 1d91dc4dd9bd82646029d13aca1af96830c1d8a0400ddebeb14b00c93501c039 7792c62e897f32cbc9c4229f0d28f7882ceeae120329a1cd35f76a75ac704e93 -hash_to_ec 09527f9052acbbdd7676cbbd9534780865f04a27aaadad2b7d4f1dac68883cf0 b934220cde1327f2dc6af67bcb4124bf424d5084ef4da945e4daad1717cd0bb8 -hash_to_ec 2362e1abe73e64cdd2ca7f6c5ea9f467213747dd3f2b7c6e5df9cb21e03307d7 676b7122b96564358bbaaf77e3a5a4db1767e4f9a50f6ddd1c69df4566755af9 -hash_to_ec 26c2dd2356e9b6c68a415b25f91d18614dc8500c66f346d28489da543ee75a94 
0f4fd7086acd68eb7c9fa2410e2ecf18e34654eb44e979bc03ce436e992d5feb -hash_to_ec 422dc0a09d6a45a8e0b563eeb6a5ee84b08abd3a8cb34ff93f77ba3b163f4042 631f1b412ff5a0fccbe53a02b4a3deaa93a0418ed9874df401eb698ef75d7441 -hash_to_ec ceecdf46f57ef3f36ff30a1a3579b609340282d1b26ab5ddef2f53514e91bab1 9bc6f981fe98d14a2fc5b01a8134b6d35e123ec9ab8a3f303e0a5abb28150e2e -hash_to_ec 024a9e6e0d73f28aa6207fb1e02ce86d444d2d46f8211e8aaab54f459db91a5a 5fb0c1d2c3b30f399102104ea1874099fa83110b3d9c1fcfffb2981c98bf8cdf -hash_to_ec 5b8e45e269c9ccac4c68e532a72b29346d218f4606f37a14064826a62050e3a8 c7be46a871b77fc05ce891d24bd6bd54d9775b7ef573c6bc2d92b67f3604c1d1 -hash_to_ec 9a6593a385c266389eef14237874b97bdcd1823c3199311667d4853c2d12aa81 9f55ee9d94102d2b9c5670f30586cf9823bf205b4d4fe088c323e87c4e10f26f -hash_to_ec 27377e2811598c3569b92990865d39b72c7a5533e1be30f77330863187c11875 abd82bc726f2710a8b87e4c1cf5a069f0ae800de614468d3ff35639983020197 -hash_to_ec 7cacfaa135fb7d568b8dce8ea9136498b1b28c6d1020af45d376288d78d411f0 229fccd49744c0692508af329224553d21561ee6062b2b8a21f080f73da5bd97 -hash_to_ec 52abd90a5542d6496b8dec9567b020f30058e29458d64f2d4f3ad6f3bfc1a5a0 874e82ced7cf77577b3374087fb08a2300b7f403de628310c26bdb3be869d309 -hash_to_ec 5c8eebe9d12309187afa8d0d5191de3fdb84e5a05485d7cd62e8804ce7fdc0bc 12b7537643488aa8b9dcc4bae040cd491f8b466163b7988157b0502fb6c9177f -hash_to_ec 6ca3dd5c7a21a6bf65d6eefbe20a66e9b1d6b64196344be0c075f47aea48e3aa 5e1d0705ee24675238293b73ab1d98359119d4b328275be2460cc6ee4d19cc88 -hash_to_ec d7e6cd0d39b4308c2a5ee547c4569c8bb3887e49cedece62d218d7c3c5277797 793dc4397112dfd9a8f4e061f457eb6d6fbb1d7a58c40bad5f16002c64914186 -hash_to_ec 9cb6de8ba967cca0f0f861c6e20546f8958446595c01c28dae7ba6cfa09d6b14 ba1a2f7502b58fee3499c20e35fa01bb932e7a7c4a925dc04fbf5d90f33cfb5e -hash_to_ec 8ef9c7366733a1edcd116238cdbd177d61222d5c3e05b30ef6b85014cbcb6b79 8fc89664722947164ac9b77086aed319897612068f56ecd57f47029f14671603 -hash_to_ec 7f317a34e4fb7de9f69cb107ffc0e57fd9f5c85b85ccb5319d05cebfc169924a 4b71c42339c73db7d710cd63f374d478a6c13bdc352cff40e967282268965ba7 -hash_to_ec 15beef8d9687b92918a903b01d594859db4e7128263c8db0cae9d423ff962c1e cd75e6323952f6ac88f138f391b69f38c46d70b7eda61f9e431725b6f1d514a5 -hash_to_ec 7a1c04c9af8fc6649833fe81e96f0199fcfe94959256cbe1490075fc5be0904e 0368270cd979439ae0a9552a5d6c9f959e4247fcf920d9e071464582e79c04b1 -hash_to_ec c854c583d338615f85f69061e0fa9c9d7c5bbbfe562e8774fef3be556fe8bb63 061620171d7320f64bee98414ff7200a1f481521d202fb281cab06be73b80402 -hash_to_ec 0fb8af5aba05ad2503edf1cfad5a451da088e7e974772057cd991a4e0601a3eb d3cbc20384a4420143fcce2cb763b0c15bec4f3267d1bdad3c34c1ee6b790f5e -hash_to_ec 9a251cf59e84a9da5630642f9671c732440caa8fcf4c92446a7e5f5ef99da46c 9b9679086a433f2077f40bcd4c7545fb5cc87e7dbb8bba468d53cb04a74361a0 -hash_to_ec 8c632e357cef00e0911eb566f8cc809136b3f5ac1e82d183e4d645cef89fa155 5e06b0f4f278fa1ccb5431866e0b35171cdb814e2e82b9189ce01d8d8a1b2408 -hash_to_ec 4aa4c31463475086a5d96b3ff550340567ab3b4a86fa3f01cfe9be18bc4dcb54 76a2916cfc093f27992e1f07b50f431d61d58e255507e208cd29ea4d3bc56623 -hash_to_ec 1d33d9aadb949346e3c78d065a0f5262374524f4cb97a7390c8cdaede7ca6578 9ad2f757f499359903031adea6126c577469c4e834a2959e3ac08ee74b13783c -hash_to_ec d9217b9a070df20c4d2f0db42ff0bb36bfba9f51b0b6df8fdfe150405dce4934 65a843c522b4b8ec081a696a0d2dd8dfdfea45db201de7a5889a1446c6dff8c7 -hash_to_ec b665b2ca8a285e44ba84e785533b56496a5319730dbb95bc14d3bdfece7544dc 8a804cd13457497b0a29eeca2cecfaa858766ec1d270a0e0c6785b43fd49b824 -hash_to_ec 43b5cbcc21b3404bca97fa9a661940fe64d40f3ca569310e50b1bb0173c4d5ee 
6c12fffb540d536060bb8b96cf635c1b2cbaa4d875a8d2fb0bf79a690363df19 -hash_to_ec 11c58f20562c00dec5bb4456be07cd98186837e9af38d50d45f5e7b6f0f9000d cee76b567586f66dadd38c01213bfc1a17d38e96a495efb4c26063dc498ba209 -hash_to_ec b069a980b51d8e030262db0b30069e660f4a3f6f8075d1790c153ba12b879f8b 262391b00bdee71d1d827b2cfe50b46c29e265934dc91959bd369aca0cc6444e -hash_to_ec 75274bfd79bf33eb2f9ab046d34528af9a71811e7e3d55c20eb049c81ac692d8 cb93c850e36896fe6626e97c53652af6736ec3ba0641c7765d0cca2bad2352de -hash_to_ec 5cdb6a24d9736a00f197d9707949fedc5405f367744fe8c83b7cff650302b589 8b4ac03123fab9275dcf340345a1b11fba48ef106d410ba2e0e6f6457037a419 -hash_to_ec 07fdc85f809f95a07b59b084402bf91c512ebbe05c7657d6ba27a9e7e121e3e2 61182b3def063630e11de648a278032bcb75949f3a24ef5a133da87830ae5c4e -hash_to_ec a4188ca634cbb796f9927822e343d7b267e0a609c1a0ffa4dcf3726b9ffcc8a2 a911e4899fda28fd6337d708d34553ac5e810ee4938f6f7d9d6e521cab069edb -hash_to_ec 3c128ec5c955ea189a5789df2c892e94193a534a9d5801b8f75df870bc492a69 59eef5ee9df0f681df5b5c67ead1f06b059a8a843837b67f20cce15779608170 -hash_to_ec 51a4cc7ec4a14a98c0731e9de7f3ce0779123222d95455e940f2014a23729ec8 105863ccda076af7290d1bf9ec828651dc5811159839044d23f1c3e31a11c5e2 -hash_to_ec 1b901a31acbb7807c3309facdc7d04bc3b5a4aa714e6e346bd1c6ad4634e6534 01b3c0000b6c6b471c67c6ab3f9c7a500beaea5edb5c8f2b34df91b69ff67f21 -hash_to_ec d2f2c8d79cfa2e7cb2db80568ba62ca0576741acfbe5e2baa0d9b3c424a7c84d 7df9d9088022bd1ce6814d6f8051eef27a650ee38e789b184da2691efd27139d -hash_to_ec 04dcb7644fdfc12d8e34d6e57d7769db939b4a149ed2b81aa51a74ee90babe19 6cff0ab2dd3b32ba1bd1a78e3661722f3f10003a01ce83e430970557decedb2c -hash_to_ec 222798c6841eeaa07e7b7e29686942d7c7f9afc38d09360c8e1f52f2b7debd12 133e3a04ec82aa9b8dbbec18cadbafff446d1270bf7c6f3f97ddd3906dae2468 -hash_to_ec 4f7277c3ef247a0689b486ad965f969c433fc63e95d7310e789c4708418ccabc 7e0f2c984dd3cffb35458938c95fe92acf2e697aed060b0e3377c7a07e53c494 -hash_to_ec 359b4d6709413243ae2c5409ea02714a9f8961bbbb64a91e81daf01e18c981bf eab69af2cb7f113ad6a27035c0399853d10bd0b99291fad37794d100f7530431 -hash_to_ec 6cea3c6a9eb38f60329537170aa4db8dbb869af2040061e53b10c267daf6568c da9a97f4fa96bd05dade5e2704a6a633ba4dbe5080a1e831cda888e9d4f86615 -hash_to_ec 3dddecb954ef0209bcf61fd5b46b6c94f2384ef281c48a20ffee74f90788172d af9899c31f944617af54712f93d1a2b4944e48867f480d0d1aec61f3b713e32d -hash_to_ec 9605247462f50bdf7ff57fe966abbefe8b6efa0b65b5116252f0ec723717013f fc8f10904d42a74e09310ccf63db31a90f1dab88b278f15e3364a2356810f7e9 -hash_to_ec a005143c4d299933f866db41d0a0b8c67264f5d4ea840dd243cb10c3526bc077 928df1fe9404ffa9c1f4a1c8b2d43ab9b81c5615c8330d2dc2074ac66d4d5200 -hash_to_ec f45ce88065c34a163f8e77b6fb583502ed0eb1f490f63f76065a9d97e214e3a9 41bd6784270af4154f2f24f118617e2d7f5b7771a409f08b0f2b7bbcb5e3d666 -hash_to_ec 7b40ac30ed02b12ff592a5479c80cf5a7673abfdd4dd38810e40e63275bc2eed 6c6bf5961d83851c9728801093d9af04e5a693bc6cbad237b9ac4b0ed580a771 -hash_to_ec 9f985005794d3052a63361413a9820d2ce903198d6d5195b3f20a68f146c6d5c 88bcac53ba5b1c5b44730a24b4cc2cd782298fc70dc9d777b577a2b33b256449 -hash_to_ec 31b8e37d01fd5669de4ebf78889d749bc44ffe997186ace56f1fb3e60b8742d2 776366b44170efb130a5045597db5675c6c0b56f3def84863c6b6358aa8dcf40 diff --git a/networks/monero/io/Cargo.toml b/networks/monero/io/Cargo.toml deleted file mode 100644 index 887df8b2..00000000 --- a/networks/monero/io/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "monero-io" -version = "0.1.0" -description = "Serialization functions, as within the Monero protocol" -license = "MIT" -repository = 
"https://github.com/serai-dex/serai/tree/develop/networks/monero/io" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false } - -curve25519-dalek = { version = "4", default-features = false, features = ["alloc"] } - -[features] -std = ["std-shims/std"] -default = ["std"] diff --git a/networks/monero/io/LICENSE b/networks/monero/io/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/io/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/io/README.md b/networks/monero/io/README.md deleted file mode 100644 index 536b72dd..00000000 --- a/networks/monero/io/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Monero IO - -Serialization functions, as within the Monero protocol. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). diff --git a/networks/monero/io/src/lib.rs b/networks/monero/io/src/lib.rs deleted file mode 100644 index 345a8ed6..00000000 --- a/networks/monero/io/src/lib.rs +++ /dev/null @@ -1,248 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use core::fmt::Debug; -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, Write}, -}; - -use curve25519_dalek::{ - scalar::Scalar, - edwards::{EdwardsPoint, CompressedEdwardsY}, -}; - -const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000; - -mod sealed { - use core::fmt::Debug; - - /// A trait for a number readable/writable as a VarInt. - /// - /// This is sealed to prevent unintended implementations. 
- pub trait VarInt: TryInto + TryFrom + Copy { - const BITS: usize; - } - - impl VarInt for u8 { - const BITS: usize = 8; - } - impl VarInt for u32 { - const BITS: usize = 32; - } - impl VarInt for u64 { - const BITS: usize = 64; - } - // Don't compile for platforms where `usize` exceeds `u64`, preventing various possible runtime - // exceptions - const _NO_128_BIT_PLATFORMS: [(); (u64::BITS - usize::BITS) as usize] = - [(); (u64::BITS - usize::BITS) as usize]; - impl VarInt for usize { - const BITS: usize = core::mem::size_of::() * 8; - } -} - -/// The amount of bytes this number will take when serialized as a VarInt. -/// -/// This function will panic if the VarInt exceeds u64::MAX. -pub fn varint_len(varint: V) -> usize { - let varint_u64: u64 = varint.try_into().expect("varint exceeded u64"); - ((usize::try_from(u64::BITS - varint_u64.leading_zeros()) - .expect("64 > usize::MAX") - .saturating_sub(1)) / - 7) + - 1 -} - -/// Write a byte. -/// -/// This is used as a building block within generic functions. -pub fn write_byte(byte: &u8, w: &mut W) -> io::Result<()> { - w.write_all(&[*byte]) -} - -/// Write a number, VarInt-encoded. -/// -/// This will panic if the VarInt exceeds u64::MAX. -pub fn write_varint(varint: &U, w: &mut W) -> io::Result<()> { - let mut varint: u64 = (*varint).try_into().expect("varint exceeded u64"); - while { - let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)) - .expect("& eight_bit_mask left more than 8 bits set"); - varint >>= 7; - if varint != 0 { - b |= VARINT_CONTINUATION_MASK; - } - write_byte(&b, w)?; - varint != 0 - } {} - Ok(()) -} - -/// Write a scalar. -pub fn write_scalar(scalar: &Scalar, w: &mut W) -> io::Result<()> { - w.write_all(&scalar.to_bytes()) -} - -/// Write a point. -pub fn write_point(point: &EdwardsPoint, w: &mut W) -> io::Result<()> { - w.write_all(&point.compress().to_bytes()) -} - -/// Write a list of elements, without length-prefixing. -pub fn write_raw_vec io::Result<()>>( - f: F, - values: &[T], - w: &mut W, -) -> io::Result<()> { - for value in values { - f(value, w)?; - } - Ok(()) -} - -/// Write a list of elements, with length-prefixing. -pub fn write_vec io::Result<()>>( - f: F, - values: &[T], - w: &mut W, -) -> io::Result<()> { - write_varint(&values.len(), w)?; - write_raw_vec(f, values, w) -} - -/// Read a constant amount of bytes. -pub fn read_bytes(r: &mut R) -> io::Result<[u8; N]> { - let mut res = [0; N]; - r.read_exact(&mut res)?; - Ok(res) -} - -/// Read a single byte. -pub fn read_byte(r: &mut R) -> io::Result { - Ok(read_bytes::<_, 1>(r)?[0]) -} - -/// Read a u16, little-endian encoded. -pub fn read_u16(r: &mut R) -> io::Result { - read_bytes(r).map(u16::from_le_bytes) -} - -/// Read a u32, little-endian encoded. -pub fn read_u32(r: &mut R) -> io::Result { - read_bytes(r).map(u32::from_le_bytes) -} - -/// Read a u64, little-endian encoded. -pub fn read_u64(r: &mut R) -> io::Result { - read_bytes(r).map(u64::from_le_bytes) -} - -/// Read a canonically-encoded VarInt. 
-pub fn read_varint(r: &mut R) -> io::Result { - let mut bits = 0; - let mut res = 0; - while { - let b = read_byte(r)?; - if (bits != 0) && (b == 0) { - Err(io::Error::other("non-canonical varint"))?; - } - if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) { - Err(io::Error::other("varint overflow"))?; - } - - res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits; - bits += 7; - b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK - } {} - res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type")) -} - -/// Read a canonically-encoded scalar. -/// -/// Some scalars within the Monero protocol are not enforced to be canonically encoded. For such -/// scalars, they should be represented as `[u8; 32]` and later converted to scalars as relevant. -pub fn read_scalar(r: &mut R) -> io::Result { - Option::from(Scalar::from_canonical_bytes(read_bytes(r)?)) - .ok_or_else(|| io::Error::other("unreduced scalar")) -} - -/// Decompress a canonically-encoded Ed25519 point. -/// -/// Ed25519 is of order `8 * l`. This function ensures each of those `8 * l` points have a singular -/// encoding by checking points aren't encoded with an unreduced field element, and aren't negative -/// when the negative is equivalent (0 == -0). -/// -/// Since this decodes an Ed25519 point, it does not check the point is in the prime-order -/// subgroup. Torsioned points do have a canonical encoding, and only aren't canonical when -/// considered in relation to the prime-order subgroup. -pub fn decompress_point(bytes: [u8; 32]) -> Option { - CompressedEdwardsY(bytes) - .decompress() - // Ban points which are either unreduced or -0 - .filter(|point| point.compress().to_bytes() == bytes) -} - -/// Read a canonically-encoded Ed25519 point. -/// -/// This internally calls `decompress_point` and has the same definition of canonicity. This -/// function does not check the resulting point is within the prime-order subgroup. -pub fn read_point(r: &mut R) -> io::Result { - let bytes = read_bytes(r)?; - decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point")) -} - -/// Read a canonically-encoded Ed25519 point, within the prime-order subgroup. -pub fn read_torsion_free_point(r: &mut R) -> io::Result { - read_point(r) - .ok() - .filter(EdwardsPoint::is_torsion_free) - .ok_or_else(|| io::Error::other("invalid point")) -} - -/// Read a variable-length list of elements, without length-prefixing. -pub fn read_raw_vec io::Result>( - f: F, - len: usize, - r: &mut R, -) -> io::Result> { - let mut res = vec![]; - for _ in 0 .. len { - res.push(f(r)?); - } - Ok(res) -} - -/// Read a constant-length list of elements. -pub fn read_array io::Result, const N: usize>( - f: F, - r: &mut R, -) -> io::Result<[T; N]> { - read_raw_vec(f, N, r).map(|vec| { - vec.try_into().expect( - "read vector of specific length yet couldn't transform to an array of the same length", - ) - }) -} - -/// Read a length-prefixed variable-length list of elements. -/// -/// An optional bound on the length of the result may be provided. If `None`, the returned `Vec` -/// will be of the length read off the reader, if successfully read. If `Some(_)`, an error will be -/// raised if the length read off the read is greater than the bound. 
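// Editorial illustration, not part of the patch: a round trip through the VarInt helpers being
// removed above, assuming the `monero-io` crate as it existed prior to this deletion. Values are
// encoded little-endian in 7-bit groups, with the high bit of each byte flagging continuation,
// so 300 serializes to the two bytes [0xAC, 0x02].
#[cfg(test)]
#[test]
fn varint_round_trip() -> std::io::Result<()> {
  use monero_io::{varint_len, write_varint, read_varint};

  let mut buf = Vec::new();
  write_varint(&300u64, &mut buf)?;
  assert_eq!(buf, [0xAC_u8, 0x02]);
  assert_eq!(varint_len(300u64), buf.len());

  let decoded: u64 = read_varint(&mut buf.as_slice())?;
  assert_eq!(decoded, 300);
  Ok(())
}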
-pub fn read_vec io::Result>( - f: F, - length_bound: Option, - r: &mut R, -) -> io::Result> { - let declared_length: usize = read_varint(r)?; - if let Some(length_bound) = length_bound { - if declared_length > length_bound { - Err(io::Error::other("vector exceeds bound on length"))?; - } - } - read_raw_vec(f, declared_length, r) -} diff --git a/networks/monero/primitives/Cargo.toml b/networks/monero/primitives/Cargo.toml deleted file mode 100644 index 1aef394e..00000000 --- a/networks/monero/primitives/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "monero-primitives" -version = "0.1.0" -description = "Primitives for the Monero protocol" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/primitives" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -# Cryptographic dependencies -sha3 = { version = "0.10", default-features = false } -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -# Other Monero dependencies -monero-io = { path = "../io", version = "0.1", default-features = false } -monero-generators = { path = "../generators", version = "0.4", default-features = false } - -[dev-dependencies] -hex = { version = "0.4", default-features = false, features = ["alloc"] } - -[features] -std = [ - "std-shims/std", - - "zeroize/std", - - "sha3/std", - - "monero-generators/std", -] -default = ["std"] diff --git a/networks/monero/primitives/LICENSE b/networks/monero/primitives/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/primitives/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/primitives/README.md b/networks/monero/primitives/README.md deleted file mode 100644 index c866193b..00000000 --- a/networks/monero/primitives/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Monero Primitives - -Primitive structures and functions for the Monero protocol. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. 
- -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). diff --git a/networks/monero/primitives/src/lib.rs b/networks/monero/primitives/src/lib.rs deleted file mode 100644 index 47112d1d..00000000 --- a/networks/monero/primitives/src/lib.rs +++ /dev/null @@ -1,262 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use std_shims::{io, vec::Vec}; -#[cfg(feature = "std")] -use std_shims::sync::LazyLock; - -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use sha3::{Digest, Keccak256}; -use curve25519_dalek::{ - constants::ED25519_BASEPOINT_POINT, - traits::VartimePrecomputedMultiscalarMul, - scalar::Scalar, - edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}, -}; - -use monero_io::*; -use monero_generators::H; - -mod unreduced_scalar; -pub use unreduced_scalar::UnreducedScalar; - -#[cfg(test)] -mod tests; - -// On std, we cache some variables in statics. -#[cfg(feature = "std")] -static INV_EIGHT_CELL: LazyLock = LazyLock::new(|| Scalar::from(8u8).invert()); -/// The inverse of 8 over l, the prime factor of the order of Ed25519. -#[cfg(feature = "std")] -#[allow(non_snake_case)] -pub fn INV_EIGHT() -> Scalar { - *INV_EIGHT_CELL -} -// In no-std environments, we prefer the reduced memory use and calculate it ad-hoc. -/// The inverse of 8 over l, the prime factor of the order of Ed25519. -#[cfg(not(feature = "std"))] -#[allow(non_snake_case)] -pub fn INV_EIGHT() -> Scalar { - Scalar::from(8u8).invert() -} - -#[cfg(feature = "std")] -static G_PRECOMP_CELL: LazyLock = - LazyLock::new(|| VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT])); -/// A cached (if std) pre-computation of the Ed25519 generator, G. -#[cfg(feature = "std")] -#[allow(non_snake_case)] -pub fn G_PRECOMP() -> &'static VartimeEdwardsPrecomputation { - &G_PRECOMP_CELL -} -/// A cached (if std) pre-computation of the Ed25519 generator, G. -#[cfg(not(feature = "std"))] -#[allow(non_snake_case)] -pub fn G_PRECOMP() -> VartimeEdwardsPrecomputation { - VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT]) -} - -/// The Keccak-256 hash function. -pub fn keccak256(data: impl AsRef<[u8]>) -> [u8; 32] { - Keccak256::digest(data.as_ref()).into() -} - -/// Hash the provided data to a scalar via keccak256(data) % l. -/// -/// This function panics if it finds the Keccak-256 preimage for [0; 32]. -pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar { - let scalar = Scalar::from_bytes_mod_order(keccak256(data.as_ref())); - // Monero will explicitly error in this case - // This library acknowledges its practical impossibility of it occurring, and doesn't bother to - // code in logic to handle it. That said, if it ever occurs, something must happen in order to - // not generate/verify a proof we believe to be valid when it isn't - assert!( - scalar != Scalar::ZERO, - "keccak256(preimage) \\cong 0 \\mod l! Preimage: {:?}", - data.as_ref() - ); - scalar -} - -/// Transparent structure representing a Pedersen commitment's contents. -#[allow(non_snake_case)] -#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub struct Commitment { - /// The mask for this commitment. - pub mask: Scalar, - /// The amount committed to by this commitment. 
- pub amount: u64, -} - -impl core::fmt::Debug for Commitment { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive() - } -} - -impl Commitment { - /// A commitment to zero, defined with a mask of 1 (as to not be the identity). - pub fn zero() -> Commitment { - Commitment { mask: Scalar::ONE, amount: 0 } - } - - /// Create a new Commitment. - pub fn new(mask: Scalar, amount: u64) -> Commitment { - Commitment { mask, amount } - } - - /// Calculate the Pedersen commitment, as a point, from this transparent structure. - pub fn calculate(&self) -> EdwardsPoint { - EdwardsPoint::vartime_double_scalar_mul_basepoint(&Scalar::from(self.amount), &H, &self.mask) - } - - /// Write the Commitment. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn write(&self, w: &mut W) -> io::Result<()> { - w.write_all(&self.mask.to_bytes())?; - w.write_all(&self.amount.to_le_bytes()) - } - - /// Serialize the Commitment to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(32 + 8); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read a Commitment. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn read(r: &mut R) -> io::Result { - Ok(Commitment::new(read_scalar(r)?, read_u64(r)?)) - } -} - -/// Decoy data, as used for producing Monero's ring signatures. -#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub struct Decoys { - offsets: Vec, - signer_index: u8, - ring: Vec<[EdwardsPoint; 2]>, -} - -impl core::fmt::Debug for Decoys { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - fmt - .debug_struct("Decoys") - .field("offsets", &self.offsets) - .field("ring", &self.ring) - .finish_non_exhaustive() - } -} - -#[allow(clippy::len_without_is_empty)] -impl Decoys { - /// Create a new instance of decoy data. - /// - /// `offsets` are the positions of each ring member within the Monero blockchain, offset from the - /// prior member's position (with the initial ring member offset from 0). - pub fn new(offsets: Vec, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option { - if (offsets.len() > usize::from(u8::MAX)) || - (offsets.len() != ring.len()) || - (usize::from(signer_index) >= ring.len()) - { - None?; - } - // Check these offsets form representable positions - if offsets.iter().copied().try_fold(0, u64::checked_add).is_none() { - None?; - } - Some(Decoys { offsets, signer_index, ring }) - } - - /// The length of the ring. - pub fn len(&self) -> usize { - self.offsets.len() - } - - /// The positions of the ring members within the Monero blockchain, as their offsets. - /// - /// The list is formatted as the position of the first ring member, then the offset from each - /// ring member to its prior. - pub fn offsets(&self) -> &[u64] { - &self.offsets - } - - /// The positions of the ring members within the Monero blockchain. - pub fn positions(&self) -> Vec { - let mut res = Vec::with_capacity(self.len()); - res.push(self.offsets[0]); - for m in 1 .. self.len() { - res.push(res[m - 1] + self.offsets[m]); - } - res - } - - /// The index of the signer within the ring. 
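// Editorial illustration, not part of the patch: the offset encoding used by the deleted
// `Decoys` type above. Only the first entry is an absolute blockchain position; every later
// entry is a delta from the prior ring member, which is what `Decoys::positions` undoes. The
// helper below is hypothetical and simply mirrors that running sum.
fn positions_from_offsets(offsets: &[u64]) -> Vec<u64> {
  let mut positions = Vec::with_capacity(offsets.len());
  let mut cursor = 0u64;
  for offset in offsets {
    // Accumulate the deltas into absolute positions
    cursor += *offset;
    positions.push(cursor);
  }
  positions
}
// positions_from_offsets(&[100, 5, 7]) == vec![100, 105, 112]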
- pub fn signer_index(&self) -> u8 { - self.signer_index - } - - /// The ring. - pub fn ring(&self) -> &[[EdwardsPoint; 2]] { - &self.ring - } - - /// The [key, commitment] pair of the signer. - pub fn signer_ring_members(&self) -> [EdwardsPoint; 2] { - self.ring[usize::from(self.signer_index)] - } - - /// Write the Decoys. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> { - write_vec(write_varint, &self.offsets, w)?; - w.write_all(&[self.signer_index])?; - write_raw_vec( - |pair, w| { - write_point(&pair[0], w)?; - write_point(&pair[1], w) - }, - &self.ring, - w, - ) - } - - /// Serialize the Decoys to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - let mut res = - Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64)); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read a set of Decoys. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn read(r: &mut impl io::Read) -> io::Result { - let offsets = read_vec(read_varint, None, r)?; - let len = offsets.len(); - Decoys::new( - offsets, - read_byte(r)?, - read_raw_vec(|r| Ok([read_point(r)?, read_point(r)?]), len, r)?, - ) - .ok_or_else(|| io::Error::other("invalid Decoys")) - } -} diff --git a/networks/monero/primitives/src/tests.rs b/networks/monero/primitives/src/tests.rs deleted file mode 100644 index 68d3df75..00000000 --- a/networks/monero/primitives/src/tests.rs +++ /dev/null @@ -1,32 +0,0 @@ -use curve25519_dalek::scalar::Scalar; - -use crate::UnreducedScalar; - -#[test] -fn recover_scalars() { - let test_recover = |stored: &str, recovered: &str| { - let stored = UnreducedScalar(hex::decode(stored).unwrap().try_into().unwrap()); - let recovered = - Scalar::from_canonical_bytes(hex::decode(recovered).unwrap().try_into().unwrap()).unwrap(); - assert_eq!(stored.ref10_slide_scalar_vartime(), recovered); - }; - - // https://www.moneroinflation.com/static/data_py/report_scalars_df.pdf - // Table 4. - test_recover( - "cb2be144948166d0a9edb831ea586da0c376efa217871505ad77f6ff80f203f8", - "b8ffd6a1aee47828808ab0d4c8524cb5c376efa217871505ad77f6ff80f20308", - ); - test_recover( - "343d3df8a1051c15a400649c423dc4ed58bef49c50caef6ca4a618b80dee22f4", - "21113355bc682e6d7a9d5b3f2137a30259bef49c50caef6ca4a618b80dee2204", - ); - test_recover( - "c14f75d612800ca2c1dcfa387a42c9cc086c005bc94b18d204dd61342418eba7", - "4f473804b1d27ab2c789c80ab21d034a096c005bc94b18d204dd61342418eb07", - ); - test_recover( - "000102030405060708090a0b0c0d0e0f826c4f6e2329a31bc5bc320af0b2bcbb", - "a124cfd387f461bf3719e03965ee6877826c4f6e2329a31bc5bc320af0b2bc0b", - ); -} diff --git a/networks/monero/primitives/src/unreduced_scalar.rs b/networks/monero/primitives/src/unreduced_scalar.rs deleted file mode 100644 index ae0234f2..00000000 --- a/networks/monero/primitives/src/unreduced_scalar.rs +++ /dev/null @@ -1,145 +0,0 @@ -use core::cmp::Ordering; -use std_shims::{ - sync::LazyLock, - io::{self, *}, -}; - -use zeroize::Zeroize; - -use curve25519_dalek::scalar::Scalar; - -use monero_io::*; - -// Precomputed scalars used to recover an incorrectly reduced scalar. 
-static PRECOMPUTED_SCALARS: LazyLock<[Scalar; 8]> = LazyLock::new(|| { - let mut precomputed_scalars = [Scalar::ONE; 8]; - for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) { - *scalar = - Scalar::from(u64::try_from((i * 2) + 1).expect("enumerating more than u64::MAX / 2 items")); - } - precomputed_scalars -}); - -/// An unreduced scalar. -/// -/// While most of modern Monero enforces scalars be reduced, certain legacy parts of the code did -/// not. These section can generally simply be read as a scalar/reduced into a scalar when the time -/// comes, yet a couple have non-standard reductions performed. -/// -/// This struct delays scalar conversions and offers the non-standard reduction. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct UnreducedScalar(pub [u8; 32]); - -impl UnreducedScalar { - /// Write an UnreducedScalar. - pub fn write(&self, w: &mut W) -> io::Result<()> { - w.write_all(&self.0) - } - - /// Read an UnreducedScalar. - pub fn read(r: &mut R) -> io::Result { - Ok(UnreducedScalar(read_bytes(r)?)) - } - - fn as_bits(&self) -> [u8; 256] { - let mut bits = [0; 256]; - for (i, bit) in bits.iter_mut().enumerate() { - *bit = core::hint::black_box(1 & (self.0[i / 8] >> (i % 8))) - } - - bits - } - - // Computes the non-adjacent form of this scalar with width 5. - // - // This matches Monero's `slide` function and intentionally gives incorrect outputs under - // certain conditions in order to match Monero. - // - // This function does not execute in constant time and must only be used with public data. - fn non_adjacent_form(&self) -> [i8; 256] { - let bits = self.as_bits(); - let mut naf = [0i8; 256]; - for (b, bit) in bits.into_iter().enumerate() { - naf[b] = i8::try_from(bit).expect("bit didn't fit within an i8"); - } - - for i in 0 .. 256 { - if naf[i] != 0 { - // if the bit is a one, work our way up through the window - // combining the bits with this bit. - for b in 1 .. 6 { - if (i + b) >= 256 { - // if we are at the length of the array then break out - // the loop. - break; - } - // potential_carry - the value of the bit at i+b compared to the bit at i - let potential_carry = naf[i + b] << b; - - if potential_carry != 0 { - if (naf[i] + potential_carry) <= 15 { - // if our current "bit" plus the potential carry is less than 16 - // add it to our current "bit" and set the potential carry bit to 0. - naf[i] += potential_carry; - naf[i + b] = 0; - } else if (naf[i] - potential_carry) >= -15 { - // else if our current "bit" minus the potential carry is more than -16 - // take it away from our current "bit". - // we then work our way up through the bits setting ones to zero, when - // we hit the first zero we change it to one then stop, this is to factor - // in the minus. - naf[i] -= potential_carry; - #[allow(clippy::needless_range_loop)] - for k in (i + b) .. 256 { - if naf[k] == 0 { - naf[k] = 1; - break; - } - naf[k] = 0; - } - } else { - break; - } - } - } - } - } - - naf - } - - /// Recover the scalar that an array of bytes was incorrectly interpreted as by ref10's `slide` - /// function (as used by the reference Monero implementation in C++). - /// - /// For Borromean range proofs, Monero did not check the scalars used were reduced. This led to - /// some scalars serialized being interpreted as distinct scalars. This function recovers these - /// distinct scalars, as required to verify Borromean range proofs within the Monero protocol. - /// - /// See for more info. 
- // - /// This function does not execute in constant time and must only be used with public data. - pub fn ref10_slide_scalar_vartime(&self) -> Scalar { - if self.0[31] & 128 == 0 { - // Computing the w-NAF of a number can only give an output with 1 more bit than - // the number, so even if the number isn't reduced, the `slide` function will be - // correct when the last bit isn't set. - return Scalar::from_bytes_mod_order(self.0); - } - - let mut recovered = Scalar::ZERO; - for &numb in self.non_adjacent_form().iter().rev() { - recovered += recovered; - match numb.cmp(&0) { - Ordering::Greater => { - recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).expect("positive i8 -> usize") / 2] - } - Ordering::Less => { - recovered -= - PRECOMPUTED_SCALARS[usize::try_from(-numb).expect("negated negative i8 -> usize") / 2] - } - Ordering::Equal => (), - } - } - recovered - } -} diff --git a/networks/monero/ringct/borromean/Cargo.toml b/networks/monero/ringct/borromean/Cargo.toml deleted file mode 100644 index f5fdd34a..00000000 --- a/networks/monero/ringct/borromean/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -[package] -name = "monero-borromean" -version = "0.1.0" -description = "Borromean ring signatures arranged into a range proof, as done by the Monero protocol" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/ringct/borromean" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -# Cryptographic dependencies -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -# Other Monero dependencies -monero-io = { path = "../../io", version = "0.1", default-features = false } -monero-generators = { path = "../../generators", version = "0.4", default-features = false } -monero-primitives = { path = "../../primitives", version = "0.1", default-features = false } - -[features] -std = [ - "std-shims/std", - - "zeroize/std", - - "monero-io/std", - "monero-generators/std", - "monero-primitives/std", -] -default = ["std"] diff --git a/networks/monero/ringct/borromean/LICENSE b/networks/monero/ringct/borromean/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/ringct/borromean/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/ringct/borromean/README.md b/networks/monero/ringct/borromean/README.md deleted file mode 100644 index 3b836804..00000000 --- a/networks/monero/ringct/borromean/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Monero Borromean - -Borromean ring signatures arranged into a range proof, as done by the Monero -protocol. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). diff --git a/networks/monero/ringct/borromean/src/lib.rs b/networks/monero/ringct/borromean/src/lib.rs deleted file mode 100644 index fc0f2194..00000000 --- a/networks/monero/ringct/borromean/src/lib.rs +++ /dev/null @@ -1,112 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] -#![allow(non_snake_case)] - -use core::fmt::Debug; -use std_shims::io::{self, Read, Write}; - -use zeroize::Zeroize; - -use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint}; - -use monero_io::*; -use monero_generators::H_pow_2; -use monero_primitives::{keccak256_to_scalar, UnreducedScalar}; - -// 64 Borromean ring signatures, as needed for a 64-bit range proof. -// -// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced. -// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction -// algorithm which was in use. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -struct BorromeanSignatures { - s0: [UnreducedScalar; 64], - s1: [UnreducedScalar; 64], - ee: Scalar, -} - -impl BorromeanSignatures { - // Read a set of BorromeanSignatures. - fn read(r: &mut R) -> io::Result { - Ok(BorromeanSignatures { - s0: read_array(UnreducedScalar::read, r)?, - s1: read_array(UnreducedScalar::read, r)?, - ee: read_scalar(r)?, - }) - } - - // Write the set of BorromeanSignatures. - fn write(&self, w: &mut W) -> io::Result<()> { - for s0 in &self.s0 { - s0.write(w)?; - } - for s1 in &self.s1 { - s1.write(w)?; - } - write_scalar(&self.ee, w) - } - - fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool { - let mut transcript = [0; 2048]; - - for i in 0 .. 64 { - #[allow(non_snake_case)] - let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint( - &self.ee, - &keys_a[i], - &self.s0[i].ref10_slide_scalar_vartime(), - ); - #[allow(non_snake_case)] - let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint( - &keccak256_to_scalar(LL.compress().as_bytes()), - &keys_b[i], - &self.s1[i].ref10_slide_scalar_vartime(), - ); - transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes()); - } - - keccak256_to_scalar(transcript) == self.ee - } -} - -/// A range proof premised on Borromean ring signatures. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct BorromeanRange { - sigs: BorromeanSignatures, - bit_commitments: [EdwardsPoint; 64], -} - -impl BorromeanRange { - /// Read a BorromeanRange proof. - pub fn read(r: &mut R) -> io::Result { - Ok(BorromeanRange { - sigs: BorromeanSignatures::read(r)?, - bit_commitments: read_array(read_point, r)?, - }) - } - - /// Write the BorromeanRange proof. 
- pub fn write(&self, w: &mut W) -> io::Result<()> { - self.sigs.write(w)?; - write_raw_vec(write_point, &self.bit_commitments, w) - } - - /// Verify the commitment contains a 64-bit value. - #[must_use] - pub fn verify(&self, commitment: &EdwardsPoint) -> bool { - if &self.bit_commitments.iter().sum::() != commitment { - return false; - } - - #[allow(non_snake_case)] - let H_pow_2 = H_pow_2(); - let mut commitments_sub_one = [EdwardsPoint::identity(); 64]; - for i in 0 .. 64 { - commitments_sub_one[i] = self.bit_commitments[i] - H_pow_2[i]; - } - - self.sigs.verify(&self.bit_commitments, &commitments_sub_one) - } -} diff --git a/networks/monero/ringct/bulletproofs/Cargo.toml b/networks/monero/ringct/bulletproofs/Cargo.toml deleted file mode 100644 index 9c807193..00000000 --- a/networks/monero/ringct/bulletproofs/Cargo.toml +++ /dev/null @@ -1,55 +0,0 @@ -[package] -name = "monero-bulletproofs" -version = "0.1.0" -description = "Bulletproofs(+) range proofs, as defined by the Monero protocol" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/ringct/bulletproofs" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false } - -thiserror = { version = "1", default-features = false, optional = true } - -rand_core = { version = "0.6", default-features = false } -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -# Cryptographic dependencies -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -# Other Monero dependencies -monero-io = { path = "../../io", version = "0.1", default-features = false } -monero-generators = { path = "../../generators", version = "0.4", default-features = false } -monero-primitives = { path = "../../primitives", version = "0.1", default-features = false } - -[build-dependencies] -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } -monero-generators = { path = "../../generators", version = "0.4", default-features = false } - -[dev-dependencies] -hex-literal = "0.4" - -[features] -std = [ - "std-shims/std", - - "thiserror", - - "rand_core/std", - "zeroize/std", - - "monero-io/std", - "monero-generators/std", - "monero-primitives/std", -] -compile-time-generators = ["curve25519-dalek/precomputed-tables"] -default = ["std", "compile-time-generators"] diff --git a/networks/monero/ringct/bulletproofs/LICENSE b/networks/monero/ringct/bulletproofs/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/ringct/bulletproofs/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/ringct/bulletproofs/README.md b/networks/monero/ringct/bulletproofs/README.md deleted file mode 100644 index 8f407fef..00000000 --- a/networks/monero/ringct/bulletproofs/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Monero Bulletproofs(+) - -Bulletproofs(+) range proofs, as defined by the Monero protocol. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). -- `compile-time-generators` (on by default): Derives the generators at - compile-time so they don't need to be derived at runtime. This is recommended - if program size doesn't need to be kept minimal. diff --git a/networks/monero/ringct/bulletproofs/build.rs b/networks/monero/ringct/bulletproofs/build.rs deleted file mode 100644 index ff07f6da..00000000 --- a/networks/monero/ringct/bulletproofs/build.rs +++ /dev/null @@ -1,84 +0,0 @@ -use std::{ - io::Write, - env, - path::Path, - fs::{File, remove_file}, -}; - -#[cfg(feature = "compile-time-generators")] -fn generators(prefix: &'static str, path: &str) { - use curve25519_dalek::EdwardsPoint; - - use monero_generators::bulletproofs_generators; - - fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) { - for generator in points { - generators_string.extend( - format!( - " - curve25519_dalek::edwards::CompressedEdwardsY({:?}) - .decompress() - .expect(\"generator from build script wasn't on-curve\"), - ", - generator.compress().to_bytes() - ) - .chars(), - ); - } - } - - let generators = bulletproofs_generators(prefix.as_bytes()); - #[allow(non_snake_case)] - let mut G_str = String::new(); - serialize(&mut G_str, &generators.G); - #[allow(non_snake_case)] - let mut H_str = String::new(); - serialize(&mut H_str, &generators.H); - - let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path); - let _ = remove_file(&path); - File::create(&path) - .expect("failed to create file in $OUT_DIR") - .write_all( - format!( - " - pub(crate) static GENERATORS: LazyLock = LazyLock::new(|| Generators {{ - G: std_shims::vec![ - {G_str} - ], - H: std_shims::vec![ - {H_str} - ], - }}); - ", - ) - .as_bytes(), - ) - .expect("couldn't write generated source code to file on disk"); -} - -#[cfg(not(feature = "compile-time-generators"))] -fn generators(prefix: &'static str, path: &str) { - let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path); - let _ = remove_file(&path); - File::create(&path) - .expect("failed to create file in $OUT_DIR") - .write_all( - format!( - r#" - pub(crate) static GENERATORS: LazyLock = LazyLock::new(|| {{ - monero_generators::bulletproofs_generators(b"{prefix}") - }}); - "#, - ) - .as_bytes(), - ) - .expect("couldn't write generated source code to file on disk"); -} - -fn main() { - println!("cargo:rerun-if-changed=build.rs"); - - generators("bulletproof", "generators.rs"); - generators("bulletproof_plus", "generators_plus.rs"); -} diff --git 
a/networks/monero/ringct/bulletproofs/src/batch_verifier.rs b/networks/monero/ringct/bulletproofs/src/batch_verifier.rs deleted file mode 100644 index 103e6bf7..00000000 --- a/networks/monero/ringct/bulletproofs/src/batch_verifier.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std_shims::vec::Vec; - -use curve25519_dalek::{ - constants::ED25519_BASEPOINT_POINT, - traits::{IsIdentity, VartimeMultiscalarMul}, - scalar::Scalar, - edwards::EdwardsPoint, -}; - -use monero_generators::{H as MONERO_H, Generators}; - -use crate::{original, plus}; - -#[derive(Default)] -pub(crate) struct InternalBatchVerifier { - pub(crate) g: Scalar, - pub(crate) h: Scalar, - pub(crate) g_bold: Vec, - pub(crate) h_bold: Vec, - pub(crate) other: Vec<(Scalar, EdwardsPoint)>, -} - -impl InternalBatchVerifier { - #[must_use] - fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool { - /* - Technically, this following line can overflow, and joining these `Vec`s _may_ panic if - they're individually acceptable lengths yet their sum isn't. This is so negligible, due to - the amount of memory required, it's dismissed. - */ - let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len(); - let mut scalars = Vec::with_capacity(capacity); - let mut points = Vec::with_capacity(capacity); - - scalars.push(self.g); - points.push(G); - - scalars.push(self.h); - points.push(H); - - for (i, g_bold) in self.g_bold.into_iter().enumerate() { - scalars.push(g_bold); - points.push(generators.G[i]); - } - - for (i, h_bold) in self.h_bold.into_iter().enumerate() { - scalars.push(h_bold); - points.push(generators.H[i]); - } - - for (scalar, point) in self.other { - scalars.push(scalar); - points.push(point); - } - - EdwardsPoint::vartime_multiscalar_mul(scalars, points).is_identity() - } -} - -#[derive(Default)] -pub(crate) struct BulletproofsBatchVerifier(pub(crate) InternalBatchVerifier); -impl BulletproofsBatchVerifier { - #[must_use] - pub(crate) fn verify(self) -> bool { - self.0.verify(ED25519_BASEPOINT_POINT, *MONERO_H, &original::GENERATORS) - } -} - -#[derive(Default)] -pub(crate) struct BulletproofsPlusBatchVerifier(pub(crate) InternalBatchVerifier); -impl BulletproofsPlusBatchVerifier { - #[must_use] - pub(crate) fn verify(self) -> bool { - // Bulletproofs+ is written as per the paper, with G for the value and H for the mask - // Monero uses H for the value and G for the mask - self.0.verify(*MONERO_H, ED25519_BASEPOINT_POINT, &plus::GENERATORS) - } -} - -/// A batch verifier for Bulletproofs(+). -/// -/// This uses a fixed layout such that all fixed points only incur a single point scaling, -/// regardless of the amounts of proofs verified. For all variable points (commitments), they're -/// accumulated with the fixed points into a single multiscalar multiplication. -#[derive(Default)] -pub struct BatchVerifier { - pub(crate) original: BulletproofsBatchVerifier, - pub(crate) plus: BulletproofsPlusBatchVerifier, -} -impl BatchVerifier { - /// Create a new batch verifier. - pub fn new() -> Self { - Self { - original: BulletproofsBatchVerifier(InternalBatchVerifier::default()), - plus: BulletproofsPlusBatchVerifier(InternalBatchVerifier::default()), - } - } - - /// Verify all of the proofs queued within this batch verifier. - /// - /// This uses a variable-time multiscalar multiplication internally. 
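// Editorial sketch, not part of the patch: how the BatchVerifier described above is intended to
// be driven, assuming the monero-bulletproofs API as removed in this commit (`Bulletproof`,
// `BatchVerifier`, and `Bulletproof::batch_verify`, which appears further below in this diff).
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::edwards::EdwardsPoint;
use monero_bulletproofs::{Bulletproof, BatchVerifier};

fn verify_all_range_proofs(
  rng: &mut (impl RngCore + CryptoRng),
  proofs: &[(Bulletproof, Vec<EdwardsPoint>)],
) -> bool {
  let mut verifier = BatchVerifier::new();
  for (proof, commitments) in proofs {
    // Queue each proof; a `false` here means the proof wasn't sane and the batch is unusable
    if !proof.batch_verify(&mut *rng, &mut verifier, commitments) {
      return false;
    }
  }
  // A single variable-time multiscalar multiplication settles everything queued
  verifier.verify()
}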
- #[must_use] - pub fn verify(self) -> bool { - self.original.verify() && self.plus.verify() - } -} diff --git a/networks/monero/ringct/bulletproofs/src/core.rs b/networks/monero/ringct/bulletproofs/src/core.rs deleted file mode 100644 index 9dc86fcd..00000000 --- a/networks/monero/ringct/bulletproofs/src/core.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std_shims::{vec, vec::Vec}; - -use curve25519_dalek::{ - traits::{MultiscalarMul, VartimeMultiscalarMul}, - scalar::Scalar, - edwards::EdwardsPoint, -}; - -pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS}; - -pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint { - let mut buf_scalars = Vec::with_capacity(pairs.len()); - let mut buf_points = Vec::with_capacity(pairs.len()); - for (scalar, point) in pairs { - buf_scalars.push(scalar); - buf_points.push(point); - } - EdwardsPoint::multiscalar_mul(buf_scalars, buf_points) -} - -pub(crate) fn multiexp_vartime(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint { - let mut buf_scalars = Vec::with_capacity(pairs.len()); - let mut buf_points = Vec::with_capacity(pairs.len()); - for (scalar, point) in pairs { - buf_scalars.push(scalar); - buf_points.push(point); - } - EdwardsPoint::vartime_multiscalar_mul(buf_scalars, buf_points) -} - -/* -This has room for optimization worth investigating further. It currently takes -an iterative approach. It can be optimized further via divide and conquer. - -Assume there are 4 challenges. - -Iterative approach (current): - 1. Do the optimal multiplications across challenge column 0 and 1. - 2. Do the optimal multiplications across that result and column 2. - 3. Do the optimal multiplications across that result and column 3. - -Divide and conquer (worth investigating further): - 1. Do the optimal multiplications across challenge column 0 and 1. - 2. Do the optimal multiplications across challenge column 2 and 3. - 3. Multiply both results together. - -When there are 4 challenges (n=16), the iterative approach does 28 multiplications -versus divide and conquer's 24. 
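// Editorial worked example, not part of the patch (derived by stepping through the
// `challenge_products` function below): for two challenges [(x0, y0), (x1, y1)] the returned
// vector is
//   [y0*y1, y0*x1, x0*y1, x0*x1]
// In general, write index i as one bit per challenge; the j-th bit from the top selects x_j when
// set and y_j when clear. Building each entry from the already-computed entry at i / 2 is what
// lets the iterative approach reuse shared prefixes instead of recomputing every product.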
-*/ -pub(crate) fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec { - let mut products = vec![Scalar::ONE; 1 << challenges.len()]; - - if !challenges.is_empty() { - products[0] = challenges[0].1; - products[1] = challenges[0].0; - - for (j, challenge) in challenges.iter().enumerate().skip(1) { - let mut slots = (1 << (j + 1)) - 1; - while slots > 0 { - products[slots] = products[slots / 2] * challenge.0; - products[slots - 1] = products[slots / 2] * challenge.1; - - slots = slots.saturating_sub(2); - } - } - - // Sanity check since if the above failed to populate, it'd be critical - for product in &products { - debug_assert!(*product != Scalar::ZERO); - } - } - - products -} diff --git a/networks/monero/ringct/bulletproofs/src/lib.rs b/networks/monero/ringct/bulletproofs/src/lib.rs deleted file mode 100644 index 13a7a14e..00000000 --- a/networks/monero/ringct/bulletproofs/src/lib.rs +++ /dev/null @@ -1,311 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] -#![allow(non_snake_case)] - -use std_shims::{ - vec::Vec, - io::{self, Read, Write}, -}; - -use rand_core::{RngCore, CryptoRng}; -use zeroize::Zeroizing; - -use curve25519_dalek::edwards::EdwardsPoint; - -use monero_io::*; -pub use monero_generators::MAX_COMMITMENTS; -use monero_generators::COMMITMENT_BITS; -use monero_primitives::Commitment; - -pub(crate) mod scalar_vector; -pub(crate) mod point_vector; - -pub(crate) mod core; - -pub(crate) mod batch_verifier; -use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier}; -pub use batch_verifier::BatchVerifier; - -pub(crate) mod original; -use crate::original::{ - IpProof, AggregateRangeStatement as OriginalStatement, AggregateRangeWitness as OriginalWitness, - AggregateRangeProof as OriginalProof, -}; - -pub(crate) mod plus; -use crate::plus::{ - WipProof, AggregateRangeStatement as PlusStatement, AggregateRangeWitness as PlusWitness, - AggregateRangeProof as PlusProof, -}; - -#[cfg(test)] -mod tests; - -// The logarithm (over 2) of the amount of bits a value within a commitment may use. -const LOG_COMMITMENT_BITS: usize = COMMITMENT_BITS.ilog2() as usize; -// The maximum length of L/R `Vec`s. -const MAX_LR: usize = (MAX_COMMITMENTS.ilog2() as usize) + LOG_COMMITMENT_BITS; - -/// An error from proving/verifying Bulletproofs(+). -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum BulletproofError { - /// Proving/verifying a Bulletproof(+) range proof with no commitments. - #[cfg_attr(feature = "std", error("no commitments to prove the range for"))] - NoCommitments, - /// Proving/verifying a Bulletproof(+) range proof with more commitments than supported. - #[cfg_attr(feature = "std", error("too many commitments to prove the range for"))] - TooManyCommitments, -} - -/// A Bulletproof(+). -/// -/// This encapsulates either a Bulletproof or a Bulletproof+. -#[allow(clippy::large_enum_variant)] -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum Bulletproof { - /// A Bulletproof. - Original(OriginalProof), - /// A Bulletproof+. - Plus(PlusProof), -} - -impl Bulletproof { - fn bp_fields(plus: bool) -> usize { - if plus { - 6 - } else { - 9 - } - } - - /// Calculate the weight penalty for the Bulletproof(+). - /// - /// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone - /// accordingly doesn't properly represent the burden of the proof. 
Monero 'claws back' some of - /// the weight lost by using a proof smaller than it is fast to compensate for this. - /// - /// If the amount of outputs specified exceeds the maximum amount of outputs, the result for the - /// maximum amount of outputs will be returned. - // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/ - // src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124 - pub fn calculate_clawback(plus: bool, n_outputs: usize) -> (usize, usize) { - #[allow(non_snake_case)] - let mut LR_len = 0; - let mut n_padded_outputs = 1; - while n_padded_outputs < n_outputs.min(MAX_COMMITMENTS) { - LR_len += 1; - n_padded_outputs = 1 << LR_len; - } - LR_len += LOG_COMMITMENT_BITS; - - let mut clawback = 0; - if n_padded_outputs > 2 { - let fields = Bulletproof::bp_fields(plus); - let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2; - let size = (fields + (2 * LR_len)) * 32; - clawback = ((base * n_padded_outputs) - size) * 4 / 5; - } - - (clawback, LR_len) - } - - /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof. - pub fn prove( - rng: &mut R, - outputs: Vec, - ) -> Result { - if outputs.is_empty() { - Err(BulletproofError::NoCommitments)?; - } - if outputs.len() > MAX_COMMITMENTS { - Err(BulletproofError::TooManyCommitments)?; - } - let commitments = outputs.iter().map(Commitment::calculate).collect::>(); - Ok(Bulletproof::Original( - OriginalStatement::new(&commitments) - .expect("failed to create statement despite checking amount of commitments") - .prove( - rng, - OriginalWitness::new(outputs) - .expect("failed to create witness despite checking amount of commitments"), - ) - .expect( - "failed to prove Bulletproof::Original despite ensuring statement/witness consistency", - ), - )) - } - - /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof+. - pub fn prove_plus( - rng: &mut R, - outputs: Vec, - ) -> Result { - if outputs.is_empty() { - Err(BulletproofError::NoCommitments)?; - } - if outputs.len() > MAX_COMMITMENTS { - Err(BulletproofError::TooManyCommitments)?; - } - let commitments = outputs.iter().map(Commitment::calculate).collect::>(); - Ok(Bulletproof::Plus( - PlusStatement::new(&commitments) - .expect("failed to create statement despite checking amount of commitments") - .prove( - rng, - &Zeroizing::new( - PlusWitness::new(outputs) - .expect("failed to create witness despite checking amount of commitments"), - ), - ) - .expect("failed to prove Bulletproof::Plus despite ensuring statement/witness consistency"), - )) - } - - /// Verify the given Bulletproof(+). - #[must_use] - pub fn verify(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool { - match self { - Bulletproof::Original(bp) => { - let mut verifier = BulletproofsBatchVerifier::default(); - let Some(statement) = OriginalStatement::new(commitments) else { - return false; - }; - if !statement.verify(rng, &mut verifier, bp.clone()) { - return false; - } - verifier.verify() - } - Bulletproof::Plus(bp) => { - let mut verifier = BulletproofsPlusBatchVerifier::default(); - let Some(statement) = PlusStatement::new(commitments) else { - return false; - }; - if !statement.verify(rng, &mut verifier, bp.clone()) { - return false; - } - verifier.verify() - } - } - } - - /// Accumulate the verification for the given Bulletproof(+) into the specified BatchVerifier. - /// - /// Returns false if the Bulletproof(+) isn't sane, leaving the BatchVerifier in an undefined - /// state. 
- /// - /// Returns true if the Bulletproof(+) is sane, regardless of its validity. - /// - /// The BatchVerifier must have its verification function executed to actually verify this proof. - #[must_use] - pub fn batch_verify( - &self, - rng: &mut R, - verifier: &mut BatchVerifier, - commitments: &[EdwardsPoint], - ) -> bool { - match self { - Bulletproof::Original(bp) => { - let Some(statement) = OriginalStatement::new(commitments) else { - return false; - }; - statement.verify(rng, &mut verifier.original, bp.clone()) - } - Bulletproof::Plus(bp) => { - let Some(statement) = PlusStatement::new(commitments) else { - return false; - }; - statement.verify(rng, &mut verifier.plus, bp.clone()) - } - } - } - - fn write_core io::Result<()>>( - &self, - w: &mut W, - specific_write_vec: F, - ) -> io::Result<()> { - match self { - Bulletproof::Original(bp) => { - write_point(&bp.A, w)?; - write_point(&bp.S, w)?; - write_point(&bp.T1, w)?; - write_point(&bp.T2, w)?; - write_scalar(&bp.tau_x, w)?; - write_scalar(&bp.mu, w)?; - specific_write_vec(&bp.ip.L, w)?; - specific_write_vec(&bp.ip.R, w)?; - write_scalar(&bp.ip.a, w)?; - write_scalar(&bp.ip.b, w)?; - write_scalar(&bp.t_hat, w) - } - - Bulletproof::Plus(bp) => { - write_point(&bp.A, w)?; - write_point(&bp.wip.A, w)?; - write_point(&bp.wip.B, w)?; - write_scalar(&bp.wip.r_answer, w)?; - write_scalar(&bp.wip.s_answer, w)?; - write_scalar(&bp.wip.delta_answer, w)?; - specific_write_vec(&bp.wip.L, w)?; - specific_write_vec(&bp.wip.R, w) - } - } - } - - /// Write a Bulletproof(+) for the message signed by a transaction's signature. - /// - /// This has a distinct encoding from the standard encoding. - pub fn signature_write(&self, w: &mut W) -> io::Result<()> { - self.write_core(w, |points, w| write_raw_vec(write_point, points, w)) - } - - /// Write a Bulletproof(+). - pub fn write(&self, w: &mut W) -> io::Result<()> { - self.write_core(w, |points, w| write_vec(write_point, points, w)) - } - - /// Serialize a Bulletproof(+) to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut serialized = Vec::with_capacity(512); - self.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - /// Read a Bulletproof. - pub fn read(r: &mut R) -> io::Result { - Ok(Bulletproof::Original(OriginalProof { - A: read_point(r)?, - S: read_point(r)?, - T1: read_point(r)?, - T2: read_point(r)?, - tau_x: read_scalar(r)?, - mu: read_scalar(r)?, - ip: IpProof { - L: read_vec(read_point, Some(MAX_LR), r)?, - R: read_vec(read_point, Some(MAX_LR), r)?, - a: read_scalar(r)?, - b: read_scalar(r)?, - }, - t_hat: read_scalar(r)?, - })) - } - - /// Read a Bulletproof+. 
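A usage sketch of the prove/verify and write/read APIs above, in the spirit of this crate's tests (the rng, the amounts, and the unwraps are illustrative only, not part of the patch):

  // assuming: use rand_core::OsRng; use curve25519_dalek::Scalar; use monero_primitives::Commitment;
  let mut rng = OsRng;
  let outputs = vec![
    Commitment::new(Scalar::random(&mut rng), 1_000),
    Commitment::new(Scalar::random(&mut rng), 2_000),
  ];
  let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
  // Prove an aggregate range proof for both outputs, then verify it against their commitments
  let bp = Bulletproof::prove(&mut rng, outputs).unwrap();
  assert!(bp.verify(&mut rng, &commitments));
  // Round-trip through the standard encoding
  let bytes = bp.serialize();
  assert_eq!(Bulletproof::read(&mut bytes.as_slice()).unwrap(), bp);
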
- pub fn read_plus(r: &mut R) -> io::Result { - Ok(Bulletproof::Plus(PlusProof { - A: read_point(r)?, - wip: WipProof { - A: read_point(r)?, - B: read_point(r)?, - r_answer: read_scalar(r)?, - s_answer: read_scalar(r)?, - delta_answer: read_scalar(r)?, - L: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(), - R: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(), - }, - })) - } -} diff --git a/networks/monero/ringct/bulletproofs/src/original/inner_product.rs b/networks/monero/ringct/bulletproofs/src/original/inner_product.rs deleted file mode 100644 index 283064eb..00000000 --- a/networks/monero/ringct/bulletproofs/src/original/inner_product.rs +++ /dev/null @@ -1,307 +0,0 @@ -use std_shims::{vec, vec::Vec}; - -use zeroize::Zeroize; - -use curve25519_dalek::{Scalar, EdwardsPoint}; - -use monero_generators::H; -use monero_primitives::{INV_EIGHT, keccak256_to_scalar}; -use crate::{ - core::{multiexp_vartime, challenge_products}, - scalar_vector::ScalarVector, - point_vector::PointVector, - BulletproofsBatchVerifier, -}; - -/// An error from proving/verifying Inner-Product statements. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub(crate) enum IpError { - IncorrectAmountOfGenerators, - DifferingLrLengths, -} - -/// The Bulletproofs Inner-Product statement. -/// -/// This is for usage with Protocol 2 from the Bulletproofs paper. -#[derive(Clone, Debug)] -pub(crate) struct IpStatement { - // Weights for h_bold - h_bold_weights: ScalarVector, - // u as the discrete logarithm of G - u: Scalar, -} - -/// The witness for the Bulletproofs Inner-Product statement. -#[derive(Clone, Debug)] -pub(crate) struct IpWitness { - // a - a: ScalarVector, - // b - b: ScalarVector, -} - -impl IpWitness { - /// Construct a new witness for an Inner-Product statement. - /// - /// This functions return None if the lengths of a, b are mismatched, not a power of two, or are - /// empty. - pub(crate) fn new(a: ScalarVector, b: ScalarVector) -> Option { - if a.0.is_empty() || (a.len() != b.len()) { - None?; - } - - let mut power_of_2 = 1; - while power_of_2 < a.len() { - power_of_2 <<= 1; - } - if power_of_2 != a.len() { - None?; - } - - Some(Self { a, b }) - } -} - -/// A proof for the Bulletproofs Inner-Product statement. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub(crate) struct IpProof { - pub(crate) L: Vec, - pub(crate) R: Vec, - pub(crate) a: Scalar, - pub(crate) b: Scalar, -} - -impl IpStatement { - /// Create a new Inner-Product statement which won't transcript P. - /// - /// This MUST only be called when P is deterministic to already transcripted elements. - pub(crate) fn new_without_P_transcript(h_bold_weights: ScalarVector, u: Scalar) -> Self { - Self { h_bold_weights, u } - } - - // Transcript a round of the protocol - fn transcript_L_R(transcript: Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar { - let mut transcript = transcript.to_bytes().to_vec(); - transcript.extend(L.compress().to_bytes()); - transcript.extend(R.compress().to_bytes()); - keccak256_to_scalar(transcript) - } - - /// Prove for this Inner-Product statement. - /// - /// Returns an error if this statement couldn't be proven for (such as if the witness isn't - /// consistent). - pub(crate) fn prove( - self, - mut transcript: Scalar, - witness: IpWitness, - ) -> Result { - let generators = &crate::original::GENERATORS; - let g_bold_slice = &generators.G[.. witness.a.len()]; - let h_bold_slice = &generators.H[.. 
witness.a.len()]; - - let (mut g_bold, mut h_bold, u, mut a, mut b) = { - let IpStatement { h_bold_weights, u } = self; - let u = *H * u; - - // Ensure we have the exact amount of weights - if h_bold_weights.len() != g_bold_slice.len() { - Err(IpError::IncorrectAmountOfGenerators)?; - } - // Acquire a local copy of the generators - let g_bold = PointVector(g_bold_slice.to_vec()); - let h_bold = PointVector(h_bold_slice.to_vec()).mul_vec(&h_bold_weights); - - let IpWitness { a, b } = witness; - - (g_bold, h_bold, u, a, b) - }; - - let mut L_vec = vec![]; - let mut R_vec = vec![]; - - // `else: (n > 1)` case, lines 18-35 of the Bulletproofs paper - // This interprets `g_bold.len()` as `n` - while g_bold.len() > 1 { - // Split a, b, g_bold, h_bold as needed for lines 20-24 - let (a1, a2) = a.clone().split(); - let (b1, b2) = b.clone().split(); - - let (g_bold1, g_bold2) = g_bold.split(); - let (h_bold1, h_bold2) = h_bold.split(); - - let n_hat = g_bold1.len(); - - // Sanity - debug_assert_eq!(a1.len(), n_hat); - debug_assert_eq!(a2.len(), n_hat); - debug_assert_eq!(b1.len(), n_hat); - debug_assert_eq!(b2.len(), n_hat); - debug_assert_eq!(g_bold1.len(), n_hat); - debug_assert_eq!(g_bold2.len(), n_hat); - debug_assert_eq!(h_bold1.len(), n_hat); - debug_assert_eq!(h_bold2.len(), n_hat); - - // cl, cr, lines 21-22 - let cl = a1.clone().inner_product(&b2); - let cr = a2.clone().inner_product(&b1); - - let L = { - let mut L_terms = Vec::with_capacity(1 + (2 * g_bold1.len())); - for (a, g) in a1.0.iter().zip(g_bold2.0.iter()) { - L_terms.push((*a, *g)); - } - for (b, h) in b2.0.iter().zip(h_bold1.0.iter()) { - L_terms.push((*b, *h)); - } - L_terms.push((cl, u)); - // Uses vartime since this isn't a ZK proof - multiexp_vartime(&L_terms) - }; - L_vec.push(L * INV_EIGHT()); - - let R = { - let mut R_terms = Vec::with_capacity(1 + (2 * g_bold1.len())); - for (a, g) in a2.0.iter().zip(g_bold1.0.iter()) { - R_terms.push((*a, *g)); - } - for (b, h) in b1.0.iter().zip(h_bold2.0.iter()) { - R_terms.push((*b, *h)); - } - R_terms.push((cr, u)); - multiexp_vartime(&R_terms) - }; - R_vec.push(R * INV_EIGHT()); - - // Now that we've calculate L, R, transcript them to receive x (26-27) - transcript = Self::transcript_L_R( - transcript, - *L_vec.last().expect("couldn't get last L_vec despite always being non-empty"), - *R_vec.last().expect("couldn't get last R_vec despite always being non-empty"), - ); - let x = transcript; - let x_inv = x.invert(); - - // The prover and verifier now calculate the following (28-31) - g_bold = PointVector(Vec::with_capacity(g_bold1.len())); - for (a, b) in g_bold1.0.into_iter().zip(g_bold2.0.into_iter()) { - g_bold.0.push(multiexp_vartime(&[(x_inv, a), (x, b)])); - } - h_bold = PointVector(Vec::with_capacity(h_bold1.len())); - for (a, b) in h_bold1.0.into_iter().zip(h_bold2.0.into_iter()) { - h_bold.0.push(multiexp_vartime(&[(x, a), (x_inv, b)])); - } - - // 32-34 - a = (a1 * x) + &(a2 * x_inv); - b = (b1 * x_inv) + &(b2 * x); - } - - // `if n = 1` case from line 14-17 - - // Sanity - debug_assert_eq!(g_bold.len(), 1); - debug_assert_eq!(h_bold.len(), 1); - debug_assert_eq!(a.len(), 1); - debug_assert_eq!(b.len(), 1); - - // We simply send a/b - Ok(IpProof { L: L_vec, R: R_vec, a: a[0], b: b[0] }) - } - - /// Queue an Inner-Product proof for batch verification. - /// - /// This will return Err if there is an error. This will return Ok if the proof was successfully - /// queued for batch verification. 
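For orientation, the per-round invariant the loop above maintains (Protocol 2 of the Bulletproofs paper, stated here editorially in the code's own variable names and ignoring the INV_EIGHT/cofactor handling): with challenge x derived from L and R,

  a' = (x * a1) + (x^-1 * a2)                b' = (x^-1 * b1) + (x * b2)
  g_bold' = (x^-1 * g_bold1) + (x * g_bold2)   (element-wise)
  h_bold' = (x * h_bold1) + (x^-1 * h_bold2)   (element-wise)

and the implicit commitment folds as P' = (x^2 * L) + P + (x^-2 * R), where L = <a1, g_bold2> + <b2, h_bold1> + (<a1, b2> * u) and R = <a2, g_bold1> + <b1, h_bold2> + (<a2, b1> * u). The inner product <a', b'> therefore remains committed under the halved generators, which is what lets the proof terminate with just the two scalars a, b once n = 1.
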
The caller is required to verify the batch in order to ensure - /// the proof is actually correct. - pub(crate) fn verify( - self, - verifier: &mut BulletproofsBatchVerifier, - ip_rows: usize, - mut transcript: Scalar, - verifier_weight: Scalar, - proof: IpProof, - ) -> Result<(), IpError> { - let generators = &crate::original::GENERATORS; - let g_bold_slice = &generators.G[.. ip_rows]; - let h_bold_slice = &generators.H[.. ip_rows]; - - let IpStatement { h_bold_weights, u } = self; - - // Verify the L/R lengths - { - // Calculate the discrete log w.r.t. 2 for the amount of generators present - let mut lr_len = 0; - while (1 << lr_len) < g_bold_slice.len() { - lr_len += 1; - } - - // This proof has less/more terms than the passed in generators are for - if proof.L.len() != lr_len { - Err(IpError::IncorrectAmountOfGenerators)?; - } - if proof.L.len() != proof.R.len() { - Err(IpError::DifferingLrLengths)?; - } - } - - // Again, we start with the `else: (n > 1)` case - - // We need x, x_inv per lines 25-27 for lines 28-31 - let mut xs = Vec::with_capacity(proof.L.len()); - for (L, R) in proof.L.iter().zip(proof.R.iter()) { - transcript = Self::transcript_L_R(transcript, *L, *R); - xs.push(transcript); - } - - // We calculate their inverse in batch - let mut x_invs = xs.clone(); - Scalar::batch_invert(&mut x_invs); - - // Now, with x and x_inv, we need to calculate g_bold', h_bold', P' - // - // For the sake of performance, we solely want to calculate all of these in terms of scalings - // for g_bold, h_bold, P, and don't want to actually perform intermediary scalings of the - // points - // - // L and R are easy, as it's simply x**2, x**-2 - // - // For the series of g_bold, h_bold, we use the `challenge_products` function - // For how that works, please see its own documentation - let product_cache = { - let mut challenges = Vec::with_capacity(proof.L.len()); - - let x_iter = xs.into_iter().zip(x_invs); - let lr_iter = proof.L.into_iter().zip(proof.R); - for ((x, x_inv), (L, R)) in x_iter.zip(lr_iter) { - challenges.push((x, x_inv)); - verifier.0.other.push((verifier_weight * (x * x), L.mul_by_cofactor())); - verifier.0.other.push((verifier_weight * (x_inv * x_inv), R.mul_by_cofactor())); - } - - challenge_products(&challenges) - }; - - // And now for the `if n = 1` case - let c = proof.a * proof.b; - - // The multiexp of these terms equate to the final permutation of P - // We now add terms for a * g_bold' + b * h_bold' b + c * u, with the scalars negative such - // that the terms sum to 0 for an honest prover - - // The g_bold * a term case from line 16 - #[allow(clippy::needless_range_loop)] - for i in 0 .. g_bold_slice.len() { - verifier.0.g_bold[i] -= verifier_weight * product_cache[i] * proof.a; - } - // The h_bold * b term case from line 16 - for i in 0 .. 
h_bold_slice.len() { - verifier.0.h_bold[i] -= - verifier_weight * product_cache[product_cache.len() - 1 - i] * proof.b * h_bold_weights[i]; - } - // The c * u term case from line 16 - verifier.0.h -= verifier_weight * c * u; - - Ok(()) - } -} diff --git a/networks/monero/ringct/bulletproofs/src/original/mod.rs b/networks/monero/ringct/bulletproofs/src/original/mod.rs deleted file mode 100644 index 1a5d034b..00000000 --- a/networks/monero/ringct/bulletproofs/src/original/mod.rs +++ /dev/null @@ -1,342 +0,0 @@ -use std_shims::{sync::LazyLock, vec::Vec}; - -use rand_core::{RngCore, CryptoRng}; - -use zeroize::Zeroize; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, Scalar, EdwardsPoint}; - -use monero_generators::{H as MONERO_H, Generators, MAX_COMMITMENTS, COMMITMENT_BITS}; -use monero_primitives::{Commitment, INV_EIGHT, keccak256_to_scalar}; -use crate::{core::multiexp, scalar_vector::ScalarVector, BulletproofsBatchVerifier}; - -pub(crate) mod inner_product; -use inner_product::*; -pub(crate) use inner_product::IpProof; - -include!(concat!(env!("OUT_DIR"), "/generators.rs")); - -#[derive(Clone, Debug)] -pub(crate) struct AggregateRangeStatement<'a> { - commitments: &'a [EdwardsPoint], -} - -#[derive(Clone, Debug)] -pub(crate) struct AggregateRangeWitness { - commitments: Vec, -} - -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct AggregateRangeProof { - pub(crate) A: EdwardsPoint, - pub(crate) S: EdwardsPoint, - pub(crate) T1: EdwardsPoint, - pub(crate) T2: EdwardsPoint, - pub(crate) tau_x: Scalar, - pub(crate) mu: Scalar, - pub(crate) t_hat: Scalar, - pub(crate) ip: IpProof, -} - -impl<'a> AggregateRangeStatement<'a> { - pub(crate) fn new(commitments: &'a [EdwardsPoint]) -> Option { - if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) { - None?; - } - Some(Self { commitments }) - } -} - -impl AggregateRangeWitness { - pub(crate) fn new(commitments: Vec) -> Option { - if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) { - None?; - } - Some(Self { commitments }) - } -} - -impl<'a> AggregateRangeStatement<'a> { - fn initial_transcript(&self) -> (Scalar, Vec) { - let V = self.commitments.iter().map(|c| c * INV_EIGHT()).collect::>(); - (keccak256_to_scalar(V.iter().flat_map(|V| V.compress().to_bytes()).collect::>()), V) - } - - fn transcript_A_S(transcript: Scalar, A: EdwardsPoint, S: EdwardsPoint) -> (Scalar, Scalar) { - let mut buf = Vec::with_capacity(96); - buf.extend(transcript.to_bytes()); - buf.extend(A.compress().to_bytes()); - buf.extend(S.compress().to_bytes()); - let y = keccak256_to_scalar(buf); - let z = keccak256_to_scalar(y.to_bytes()); - (y, z) - } - - fn transcript_T12(transcript: Scalar, T1: EdwardsPoint, T2: EdwardsPoint) -> Scalar { - let mut buf = Vec::with_capacity(128); - buf.extend(transcript.to_bytes()); - buf.extend(transcript.to_bytes()); - buf.extend(T1.compress().to_bytes()); - buf.extend(T2.compress().to_bytes()); - keccak256_to_scalar(buf) - } - - fn transcript_tau_x_mu_t_hat( - transcript: Scalar, - tau_x: Scalar, - mu: Scalar, - t_hat: Scalar, - ) -> Scalar { - let mut buf = Vec::with_capacity(128); - buf.extend(transcript.to_bytes()); - buf.extend(transcript.to_bytes()); - buf.extend(tau_x.to_bytes()); - buf.extend(mu.to_bytes()); - buf.extend(t_hat.to_bytes()); - keccak256_to_scalar(buf) - } - - #[allow(clippy::needless_pass_by_value)] - pub(crate) fn prove( - self, - rng: &mut (impl RngCore + CryptoRng), - witness: AggregateRangeWitness, - ) -> Option { - if self.commitments != 
witness.commitments.iter().map(Commitment::calculate).collect::>() - { - None? - }; - - let generators = &GENERATORS; - - let (mut transcript, _) = self.initial_transcript(); - - // Find out the padded amount of commitments - let mut padded_pow_of_2 = 1; - while padded_pow_of_2 < witness.commitments.len() { - padded_pow_of_2 <<= 1; - } - - let mut aL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS); - for (i, commitment) in witness.commitments.iter().enumerate() { - let mut amount = commitment.amount; - for j in 0 .. COMMITMENT_BITS { - aL[(i * COMMITMENT_BITS) + j] = Scalar::from(amount & 1); - amount >>= 1; - } - } - let aR = aL.clone() - Scalar::ONE; - - let alpha = Scalar::random(&mut *rng); - - let A = { - let mut terms = Vec::with_capacity(1 + (2 * aL.len())); - terms.push((alpha, ED25519_BASEPOINT_POINT)); - for (aL, G) in aL.0.iter().zip(&generators.G) { - terms.push((*aL, *G)); - } - for (aR, H) in aR.0.iter().zip(&generators.H) { - terms.push((*aR, *H)); - } - let res = multiexp(&terms) * INV_EIGHT(); - terms.zeroize(); - res - }; - - let mut sL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS); - let mut sR = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS); - for i in 0 .. (padded_pow_of_2 * COMMITMENT_BITS) { - sL[i] = Scalar::random(&mut *rng); - sR[i] = Scalar::random(&mut *rng); - } - let rho = Scalar::random(&mut *rng); - - let S = { - let mut terms = Vec::with_capacity(1 + (2 * sL.len())); - terms.push((rho, ED25519_BASEPOINT_POINT)); - for (sL, G) in sL.0.iter().zip(&generators.G) { - terms.push((*sL, *G)); - } - for (sR, H) in sR.0.iter().zip(&generators.H) { - terms.push((*sR, *H)); - } - let res = multiexp(&terms) * INV_EIGHT(); - terms.zeroize(); - res - }; - - let (y, z) = Self::transcript_A_S(transcript, A, S); - transcript = z; - let z = ScalarVector::powers(z, 3 + padded_pow_of_2); - - let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS); - - let l = [aL - z[1], sL]; - let y_pow_n = ScalarVector::powers(y, aR.len()); - let mut r = [((aR + z[1]) * &y_pow_n), sR * &y_pow_n]; - { - for j in 0 .. padded_pow_of_2 { - for i in 0 .. 
COMMITMENT_BITS { - r[0].0[(j * COMMITMENT_BITS) + i] += z[2 + j] * twos[i]; - } - } - } - let t1 = (l[0].clone().inner_product(&r[1])) + (r[0].clone().inner_product(&l[1])); - let t2 = l[1].clone().inner_product(&r[1]); - - let tau_1 = Scalar::random(&mut *rng); - let T1 = { - let mut T1_terms = [(t1, *MONERO_H), (tau_1, ED25519_BASEPOINT_POINT)]; - for term in &mut T1_terms { - term.0 *= INV_EIGHT(); - } - let T1 = multiexp(&T1_terms); - T1_terms.zeroize(); - T1 - }; - let tau_2 = Scalar::random(&mut *rng); - let T2 = { - let mut T2_terms = [(t2, *MONERO_H), (tau_2, ED25519_BASEPOINT_POINT)]; - for term in &mut T2_terms { - term.0 *= INV_EIGHT(); - } - let T2 = multiexp(&T2_terms); - T2_terms.zeroize(); - T2 - }; - - transcript = Self::transcript_T12(transcript, T1, T2); - let x = transcript; - - let [l0, l1] = l; - let l = l0 + &(l1 * x); - let [r0, r1] = r; - let r = r0 + &(r1 * x); - let t_hat = l.clone().inner_product(&r); - let mut tau_x = ((tau_2 * x) + tau_1) * x; - { - for (i, commitment) in witness.commitments.iter().enumerate() { - tau_x += z[2 + i] * commitment.mask; - } - } - let mu = alpha + (rho * x); - - let y_inv_pow_n = ScalarVector::powers(y.invert(), l.len()); - - transcript = Self::transcript_tau_x_mu_t_hat(transcript, tau_x, mu, t_hat); - let x_ip = transcript; - - let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip) - .prove( - transcript, - IpWitness::new(l, r).expect("Bulletproofs::Original created an invalid IpWitness"), - ) - .expect("Bulletproofs::Original failed to prove the inner-product"); - - let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip }; - #[cfg(debug_assertions)] - { - let mut verifier = BulletproofsBatchVerifier::default(); - debug_assert!(self.verify(rng, &mut verifier, res.clone())); - debug_assert!(verifier.verify()); - } - Some(res) - } - - #[must_use] - pub(crate) fn verify( - self, - rng: &mut (impl RngCore + CryptoRng), - verifier: &mut BulletproofsBatchVerifier, - mut proof: AggregateRangeProof, - ) -> bool { - let mut padded_pow_of_2 = 1; - while padded_pow_of_2 < self.commitments.len() { - padded_pow_of_2 <<= 1; - } - let ip_rows = padded_pow_of_2 * COMMITMENT_BITS; - - while verifier.0.g_bold.len() < ip_rows { - verifier.0.g_bold.push(Scalar::ZERO); - verifier.0.h_bold.push(Scalar::ZERO); - } - - let (mut transcript, mut commitments) = self.initial_transcript(); - for commitment in &mut commitments { - *commitment = commitment.mul_by_cofactor(); - } - - let (y, z) = Self::transcript_A_S(transcript, proof.A, proof.S); - transcript = z; - let z = ScalarVector::powers(z, 3 + padded_pow_of_2); - transcript = Self::transcript_T12(transcript, proof.T1, proof.T2); - let x = transcript; - transcript = Self::transcript_tau_x_mu_t_hat(transcript, proof.tau_x, proof.mu, proof.t_hat); - let x_ip = transcript; - - proof.A = proof.A.mul_by_cofactor(); - proof.S = proof.S.mul_by_cofactor(); - proof.T1 = proof.T1.mul_by_cofactor(); - proof.T2 = proof.T2.mul_by_cofactor(); - - let y_pow_n = ScalarVector::powers(y, ip_rows); - let y_inv_pow_n = ScalarVector::powers(y.invert(), ip_rows); - - let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS); - - // 65 - { - let weight = Scalar::random(&mut *rng); - verifier.0.h += weight * proof.t_hat; - verifier.0.g += weight * proof.tau_x; - - // Now that we've accumulated the lhs, negate the weight and accumulate the rhs - // These will now sum to 0 if equal - let weight = -weight; - - verifier.0.h += weight * (z[1] - (z[2])) * y_pow_n.sum(); - - for (i, commitment) in 
commitments.iter().enumerate() { - verifier.0.other.push((weight * z[2 + i], *commitment)); - } - - for i in 0 .. padded_pow_of_2 { - verifier.0.h -= weight * z[3 + i] * twos.clone().sum(); - } - verifier.0.other.push((weight * x, proof.T1)); - verifier.0.other.push((weight * (x * x), proof.T2)); - } - - let ip_weight = Scalar::random(&mut *rng); - - // 66 - verifier.0.other.push((ip_weight, proof.A)); - verifier.0.other.push((ip_weight * x, proof.S)); - // We can replace these with a g_sum, h_sum scalar in the batch verifier - // It'd trade `2 * ip_rows` scalar additions (per proof) for one scalar addition and an - // additional term in the MSM - let ip_z = ip_weight * z[1]; - for i in 0 .. ip_rows { - verifier.0.h_bold[i] += ip_z; - } - let neg_ip_z = -ip_z; - for i in 0 .. ip_rows { - verifier.0.g_bold[i] += neg_ip_z; - } - { - for j in 0 .. padded_pow_of_2 { - for i in 0 .. COMMITMENT_BITS { - let full_i = (j * COMMITMENT_BITS) + i; - verifier.0.h_bold[full_i] += ip_weight * y_inv_pow_n[full_i] * z[2 + j] * twos[i]; - } - } - } - verifier.0.h += ip_weight * x_ip * proof.t_hat; - - // 67, 68 - verifier.0.g += ip_weight * -proof.mu; - let res = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip) - .verify(verifier, ip_rows, transcript, ip_weight, proof.ip); - res.is_ok() - } -} diff --git a/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs b/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs deleted file mode 100644 index 6468cdf1..00000000 --- a/networks/monero/ringct/bulletproofs/src/plus/aggregate_range_proof.rs +++ /dev/null @@ -1,264 +0,0 @@ -use std_shims::{vec, vec::Vec}; - -use rand_core::{RngCore, CryptoRng}; -use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; - -use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint}; - -use monero_primitives::{INV_EIGHT, Commitment, keccak256_to_scalar}; - -use crate::{ - batch_verifier::BulletproofsPlusBatchVerifier, - core::{MAX_COMMITMENTS, COMMITMENT_BITS, multiexp, multiexp_vartime}, - plus::{ - ScalarVector, PointVector, GeneratorsList, BpPlusGenerators, - transcript::*, - weighted_inner_product::{WipStatement, WipWitness, WipProof}, - padded_pow_of_2, u64_decompose, - }, -}; - -// Figure 3 of the Bulletproofs+ Paper -#[derive(Clone, Debug)] -pub(crate) struct AggregateRangeStatement<'a> { - generators: BpPlusGenerators, - V: &'a [EdwardsPoint], -} - -#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)] -pub(crate) struct AggregateRangeWitness(Vec); - -impl AggregateRangeWitness { - pub(crate) fn new(commitments: Vec) -> Option { - if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) { - return None; - } - - Some(AggregateRangeWitness(commitments)) - } -} - -/// Internal structure representing a Bulletproof+, as defined by Monero.. 
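For scale, setting the L/R vectors aside: this struct serializes A plus the WIP proof's A, B, r_answer, s_answer, and delta_answer, the six 32-byte elements counted by bp_fields(true) in the removed lib.rs, against nine (A, S, T1, T2, tau_x, mu, a, b, t_hat) for the original proof. Plugged into the size expression in calculate_clawback, a two-output Bulletproof+ comes to (6 + (2 * 7)) * 32 = 640 bytes where the original comes to (9 + (2 * 7)) * 32 = 736 (a sketch of the arithmetic, not text from the patch).
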
-#[doc(hidden)] -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct AggregateRangeProof { - pub(crate) A: EdwardsPoint, - pub(crate) wip: WipProof, -} - -struct AHatComputation { - y: Scalar, - d_descending_y_plus_z: ScalarVector, - y_mn_plus_one: Scalar, - z: Scalar, - z_pow: ScalarVector, - A_hat: EdwardsPoint, -} - -impl<'a> AggregateRangeStatement<'a> { - pub(crate) fn new(V: &'a [EdwardsPoint]) -> Option { - if V.is_empty() || (V.len() > MAX_COMMITMENTS) { - return None; - } - - Some(Self { generators: BpPlusGenerators::new(), V }) - } - - fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) { - let y = keccak256_to_scalar( - [transcript.to_bytes().as_ref(), A.compress().to_bytes().as_ref()].concat(), - ); - let z = keccak256_to_scalar(y.to_bytes().as_ref()); - *transcript = z; - (y, z) - } - - fn d_j(j: usize, m: usize) -> ScalarVector { - let mut d_j = Vec::with_capacity(m * COMMITMENT_BITS); - for _ in 0 .. (j - 1) * COMMITMENT_BITS { - d_j.push(Scalar::ZERO); - } - d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS).0); - for _ in 0 .. (m - j) * COMMITMENT_BITS { - d_j.push(Scalar::ZERO); - } - ScalarVector(d_j) - } - - fn compute_A_hat( - mut V: PointVector, - generators: &BpPlusGenerators, - transcript: &mut Scalar, - mut A: EdwardsPoint, - ) -> AHatComputation { - let (y, z) = Self::transcript_A(transcript, A); - A = A.mul_by_cofactor(); - - while V.len() < padded_pow_of_2(V.len()) { - V.0.push(EdwardsPoint::identity()); - } - let mn = V.len() * COMMITMENT_BITS; - - // 2, 4, 6, 8... powers of z, of length equivalent to the amount of commitments - let mut z_pow = Vec::with_capacity(V.len()); - // z**2 - z_pow.push(z * z); - - let mut d = ScalarVector::new(mn); - for j in 1 ..= V.len() { - z_pow.push( - *z_pow.last().expect("couldn't get last z_pow despite always being non-empty") * z_pow[0], - ); - d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1])); - } - - let mut ascending_y = ScalarVector(vec![y]); - for i in 1 .. 
d.len() { - ascending_y.0.push(ascending_y[i - 1] * y); - } - let y_pows = ascending_y.clone().sum(); - - let mut descending_y = ascending_y.clone(); - descending_y.0.reverse(); - - let d_descending_y = d.clone() * &descending_y; - let d_descending_y_plus_z = d_descending_y + z; - - let y_mn_plus_one = descending_y[0] * y; - - let mut commitment_accum = EdwardsPoint::identity(); - for (j, commitment) in V.0.iter().enumerate() { - commitment_accum += *commitment * z_pow[j]; - } - - let neg_z = -z; - let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2); - for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() { - A_terms.push((neg_z, generators.generator(GeneratorsList::GBold, i))); - A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold, i))); - } - A_terms.push((y_mn_plus_one, commitment_accum)); - A_terms.push(( - ((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * (z * z))), - BpPlusGenerators::g(), - )); - - AHatComputation { - y, - d_descending_y_plus_z, - y_mn_plus_one, - z, - z_pow: ScalarVector(z_pow), - A_hat: A + multiexp_vartime(&A_terms), - } - } - - pub(crate) fn prove( - self, - rng: &mut R, - witness: &AggregateRangeWitness, - ) -> Option { - // Check for consistency with the witness - if self.V.len() != witness.0.len() { - return None; - } - for (commitment, witness) in self.V.iter().zip(witness.0.iter()) { - if witness.calculate() != *commitment { - return None; - } - } - - let Self { generators, V } = self; - // Monero expects all of these points to be torsion-free - // Generally, for Bulletproofs, it sends points * INV_EIGHT and then performs a torsion clear - // by multiplying by 8 - // This also restores the original value due to the preprocessing - // Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable - // clearing its cofactor without mutating the value - // For some reason, these values are transcripted * INV_EIGHT, not as transmitted - let V = V.iter().map(|V| V * INV_EIGHT()).collect::>(); - let mut transcript = initial_transcript(V.iter()); - let mut V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::>(); - - // Pad V - while V.len() < padded_pow_of_2(V.len()) { - V.push(EdwardsPoint::identity()); - } - - let generators = generators.reduce(V.len() * COMMITMENT_BITS); - - let mut d_js = Vec::with_capacity(V.len()); - let mut a_l = ScalarVector(Vec::with_capacity(V.len() * COMMITMENT_BITS)); - for j in 1 ..= V.len() { - d_js.push(Self::d_j(j, V.len())); - #[allow(clippy::map_unwrap_or)] - a_l.0.append( - &mut u64_decompose( - *witness.0.get(j - 1).map(|commitment| &commitment.amount).unwrap_or(&0), - ) - .0, - ); - } - - let a_r = a_l.clone() - Scalar::ONE; - - let alpha = Scalar::random(&mut *rng); - - let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1); - for (i, a_l) in a_l.0.iter().enumerate() { - A_terms.push((*a_l, generators.generator(GeneratorsList::GBold, i))); - } - for (i, a_r) in a_r.0.iter().enumerate() { - A_terms.push((*a_r, generators.generator(GeneratorsList::HBold, i))); - } - A_terms.push((alpha, BpPlusGenerators::h())); - let mut A = multiexp(&A_terms); - A_terms.zeroize(); - - // Multiply by INV_EIGHT per earlier commentary - A *= INV_EIGHT(); - - let AHatComputation { y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat } = - Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A); - - let a_l = a_l - z; - let a_r = a_r + &d_descending_y_plus_z; - let mut alpha = alpha; - for j in 1 ..= witness.0.len() { - alpha += z_pow[j - 1] * 
witness.0[j - 1].mask * y_mn_plus_one; - } - - Some(AggregateRangeProof { - A, - wip: WipStatement::new(generators, A_hat, y) - .prove( - rng, - transcript, - &Zeroizing::new( - WipWitness::new(a_l, a_r, alpha) - .expect("Bulletproofs::Plus created an invalid WipWitness"), - ), - ) - .expect("Bulletproof::Plus failed to prove the weighted inner-product"), - }) - } - - pub(crate) fn verify( - self, - rng: &mut R, - verifier: &mut BulletproofsPlusBatchVerifier, - proof: AggregateRangeProof, - ) -> bool { - let Self { generators, V } = self; - - let V = V.iter().map(|V| V * INV_EIGHT()).collect::>(); - let mut transcript = initial_transcript(V.iter()); - let V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::>(); - - let generators = generators.reduce(V.len() * COMMITMENT_BITS); - - let AHatComputation { y, A_hat, .. } = - Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A); - WipStatement::new(generators, A_hat, y).verify(rng, verifier, transcript, proof.wip) - } -} diff --git a/networks/monero/ringct/bulletproofs/src/plus/mod.rs b/networks/monero/ringct/bulletproofs/src/plus/mod.rs deleted file mode 100644 index 465b878a..00000000 --- a/networks/monero/ringct/bulletproofs/src/plus/mod.rs +++ /dev/null @@ -1,84 +0,0 @@ -#![allow(non_snake_case)] - -use std_shims::sync::LazyLock; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, scalar::Scalar, edwards::EdwardsPoint}; - -use monero_generators::{H, Generators}; - -pub(crate) use crate::{scalar_vector::ScalarVector, point_vector::PointVector}; - -pub(crate) mod transcript; -pub(crate) mod weighted_inner_product; -pub(crate) use weighted_inner_product::*; -pub(crate) mod aggregate_range_proof; -pub(crate) use aggregate_range_proof::*; - -pub(crate) fn padded_pow_of_2(i: usize) -> usize { - let mut next_pow_of_2 = 1; - while next_pow_of_2 < i { - next_pow_of_2 <<= 1; - } - next_pow_of_2 -} - -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub(crate) enum GeneratorsList { - GBold, - HBold, -} - -#[derive(Clone, Debug)] -pub(crate) struct BpPlusGenerators { - g_bold: &'static [EdwardsPoint], - h_bold: &'static [EdwardsPoint], -} - -include!(concat!(env!("OUT_DIR"), "/generators_plus.rs")); - -impl BpPlusGenerators { - #[allow(clippy::new_without_default)] - pub(crate) fn new() -> Self { - let gens = &GENERATORS; - BpPlusGenerators { g_bold: &gens.G, h_bold: &gens.H } - } - - pub(crate) fn len(&self) -> usize { - self.g_bold.len() - } - - pub(crate) fn g() -> EdwardsPoint { - *H - } - - pub(crate) fn h() -> EdwardsPoint { - ED25519_BASEPOINT_POINT - } - - pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint { - match list { - GeneratorsList::GBold => self.g_bold[i], - GeneratorsList::HBold => self.h_bold[i], - } - } - - pub(crate) fn reduce(&self, generators: usize) -> Self { - // Round to the nearest power of 2 - let generators = padded_pow_of_2(generators); - assert!( - generators <= self.g_bold.len(), - "instantiated with less generators than application required" - ); - - BpPlusGenerators { g_bold: &self.g_bold[.. generators], h_bold: &self.h_bold[.. generators] } - } -} - -// Returns the little-endian decomposition. -fn u64_decompose(value: u64) -> ScalarVector { - let mut bits = ScalarVector::new(64); - for bit in 0 .. 
64 { - bits[bit] = Scalar::from((value >> bit) & 1); - } - bits -} diff --git a/networks/monero/ringct/bulletproofs/src/plus/transcript.rs b/networks/monero/ringct/bulletproofs/src/plus/transcript.rs deleted file mode 100644 index e42f1ade..00000000 --- a/networks/monero/ringct/bulletproofs/src/plus/transcript.rs +++ /dev/null @@ -1,17 +0,0 @@ -use std_shims::{sync::LazyLock, vec::Vec}; - -use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint}; - -use monero_generators::hash_to_point; -use monero_primitives::{keccak256, keccak256_to_scalar}; - -// Monero starts BP+ transcripts with the following constant. -// Why this uses a hash_to_point is completely unknown. -pub(crate) static TRANSCRIPT: LazyLock<[u8; 32]> = - LazyLock::new(|| hash_to_point(keccak256(b"bulletproof_plus_transcript")).compress().to_bytes()); - -pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar { - let commitments_hash = - keccak256_to_scalar(commitments.flat_map(|V| V.compress().to_bytes()).collect::>()); - keccak256_to_scalar([TRANSCRIPT.as_ref(), &commitments_hash.to_bytes()].concat()) -} diff --git a/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs b/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs deleted file mode 100644 index 5b3c25c2..00000000 --- a/networks/monero/ringct/bulletproofs/src/plus/weighted_inner_product.rs +++ /dev/null @@ -1,409 +0,0 @@ -use std_shims::{vec, vec::Vec}; - -use rand_core::{RngCore, CryptoRng}; -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint}; - -use monero_primitives::{INV_EIGHT, keccak256_to_scalar}; -use crate::{ - core::{multiexp, multiexp_vartime, challenge_products}, - batch_verifier::BulletproofsPlusBatchVerifier, - plus::{ScalarVector, PointVector, GeneratorsList, BpPlusGenerators, padded_pow_of_2}, -}; - -// Figure 1 of the Bulletproofs+ paper -#[derive(Clone, Debug)] -pub(crate) struct WipStatement { - generators: BpPlusGenerators, - P: EdwardsPoint, - y: ScalarVector, -} - -impl Zeroize for WipStatement { - fn zeroize(&mut self) { - self.P.zeroize(); - self.y.zeroize(); - } -} - -#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)] -pub(crate) struct WipWitness { - a: ScalarVector, - b: ScalarVector, - alpha: Scalar, -} - -impl WipWitness { - pub(crate) fn new(mut a: ScalarVector, mut b: ScalarVector, alpha: Scalar) -> Option { - if a.0.is_empty() || (a.len() != b.len()) { - return None; - } - - // Pad to the nearest power of 2 - let missing = padded_pow_of_2(a.len()) - a.len(); - a.0.reserve(missing); - b.0.reserve(missing); - for _ in 0 .. missing { - a.0.push(Scalar::ZERO); - b.0.push(Scalar::ZERO); - } - - Some(Self { a, b, alpha }) - } -} - -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub(crate) struct WipProof { - pub(crate) L: Vec, - pub(crate) R: Vec, - pub(crate) A: EdwardsPoint, - pub(crate) B: EdwardsPoint, - pub(crate) r_answer: Scalar, - pub(crate) s_answer: Scalar, - pub(crate) delta_answer: Scalar, -} - -impl WipStatement { - pub(crate) fn new(generators: BpPlusGenerators, P: EdwardsPoint, y: Scalar) -> Self { - debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len())); - - // y ** n - let mut y_vec = ScalarVector::new(generators.len()); - y_vec[0] = y; - for i in 1 .. 
y_vec.len() { - y_vec[i] = y_vec[i - 1] * y; - } - - Self { generators, P, y: y_vec } - } - - fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar { - let e = keccak256_to_scalar( - [ - transcript.to_bytes().as_ref(), - L.compress().to_bytes().as_ref(), - R.compress().to_bytes().as_ref(), - ] - .concat(), - ); - *transcript = e; - e - } - - fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar { - let e = keccak256_to_scalar( - [ - transcript.to_bytes().as_ref(), - A.compress().to_bytes().as_ref(), - B.compress().to_bytes().as_ref(), - ] - .concat(), - ); - *transcript = e; - e - } - - // Prover's variant of the shared code block to calculate G/H/P when n > 1 - // Returns each permutation of G/H since the prover needs to do operation on each permutation - // P is dropped as it's unused in the prover's path - #[allow(clippy::too_many_arguments)] - fn next_G_H( - transcript: &mut Scalar, - mut g_bold1: PointVector, - mut g_bold2: PointVector, - mut h_bold1: PointVector, - mut h_bold2: PointVector, - L: EdwardsPoint, - R: EdwardsPoint, - y_inv_n_hat: Scalar, - ) -> (Scalar, Scalar, Scalar, Scalar, PointVector, PointVector) { - debug_assert_eq!(g_bold1.len(), g_bold2.len()); - debug_assert_eq!(h_bold1.len(), h_bold2.len()); - debug_assert_eq!(g_bold1.len(), h_bold1.len()); - - let e = Self::transcript_L_R(transcript, L, R); - let inv_e = e.invert(); - - // This vartime is safe as all of these arguments are public - let mut new_g_bold = Vec::with_capacity(g_bold1.len()); - let e_y_inv = e * y_inv_n_hat; - for g_bold in g_bold1.0.drain(..).zip(g_bold2.0.drain(..)) { - new_g_bold.push(multiexp_vartime(&[(inv_e, g_bold.0), (e_y_inv, g_bold.1)])); - } - - let mut new_h_bold = Vec::with_capacity(h_bold1.len()); - for h_bold in h_bold1.0.drain(..).zip(h_bold2.0.drain(..)) { - new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)])); - } - - let e_square = e * e; - let inv_e_square = inv_e * inv_e; - - (e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold)) - } - - pub(crate) fn prove( - self, - rng: &mut R, - mut transcript: Scalar, - witness: &WipWitness, - ) -> Option { - let WipStatement { generators, P, mut y } = self; - #[cfg(not(debug_assertions))] - let _ = P; - - if generators.len() != witness.a.len() { - return None; - } - let (g, h) = (BpPlusGenerators::g(), BpPlusGenerators::h()); - let mut g_bold = vec![]; - let mut h_bold = vec![]; - for i in 0 .. 
generators.len() { - g_bold.push(generators.generator(GeneratorsList::GBold, i)); - h_bold.push(generators.generator(GeneratorsList::HBold, i)); - } - let mut g_bold = PointVector(g_bold); - let mut h_bold = PointVector(h_bold); - - let mut y_inv = { - let mut i = 1; - let mut to_invert = vec![]; - while i < g_bold.len() { - to_invert.push(y[i - 1]); - i *= 2; - } - Scalar::batch_invert(&mut to_invert); - to_invert - }; - - // Check P has the expected relationship - #[cfg(debug_assertions)] - { - let mut P_terms = witness - .a - .0 - .iter() - .copied() - .zip(g_bold.0.iter().copied()) - .chain(witness.b.0.iter().copied().zip(h_bold.0.iter().copied())) - .collect::>(); - P_terms.push((witness.a.clone().weighted_inner_product(&witness.b, &y), g)); - P_terms.push((witness.alpha, h)); - debug_assert_eq!(multiexp(&P_terms), P); - P_terms.zeroize(); - } - - let mut a = witness.a.clone(); - let mut b = witness.b.clone(); - let mut alpha = witness.alpha; - - // From here on, g_bold.len() is used as n - debug_assert_eq!(g_bold.len(), a.len()); - - let mut L_vec = vec![]; - let mut R_vec = vec![]; - - // else n > 1 case from figure 1 - while g_bold.len() > 1 { - let (a1, a2) = a.clone().split(); - let (b1, b2) = b.clone().split(); - let (g_bold1, g_bold2) = g_bold.split(); - let (h_bold1, h_bold2) = h_bold.split(); - - let n_hat = g_bold1.len(); - debug_assert_eq!(a1.len(), n_hat); - debug_assert_eq!(a2.len(), n_hat); - debug_assert_eq!(b1.len(), n_hat); - debug_assert_eq!(b2.len(), n_hat); - debug_assert_eq!(g_bold1.len(), n_hat); - debug_assert_eq!(g_bold2.len(), n_hat); - debug_assert_eq!(h_bold1.len(), n_hat); - debug_assert_eq!(h_bold2.len(), n_hat); - - let y_n_hat = y[n_hat - 1]; - y.0.truncate(n_hat); - - let d_l = Scalar::random(&mut *rng); - let d_r = Scalar::random(&mut *rng); - - let c_l = a1.clone().weighted_inner_product(&b2, &y); - let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y); - - let y_inv_n_hat = y_inv - .pop() - .expect("couldn't pop y_inv despite y_inv being of same length as times iterated"); - - let mut L_terms = (a1.clone() * y_inv_n_hat) - .0 - .drain(..) - .zip(g_bold2.0.iter().copied()) - .chain(b2.0.iter().copied().zip(h_bold1.0.iter().copied())) - .collect::>(); - L_terms.push((c_l, g)); - L_terms.push((d_l, h)); - let L = multiexp(&L_terms) * INV_EIGHT(); - L_vec.push(L); - L_terms.zeroize(); - - let mut R_terms = (a2.clone() * y_n_hat) - .0 - .drain(..) 
- .zip(g_bold1.0.iter().copied()) - .chain(b1.0.iter().copied().zip(h_bold2.0.iter().copied())) - .collect::>(); - R_terms.push((c_r, g)); - R_terms.push((d_r, h)); - let R = multiexp(&R_terms) * INV_EIGHT(); - R_vec.push(R); - R_terms.zeroize(); - - let (e, inv_e, e_square, inv_e_square); - (e, inv_e, e_square, inv_e_square, g_bold, h_bold) = - Self::next_G_H(&mut transcript, g_bold1, g_bold2, h_bold1, h_bold2, L, R, y_inv_n_hat); - - a = (a1 * e) + &(a2 * (y_n_hat * inv_e)); - b = (b1 * inv_e) + &(b2 * e); - alpha += (d_l * e_square) + (d_r * inv_e_square); - - debug_assert_eq!(g_bold.len(), a.len()); - debug_assert_eq!(g_bold.len(), h_bold.len()); - debug_assert_eq!(g_bold.len(), b.len()); - } - - // n == 1 case from figure 1 - debug_assert_eq!(g_bold.len(), 1); - debug_assert_eq!(h_bold.len(), 1); - - debug_assert_eq!(a.len(), 1); - debug_assert_eq!(b.len(), 1); - - let r = Scalar::random(&mut *rng); - let s = Scalar::random(&mut *rng); - let delta = Scalar::random(&mut *rng); - let eta = Scalar::random(&mut *rng); - - let ry = r * y[0]; - - let mut A_terms = - vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)]; - let A = multiexp(&A_terms) * INV_EIGHT(); - A_terms.zeroize(); - - let mut B_terms = vec![(ry * s, g), (eta, h)]; - let B = multiexp(&B_terms) * INV_EIGHT(); - B_terms.zeroize(); - - let e = Self::transcript_A_B(&mut transcript, A, B); - - let r_answer = r + (a[0] * e); - let s_answer = s + (b[0] * e); - let delta_answer = eta + (delta * e) + (alpha * (e * e)); - - Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer }) - } - - pub(crate) fn verify( - self, - rng: &mut R, - verifier: &mut BulletproofsPlusBatchVerifier, - mut transcript: Scalar, - mut proof: WipProof, - ) -> bool { - let verifier_weight = Scalar::random(rng); - - let WipStatement { generators, P, y } = self; - - // Verify the L/R lengths - { - let mut lr_len = 0; - while (1 << lr_len) < generators.len() { - lr_len += 1; - } - if (proof.L.len() != lr_len) || - (proof.R.len() != lr_len) || - (generators.len() != (1 << lr_len)) - { - return false; - } - } - - let inv_y = { - let inv_y = y[0].invert(); - let mut res = Vec::with_capacity(y.len()); - res.push(inv_y); - while res.len() < y.len() { - res.push( - inv_y * res.last().expect("couldn't get last inv_y despite inv_y always being non-empty"), - ); - } - res - }; - - let mut e_is = Vec::with_capacity(proof.L.len()); - for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) { - e_is.push(Self::transcript_L_R(&mut transcript, *L, *R)); - *L = L.mul_by_cofactor(); - *R = R.mul_by_cofactor(); - } - - let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B); - proof.A = proof.A.mul_by_cofactor(); - proof.B = proof.B.mul_by_cofactor(); - let neg_e_square = verifier_weight * -(e * e); - - verifier.0.other.push((neg_e_square, P)); - - let mut challenges = Vec::with_capacity(proof.L.len()); - let product_cache = { - let mut inv_e_is = e_is.clone(); - Scalar::batch_invert(&mut inv_e_is); - - debug_assert_eq!(e_is.len(), inv_e_is.len()); - debug_assert_eq!(e_is.len(), proof.L.len()); - debug_assert_eq!(e_is.len(), proof.R.len()); - for ((e_i, inv_e_i), (L, R)) in - e_is.drain(..).zip(inv_e_is.drain(..)).zip(proof.L.iter().zip(proof.R.iter())) - { - debug_assert_eq!(e_i.invert(), inv_e_i); - - challenges.push((e_i, inv_e_i)); - - let e_i_square = e_i * e_i; - let inv_e_i_square = inv_e_i * inv_e_i; - verifier.0.other.push((neg_e_square * e_i_square, *L)); - verifier.0.other.push((neg_e_square * 
inv_e_i_square, *R)); - } - - challenge_products(&challenges) - }; - - while verifier.0.g_bold.len() < generators.len() { - verifier.0.g_bold.push(Scalar::ZERO); - } - while verifier.0.h_bold.len() < generators.len() { - verifier.0.h_bold.push(Scalar::ZERO); - } - - let re = proof.r_answer * e; - for i in 0 .. generators.len() { - let mut scalar = product_cache[i] * re; - if i > 0 { - scalar *= inv_y[i - 1]; - } - verifier.0.g_bold[i] += verifier_weight * scalar; - } - - let se = proof.s_answer * e; - for i in 0 .. generators.len() { - verifier.0.h_bold[i] += verifier_weight * (se * product_cache[product_cache.len() - 1 - i]); - } - - verifier.0.other.push((verifier_weight * -e, proof.A)); - verifier.0.g += verifier_weight * (proof.r_answer * y[0] * proof.s_answer); - verifier.0.h += verifier_weight * proof.delta_answer; - verifier.0.other.push((-verifier_weight, proof.B)); - - true - } -} diff --git a/networks/monero/ringct/bulletproofs/src/point_vector.rs b/networks/monero/ringct/bulletproofs/src/point_vector.rs deleted file mode 100644 index c2635038..00000000 --- a/networks/monero/ringct/bulletproofs/src/point_vector.rs +++ /dev/null @@ -1,59 +0,0 @@ -use core::ops::{Index, IndexMut}; -use std_shims::vec::Vec; - -use zeroize::Zeroize; - -use curve25519_dalek::edwards::EdwardsPoint; - -use crate::scalar_vector::ScalarVector; - -#[cfg(test)] -use crate::core::multiexp; - -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub(crate) struct PointVector(pub(crate) Vec); - -impl Index for PointVector { - type Output = EdwardsPoint; - fn index(&self, index: usize) -> &EdwardsPoint { - &self.0[index] - } -} - -impl IndexMut for PointVector { - fn index_mut(&mut self, index: usize) -> &mut EdwardsPoint { - &mut self.0[index] - } -} - -impl PointVector { - pub(crate) fn mul_vec(&self, vector: &ScalarVector) -> Self { - assert_eq!(self.len(), vector.len()); - let mut res = self.clone(); - for (i, val) in res.0.iter_mut().enumerate() { - *val *= vector.0[i]; - } - res - } - - #[cfg(test)] - pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint { - debug_assert_eq!(self.len(), vector.len()); - let mut res = Vec::with_capacity(self.len()); - for (point, scalar) in self.0.iter().copied().zip(vector.0.iter().copied()) { - res.push((scalar, point)); - } - multiexp(&res) - } - - pub(crate) fn len(&self) -> usize { - self.0.len() - } - - pub(crate) fn split(mut self) -> (Self, Self) { - debug_assert!(self.len() > 1); - let r = self.0.split_off(self.0.len() / 2); - debug_assert_eq!(self.len(), r.len()); - (self, PointVector(r)) - } -} diff --git a/networks/monero/ringct/bulletproofs/src/scalar_vector.rs b/networks/monero/ringct/bulletproofs/src/scalar_vector.rs deleted file mode 100644 index ae723a42..00000000 --- a/networks/monero/ringct/bulletproofs/src/scalar_vector.rs +++ /dev/null @@ -1,138 +0,0 @@ -use core::{ - borrow::Borrow, - ops::{Index, IndexMut, Add, Sub, Mul}, -}; -use std_shims::{vec, vec::Vec}; - -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint}; - -use crate::core::multiexp; - -#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)] -pub(crate) struct ScalarVector(pub(crate) Vec); - -impl Index for ScalarVector { - type Output = Scalar; - fn index(&self, index: usize) -> &Scalar { - &self.0[index] - } -} -impl IndexMut for ScalarVector { - fn index_mut(&mut self, index: usize) -> &mut Scalar { - &mut self.0[index] - } -} - -impl> Add for ScalarVector { - type Output = ScalarVector; - fn add(mut self, scalar: S) -> 
ScalarVector { - for s in &mut self.0 { - *s += scalar.borrow(); - } - self - } -} -impl> Sub for ScalarVector { - type Output = ScalarVector; - fn sub(mut self, scalar: S) -> ScalarVector { - for s in &mut self.0 { - *s -= scalar.borrow(); - } - self - } -} -impl> Mul for ScalarVector { - type Output = ScalarVector; - fn mul(mut self, scalar: S) -> ScalarVector { - for s in &mut self.0 { - *s *= scalar.borrow(); - } - self - } -} - -impl Add<&ScalarVector> for ScalarVector { - type Output = ScalarVector; - fn add(mut self, other: &ScalarVector) -> ScalarVector { - debug_assert_eq!(self.len(), other.len()); - for (s, o) in self.0.iter_mut().zip(other.0.iter()) { - *s += o; - } - self - } -} -impl Sub<&ScalarVector> for ScalarVector { - type Output = ScalarVector; - fn sub(mut self, other: &ScalarVector) -> ScalarVector { - debug_assert_eq!(self.len(), other.len()); - for (s, o) in self.0.iter_mut().zip(other.0.iter()) { - *s -= o; - } - self - } -} -impl Mul<&ScalarVector> for ScalarVector { - type Output = ScalarVector; - fn mul(mut self, other: &ScalarVector) -> ScalarVector { - debug_assert_eq!(self.len(), other.len()); - for (s, o) in self.0.iter_mut().zip(other.0.iter()) { - *s *= o; - } - self - } -} - -impl Mul<&[EdwardsPoint]> for &ScalarVector { - type Output = EdwardsPoint; - fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint { - debug_assert_eq!(self.len(), b.len()); - let mut multiexp_args = self.0.iter().copied().zip(b.iter().copied()).collect::>(); - let res = multiexp(&multiexp_args); - multiexp_args.zeroize(); - res - } -} - -impl ScalarVector { - pub(crate) fn new(len: usize) -> Self { - ScalarVector(vec![Scalar::ZERO; len]) - } - - pub(crate) fn powers(x: Scalar, len: usize) -> Self { - debug_assert!(len != 0); - - let mut res = Vec::with_capacity(len); - res.push(Scalar::ONE); - res.push(x); - for i in 2 .. len { - res.push(res[i - 1] * x); - } - res.truncate(len); - ScalarVector(res) - } - - pub(crate) fn len(&self) -> usize { - self.0.len() - } - - pub(crate) fn sum(mut self) -> Scalar { - self.0.drain(..).sum() - } - - pub(crate) fn inner_product(self, vector: &Self) -> Scalar { - (self * vector).sum() - } - - pub(crate) fn weighted_inner_product(self, vector: &Self, y: &Self) -> Scalar { - (self * vector * y).sum() - } - - pub(crate) fn split(mut self) -> (Self, Self) { - debug_assert!(self.len() > 1); - let r = self.0.split_off(self.0.len() / 2); - debug_assert_eq!(self.len(), r.len()); - (self, ScalarVector(r)) - } -} diff --git a/networks/monero/ringct/bulletproofs/src/tests/mod.rs b/networks/monero/ringct/bulletproofs/src/tests/mod.rs deleted file mode 100644 index fa4c8939..00000000 --- a/networks/monero/ringct/bulletproofs/src/tests/mod.rs +++ /dev/null @@ -1,56 +0,0 @@ -use rand_core::{RngCore, OsRng}; - -use curve25519_dalek::scalar::Scalar; - -use monero_primitives::Commitment; -use crate::{batch_verifier::BatchVerifier, Bulletproof, BulletproofError}; - -mod original; -mod plus; - -macro_rules! 
bulletproofs_tests { - ($name: ident, $max: ident, $plus: literal) => { - #[test] - fn $name() { - // Create Bulletproofs for all possible output quantities - let mut verifier = BatchVerifier::new(); - for i in 1 ..= 16 { - let commitments = (1 ..= i) - .map(|_| Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64())) - .collect::>(); - - let bp = if $plus { - Bulletproof::prove_plus(&mut OsRng, commitments.clone()).unwrap() - } else { - Bulletproof::prove(&mut OsRng, commitments.clone()).unwrap() - }; - - let commitments = commitments.iter().map(Commitment::calculate).collect::>(); - assert!(bp.verify(&mut OsRng, &commitments)); - assert!(bp.batch_verify(&mut OsRng, &mut verifier, &commitments)); - } - assert!(verifier.verify()); - } - - #[test] - fn $max() { - // Check Bulletproofs errors if we try to prove for too many outputs - let mut commitments = vec![]; - for _ in 0 .. 17 { - commitments.push(Commitment::new(Scalar::ZERO, 0)); - } - assert_eq!( - (if $plus { - Bulletproof::prove_plus(&mut OsRng, commitments) - } else { - Bulletproof::prove(&mut OsRng, commitments) - }) - .unwrap_err(), - BulletproofError::TooManyCommitments, - ); - } - }; -} - -bulletproofs_tests!(bulletproofs, bulletproofs_max, false); -bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true); diff --git a/networks/monero/ringct/bulletproofs/src/tests/original/inner_product.rs b/networks/monero/ringct/bulletproofs/src/tests/original/inner_product.rs deleted file mode 100644 index ce026e65..00000000 --- a/networks/monero/ringct/bulletproofs/src/tests/original/inner_product.rs +++ /dev/null @@ -1,75 +0,0 @@ -// The inner product relation is P = sum(g_bold * a, h_bold * b, g * (a * b)) - -use rand_core::OsRng; - -use curve25519_dalek::Scalar; - -use monero_generators::H; - -use crate::{ - scalar_vector::ScalarVector, - point_vector::PointVector, - original::{ - GENERATORS, - inner_product::{IpStatement, IpWitness}, - }, - BulletproofsBatchVerifier, -}; - -#[test] -fn test_zero_inner_product() { - let statement = - IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; 1]), Scalar::ONE); - let witness = IpWitness::new(ScalarVector::new(1), ScalarVector::new(1)).unwrap(); - - let transcript = Scalar::random(&mut OsRng); - let proof = statement.clone().prove(transcript, witness).unwrap(); - - let mut verifier = BulletproofsBatchVerifier::default(); - verifier.0.g_bold = vec![Scalar::ZERO; 1]; - verifier.0.h_bold = vec![Scalar::ZERO; 1]; - statement.verify(&mut verifier, 1, transcript, Scalar::random(&mut OsRng), proof).unwrap(); - assert!(verifier.verify()); -} - -#[test] -fn test_inner_product() { - // P = sum(g_bold * a, h_bold * b, g * u * ) - let generators = &GENERATORS; - let mut verifier = BulletproofsBatchVerifier::default(); - verifier.0.g_bold = vec![Scalar::ZERO; 32]; - verifier.0.h_bold = vec![Scalar::ZERO; 32]; - for i in [1, 2, 4, 8, 16, 32] { - let g = *H; - let mut g_bold = vec![]; - let mut h_bold = vec![]; - for i in 0 .. i { - g_bold.push(generators.G[i]); - h_bold.push(generators.H[i]); - } - let g_bold = PointVector(g_bold); - let h_bold = PointVector(h_bold); - - let mut a = ScalarVector::new(i); - let mut b = ScalarVector::new(i); - - for i in 0 .. 
i { - a[i] = Scalar::random(&mut OsRng); - b[i] = Scalar::random(&mut OsRng); - } - - let P = g_bold.multiexp(&a) + h_bold.multiexp(&b) + (g * a.clone().inner_product(&b)); - - let statement = - IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; i]), Scalar::ONE); - let witness = IpWitness::new(a, b).unwrap(); - - let transcript = Scalar::random(&mut OsRng); - let proof = statement.clone().prove(transcript, witness).unwrap(); - - let weight = Scalar::random(&mut OsRng); - verifier.0.other.push((weight, P)); - statement.verify(&mut verifier, i, transcript, weight, proof).unwrap(); - } - assert!(verifier.verify()); -} diff --git a/networks/monero/ringct/bulletproofs/src/tests/original/mod.rs b/networks/monero/ringct/bulletproofs/src/tests/original/mod.rs deleted file mode 100644 index c0010b4f..00000000 --- a/networks/monero/ringct/bulletproofs/src/tests/original/mod.rs +++ /dev/null @@ -1,62 +0,0 @@ -use hex_literal::hex; -use rand_core::OsRng; - -use curve25519_dalek::scalar::Scalar; - -use monero_io::decompress_point; - -use crate::{ - original::{IpProof, AggregateRangeProof as OriginalProof}, - Bulletproof, -}; - -mod inner_product; - -#[test] -fn bulletproofs_vector() { - let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap(); - let point = |point| decompress_point(point).unwrap(); - - // Generated from Monero - assert!(Bulletproof::Original(OriginalProof { - A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")), - S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")), - T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")), - T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")), - tau_x: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")), - mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")), - ip: IpProof { - L: vec![ - point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")), - point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")), - point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")), - point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")), - point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")), - point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")), - point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")), - ], - R: vec![ - point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")), - point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")), - point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")), - point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")), - point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")), - point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")), - point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")), - ], - a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")), - b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")), - }, - t_hat: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603")) - }) - .verify( - &mut OsRng, - &[ - // For some reason, these vectors are * INV_EIGHT - 
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f")) - .mul_by_cofactor(), - point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f")) - .mul_by_cofactor(), - ] - )); -} diff --git a/networks/monero/ringct/bulletproofs/src/tests/plus/aggregate_range_proof.rs b/networks/monero/ringct/bulletproofs/src/tests/plus/aggregate_range_proof.rs deleted file mode 100644 index ba5d0543..00000000 --- a/networks/monero/ringct/bulletproofs/src/tests/plus/aggregate_range_proof.rs +++ /dev/null @@ -1,28 +0,0 @@ -use rand_core::{RngCore, OsRng}; - -use curve25519_dalek::Scalar; - -use monero_primitives::Commitment; - -use crate::{ - batch_verifier::BulletproofsPlusBatchVerifier, - plus::aggregate_range_proof::{AggregateRangeStatement, AggregateRangeWitness}, -}; - -#[test] -fn test_aggregate_range_proof() { - let mut verifier = BulletproofsPlusBatchVerifier::default(); - for m in 1 ..= 16 { - let mut commitments = vec![]; - for _ in 0 .. m { - commitments.push(Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64())); - } - let commitment_points = commitments.iter().map(Commitment::calculate).collect::>(); - let statement = AggregateRangeStatement::new(&commitment_points).unwrap(); - let witness = AggregateRangeWitness::new(commitments).unwrap(); - - let proof = statement.clone().prove(&mut OsRng, &witness).unwrap(); - statement.verify(&mut OsRng, &mut verifier, proof); - } - assert!(verifier.verify()); -} diff --git a/networks/monero/ringct/bulletproofs/src/tests/plus/mod.rs b/networks/monero/ringct/bulletproofs/src/tests/plus/mod.rs deleted file mode 100644 index bd48add5..00000000 --- a/networks/monero/ringct/bulletproofs/src/tests/plus/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -#[cfg(test)] -mod weighted_inner_product; -#[cfg(test)] -mod aggregate_range_proof; diff --git a/networks/monero/ringct/bulletproofs/src/tests/plus/weighted_inner_product.rs b/networks/monero/ringct/bulletproofs/src/tests/plus/weighted_inner_product.rs deleted file mode 100644 index eaa00cd3..00000000 --- a/networks/monero/ringct/bulletproofs/src/tests/plus/weighted_inner_product.rs +++ /dev/null @@ -1,82 +0,0 @@ -// The inner product relation is P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha) - -use rand_core::OsRng; - -use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint}; - -use crate::{ - batch_verifier::BulletproofsPlusBatchVerifier, - plus::{ - ScalarVector, PointVector, GeneratorsList, BpPlusGenerators, - weighted_inner_product::{WipStatement, WipWitness}, - }, -}; - -#[test] -fn test_zero_weighted_inner_product() { - #[allow(non_snake_case)] - let P = EdwardsPoint::identity(); - let y = Scalar::random(&mut OsRng); - - let generators = BpPlusGenerators::new().reduce(1); - let statement = WipStatement::new(generators, P, y); - let witness = WipWitness::new(ScalarVector::new(1), ScalarVector::new(1), Scalar::ZERO).unwrap(); - - let transcript = Scalar::random(&mut OsRng); - let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap(); - - let mut verifier = BulletproofsPlusBatchVerifier::default(); - statement.verify(&mut OsRng, &mut verifier, transcript, proof); - assert!(verifier.verify()); -} - -#[test] -fn test_weighted_inner_product() { - // P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha) - let mut verifier = BulletproofsPlusBatchVerifier::default(); - let generators = BpPlusGenerators::new(); - for i in [1, 2, 4, 8, 16, 32] { - let generators = generators.reduce(i); - let g = BpPlusGenerators::g(); - let 
h = BpPlusGenerators::h(); - assert_eq!(generators.len(), i); - let mut g_bold = vec![]; - let mut h_bold = vec![]; - for i in 0 .. i { - g_bold.push(generators.generator(GeneratorsList::GBold, i)); - h_bold.push(generators.generator(GeneratorsList::HBold, i)); - } - let g_bold = PointVector(g_bold); - let h_bold = PointVector(h_bold); - - let mut a = ScalarVector::new(i); - let mut b = ScalarVector::new(i); - let alpha = Scalar::random(&mut OsRng); - - let y = Scalar::random(&mut OsRng); - let mut y_vec = ScalarVector::new(g_bold.len()); - y_vec[0] = y; - for i in 1 .. y_vec.len() { - y_vec[i] = y_vec[i - 1] * y; - } - - for i in 0 .. i { - a[i] = Scalar::random(&mut OsRng); - b[i] = Scalar::random(&mut OsRng); - } - - #[allow(non_snake_case)] - let P = g_bold.multiexp(&a) + - h_bold.multiexp(&b) + - (g * a.clone().weighted_inner_product(&b, &y_vec)) + - (h * alpha); - - let statement = WipStatement::new(generators, P, y); - let witness = WipWitness::new(a, b, alpha).unwrap(); - - let transcript = Scalar::random(&mut OsRng); - let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap(); - statement.verify(&mut OsRng, &mut verifier, transcript, proof); - } - assert!(verifier.verify()); -} diff --git a/networks/monero/ringct/clsag/Cargo.toml b/networks/monero/ringct/clsag/Cargo.toml deleted file mode 100644 index 801717c7..00000000 --- a/networks/monero/ringct/clsag/Cargo.toml +++ /dev/null @@ -1,65 +0,0 @@ -[package] -name = "monero-clsag" -version = "0.1.0" -description = "The CLSAG linkable ring signature, as defined by the Monero protocol" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/ringct/clsag" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false } - -thiserror = { version = "1", default-features = false, optional = true } - -rand_core = { version = "0.6", default-features = false } -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } -subtle = { version = "^2.4", default-features = false } - -# Cryptographic dependencies -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -# Multisig dependencies -rand_chacha = { version = "0.3", default-features = false, optional = true } -transcript = { package = "flexible-transcript", path = "../../../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true } -group = { version = "0.13", default-features = false, optional = true } -dalek-ff-group = { path = "../../../../crypto/dalek-ff-group", version = "0.4", default-features = false, optional = true } -frost = { package = "modular-frost", path = "../../../../crypto/frost", default-features = false, features = ["ed25519"], optional = true } - -# Other Monero dependencies -monero-io = { path = "../../io", version = "0.1", default-features = false } -monero-generators = { path = "../../generators", version = "0.4", default-features = false } -monero-primitives = { path = "../../primitives", version = "0.1", default-features = false } - -[dev-dependencies] -frost = { package = "modular-frost", path = "../../../../crypto/frost", default-features = false, features = ["ed25519", "tests"] } - -[features] -std = [ - "std-shims/std", - - "thiserror", - - 
"rand_core/std", - "zeroize/std", - "subtle/std", - - "rand_chacha?/std", - "transcript?/std", - "group?/alloc", - "dalek-ff-group?/std", - - "monero-io/std", - "monero-generators/std", - "monero-primitives/std", -] -multisig = ["rand_chacha", "transcript", "group", "dalek-ff-group", "frost", "std"] -default = ["std"] diff --git a/networks/monero/ringct/clsag/LICENSE b/networks/monero/ringct/clsag/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/ringct/clsag/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/ringct/clsag/README.md b/networks/monero/ringct/clsag/README.md deleted file mode 100644 index 4b90c86c..00000000 --- a/networks/monero/ringct/clsag/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# Monero CLSAG - -The CLSAG linkable ring signature, as defined by the Monero protocol. - -Additionally included is a FROST-inspired threshold multisignature algorithm. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). -- `multisig`: Provides a FROST-inspired threshold multisignature algorithm for - use. 
diff --git a/networks/monero/ringct/clsag/src/lib.rs b/networks/monero/ringct/clsag/src/lib.rs deleted file mode 100644 index eb2f81b7..00000000 --- a/networks/monero/ringct/clsag/src/lib.rs +++ /dev/null @@ -1,434 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] -#![allow(non_snake_case)] - -use core::ops::Deref; -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, Write}, -}; - -use rand_core::{RngCore, CryptoRng}; - -use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; -use subtle::{ConstantTimeEq, ConditionallySelectable}; - -use curve25519_dalek::{ - constants::{ED25519_BASEPOINT_TABLE, ED25519_BASEPOINT_POINT}, - scalar::Scalar, - traits::{IsIdentity, MultiscalarMul, VartimePrecomputedMultiscalarMul}, - edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}, -}; - -use monero_io::*; -use monero_generators::hash_to_point; -use monero_primitives::{INV_EIGHT, G_PRECOMP, Commitment, Decoys, keccak256_to_scalar}; - -#[cfg(feature = "multisig")] -mod multisig; -#[cfg(feature = "multisig")] -pub use multisig::{ClsagMultisigMaskSender, ClsagAddendum, ClsagMultisig}; - -#[cfg(all(feature = "std", test))] -mod tests; - -/// Errors when working with CLSAGs. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum ClsagError { - /// The ring was invalid (such as being too small or too large). - #[cfg_attr(feature = "std", error("invalid ring"))] - InvalidRing, - /// The discrete logarithm of the key, scaling G, wasn't equivalent to the signing ring member. - #[cfg_attr(feature = "std", error("invalid commitment"))] - InvalidKey, - /// The commitment opening provided did not match the ring member's. - #[cfg_attr(feature = "std", error("invalid commitment"))] - InvalidCommitment, - /// The key image was invalid (such as being identity or torsioned) - #[cfg_attr(feature = "std", error("invalid key image"))] - InvalidImage, - /// The `D` component was invalid. - #[cfg_attr(feature = "std", error("invalid D"))] - InvalidD, - /// The `s` vector was invalid. - #[cfg_attr(feature = "std", error("invalid s"))] - InvalidS, - /// The `c1` variable was invalid. - #[cfg_attr(feature = "std", error("invalid c1"))] - InvalidC1, -} - -/// Context on the input being signed for. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)] -pub struct ClsagContext { - // The opening for the commitment of the signing ring member - commitment: Commitment, - // Selected ring members' positions, signer index, and ring - decoys: Decoys, -} - -impl ClsagContext { - /// Create a new context, as necessary for signing. 
- pub fn new(decoys: Decoys, commitment: Commitment) -> Result { - if decoys.len() > u8::MAX.into() { - Err(ClsagError::InvalidRing)?; - } - - // Validate the commitment matches - if decoys.signer_ring_members()[1] != commitment.calculate() { - Err(ClsagError::InvalidCommitment)?; - } - - Ok(ClsagContext { commitment, decoys }) - } -} - -#[allow(clippy::large_enum_variant)] -enum Mode { - Sign { signer_index: u8, A: EdwardsPoint, AH: EdwardsPoint }, - Verify { c1: Scalar, D_serialized: EdwardsPoint }, -} - -// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences -// -// Said differences are covered via the above Mode -fn core( - ring: &[[EdwardsPoint; 2]], - I: &EdwardsPoint, - pseudo_out: &EdwardsPoint, - msg_hash: &[u8; 32], - D_torsion_free: &EdwardsPoint, - s: &[Scalar], - A_c1: &Mode, -) -> ((EdwardsPoint, Scalar, Scalar), Scalar) { - let n = ring.len(); - - let images_precomp = match A_c1 { - Mode::Sign { .. } => None, - Mode::Verify { .. } => Some(VartimeEdwardsPrecomputation::new([I, D_torsion_free])), - }; - let D_inv_eight = D_torsion_free * INV_EIGHT(); - - // Generate the transcript - // Instead of generating multiple, a single transcript is created and then edited as needed - const PREFIX: &[u8] = b"CLSAG_"; - #[rustfmt::skip] - const AGG_0: &[u8] = b"agg_0"; - #[rustfmt::skip] - const ROUND: &[u8] = b"round"; - const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len(); - - let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32); - to_hash.extend(PREFIX); - to_hash.extend(AGG_0); - to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]); - - let mut P = Vec::with_capacity(n); - for member in ring { - P.push(member[0]); - to_hash.extend(member[0].compress().to_bytes()); - } - - let mut C = Vec::with_capacity(n); - for member in ring { - C.push(member[1] - pseudo_out); - to_hash.extend(member[1].compress().to_bytes()); - } - - to_hash.extend(I.compress().to_bytes()); - match A_c1 { - Mode::Sign { .. } => { - to_hash.extend(D_inv_eight.compress().to_bytes()); - } - Mode::Verify { D_serialized, .. } => { - to_hash.extend(D_serialized.compress().to_bytes()); - } - } - to_hash.extend(pseudo_out.compress().to_bytes()); - // mu_P with agg_0 - let mu_P = keccak256_to_scalar(&to_hash); - // mu_C with agg_1 - to_hash[PREFIX_AGG_0_LEN - 1] = b'1'; - let mu_C = keccak256_to_scalar(&to_hash); - - // Truncate it for the round transcript, altering the DST as needed - to_hash.truncate(((2 * n) + 1) * 32); - for i in 0 .. ROUND.len() { - to_hash[PREFIX.len() + i] = ROUND[i]; - } - // Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be - // truncated just to add it back - to_hash.extend(pseudo_out.compress().to_bytes()); - to_hash.extend(msg_hash); - - // Configure the loop based on if we're signing or verifying - let start; - let end; - let mut c; - match A_c1 { - Mode::Sign { signer_index, A, AH } => { - let signer_index = usize::from(*signer_index); - start = signer_index + 1; - end = signer_index + n; - to_hash.extend(A.compress().to_bytes()); - to_hash.extend(AH.compress().to_bytes()); - c = keccak256_to_scalar(&to_hash); - } - - Mode::Verify { c1, .. } => { - start = 0; - end = n; - c = *c1; - } - } - - // Perform the core loop - let mut c1 = c; - for i in (start .. end).map(|i| i % n) { - let c_p = mu_P * c; - let c_c = mu_C * c; - - // (s_i * G) + (c_p * P_i) + (c_c * C_i) - let L = match A_c1 { - Mode::Sign { .. 
} => { - EdwardsPoint::multiscalar_mul([s[i], c_p, c_c], [ED25519_BASEPOINT_POINT, P[i], C[i]]) - } - Mode::Verify { .. } => { - G_PRECOMP().vartime_mixed_multiscalar_mul([s[i]], [c_p, c_c], [P[i], C[i]]) - } - }; - - let PH = hash_to_point(P[i].compress().0); - - // (c_p * I) + (c_c * D) + (s_i * PH) - let R = match A_c1 { - Mode::Sign { .. } => { - EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D_torsion_free, &PH]) - } - Mode::Verify { .. } => images_precomp - .as_ref() - .expect("value populated when verifying wasn't populated") - .vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]), - }; - - to_hash.truncate(((2 * n) + 3) * 32); - to_hash.extend(L.compress().to_bytes()); - to_hash.extend(R.compress().to_bytes()); - c = keccak256_to_scalar(&to_hash); - - // This will only execute once and shouldn't need to be constant time. Making it constant time - // removes the risk of branch prediction creating timing differences depending on ring index - // however - c1.conditional_assign(&c, i.ct_eq(&(n - 1))); - } - - // This first tuple is needed to continue signing, the latter is the c to be tested/worked with - ((D_inv_eight, c * mu_P, c * mu_C), c1) -} - -/// The CLSAG signature, as used in Monero. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Clsag { - /// The difference of the commitment randomnesses, scaling the key image generator. - pub D: EdwardsPoint, - /// The responses for each ring member. - pub s: Vec, - /// The first challenge in the ring. - pub c1: Scalar, -} - -struct ClsagSignCore { - incomplete_clsag: Clsag, - pseudo_out: EdwardsPoint, - key_challenge: Scalar, - challenged_mask: Scalar, -} - -impl Clsag { - // Sign core is the extension of core as needed for signing, yet is shared between single signer - // and multisig, hence why it's still core - fn sign_core( - rng: &mut R, - I: &EdwardsPoint, - input: &ClsagContext, - mask: Scalar, - msg_hash: &[u8; 32], - A: EdwardsPoint, - AH: EdwardsPoint, - ) -> ClsagSignCore { - let signer_index = input.decoys.signer_index(); - - let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate(); - let mask_delta = input.commitment.mask - mask; - - let H = hash_to_point(input.decoys.ring()[usize::from(signer_index)][0].compress().0); - let D = H * mask_delta; - let mut s = Vec::with_capacity(input.decoys.ring().len()); - for _ in 0 .. input.decoys.ring().len() { - s.push(Scalar::random(rng)); - } - let ((D, c_p, c_c), c1) = core( - input.decoys.ring(), - I, - &pseudo_out, - msg_hash, - &D, - &s, - &Mode::Sign { signer_index, A, AH }, - ); - - ClsagSignCore { - incomplete_clsag: Clsag { D, s, c1 }, - pseudo_out, - key_challenge: c_p, - challenged_mask: c_c * mask_delta, - } - } - - /// Sign CLSAG signatures for the provided inputs. - /// - /// Monero ensures the rerandomized input commitments have the same value as the outputs by - /// checking `sum(rerandomized_input_commitments) - sum(output_commitments) == 0`. This requires - /// not only the amounts balance, yet also - /// `sum(input_commitment_masks) - sum(output_commitment_masks)`. - /// - /// Monero solves this by following the wallet protocol to determine each output commitment's - /// randomness, then using random masks for all but the last input. The last input is - /// rerandomized to the necessary mask for the equation to balance. - /// - /// Due to Monero having this behavior, it only makes sense to sign CLSAGs as a list, hence this - /// API being the way it is. - /// - /// `inputs` is of the form (discrete logarithm of the key, context). 
- /// - /// `sum_outputs` is for the sum of the output commitments' masks. - /// - /// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which - /// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do - /// not use this if you don't know what you're doing. - pub fn sign( - rng: &mut R, - mut inputs: Vec<(Zeroizing, ClsagContext)>, - sum_outputs: Scalar, - msg_hash: [u8; 32], - ) -> Result, ClsagError> { - // Create the key images - let mut key_image_generators = vec![]; - let mut key_images = vec![]; - for input in &inputs { - let key = input.1.decoys.signer_ring_members()[0]; - - // Check the key is consistent - if (ED25519_BASEPOINT_TABLE * input.0.deref()) != key { - Err(ClsagError::InvalidKey)?; - } - - let key_image_generator = hash_to_point(key.compress().0); - key_image_generators.push(key_image_generator); - key_images.push(key_image_generator * input.0.deref()); - } - - let mut res = Vec::with_capacity(inputs.len()); - let mut sum_pseudo_outs = Scalar::ZERO; - for i in 0 .. inputs.len() { - let mask; - // If this is the last input, set the mask as described above - if i == (inputs.len() - 1) { - mask = sum_outputs - sum_pseudo_outs; - } else { - mask = Scalar::random(rng); - sum_pseudo_outs += mask; - } - - let mut nonce = Zeroizing::new(Scalar::random(rng)); - let ClsagSignCore { mut incomplete_clsag, pseudo_out, key_challenge, challenged_mask } = - Clsag::sign_core( - rng, - &key_images[i], - &inputs[i].1, - mask, - &msg_hash, - nonce.deref() * ED25519_BASEPOINT_TABLE, - nonce.deref() * key_image_generators[i], - ); - // Effectively r - c x, except c x is (c_p x) + (c_c z), where z is the delta between the - // ring member's commitment and our pseudo-out commitment (which will only have a known - // discrete log over G if the amounts cancel out) - incomplete_clsag.s[usize::from(inputs[i].1.decoys.signer_index())] = - nonce.deref() - ((key_challenge * inputs[i].0.deref()) + challenged_mask); - let clsag = incomplete_clsag; - - // Zeroize private keys and nonces. - inputs[i].0.zeroize(); - nonce.zeroize(); - - debug_assert!(clsag - .verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg_hash) - .is_ok()); - - res.push((clsag, pseudo_out)); - } - - Ok(res) - } - - /// Verify a CLSAG signature for the provided context. - /// - /// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which - /// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do - /// not use this if you don't know what you're doing. - pub fn verify( - &self, - ring: &[[EdwardsPoint; 2]], - I: &EdwardsPoint, - pseudo_out: &EdwardsPoint, - msg_hash: &[u8; 32], - ) -> Result<(), ClsagError> { - // Preliminary checks - // s, c1, and points must also be encoded canonically, which is checked at time of decode - if ring.is_empty() { - Err(ClsagError::InvalidRing)?; - } - if ring.len() != self.s.len() { - Err(ClsagError::InvalidS)?; - } - if I.is_identity() || (!I.is_torsion_free()) { - Err(ClsagError::InvalidImage)?; - } - - let D_torsion_free = self.D.mul_by_cofactor(); - if D_torsion_free.is_identity() { - Err(ClsagError::InvalidD)?; - } - - let (_, c1) = core( - ring, - I, - pseudo_out, - msg_hash, - &D_torsion_free, - &self.s, - &Mode::Verify { c1: self.c1, D_serialized: self.D }, - ); - if c1 != self.c1 { - Err(ClsagError::InvalidC1)?; - } - Ok(()) - } - - /// Write a CLSAG. 
- pub fn write(&self, w: &mut W) -> io::Result<()> { - write_raw_vec(write_scalar, &self.s, w)?; - w.write_all(&self.c1.to_bytes())?; - write_point(&self.D, w) - } - - /// Read a CLSAG. - pub fn read(decoys: usize, r: &mut R) -> io::Result { - Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? }) - } -} diff --git a/networks/monero/ringct/clsag/src/multisig.rs b/networks/monero/ringct/clsag/src/multisig.rs deleted file mode 100644 index 5fc098ad..00000000 --- a/networks/monero/ringct/clsag/src/multisig.rs +++ /dev/null @@ -1,389 +0,0 @@ -use core::{ops::Deref, fmt::Debug}; -use std_shims::{ - sync::{Arc, Mutex}, - io::{self, Read, Write}, - collections::HashMap, -}; - -use rand_core::{RngCore, CryptoRng, SeedableRng}; -use rand_chacha::ChaCha20Rng; - -use zeroize::{Zeroize, Zeroizing}; - -use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint}; - -use group::{ - ff::{Field, PrimeField}, - Group, GroupEncoding, -}; - -use transcript::{Transcript, RecommendedTranscript}; -use dalek_ff_group as dfg; -use frost::{ - dkg::lagrange, - curve::Ed25519, - Participant, FrostError, ThresholdKeys, ThresholdView, - algorithm::{WriteAddendum, Algorithm}, -}; - -use monero_generators::hash_to_point; - -use crate::{ClsagContext, Clsag}; - -impl ClsagContext { - fn transcript(&self, transcript: &mut T) { - // Doesn't domain separate as this is considered part of the larger CLSAG proof - - // Ring index - transcript.append_message(b"signer_index", [self.decoys.signer_index()]); - - // Ring - for (i, pair) in self.decoys.ring().iter().enumerate() { - // Doesn't include global output indexes as CLSAG doesn't care/won't be affected by it - // They're just a unreliable reference to this data which will be included in the message - // if somehow relevant - transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]); - // This also transcripts the key image generator since it's derived from this key - transcript.append_message(b"key", pair[0].compress().to_bytes()); - transcript.append_message(b"commitment", pair[1].compress().to_bytes()) - } - - // Doesn't include the commitment's parts as the above ring + index includes the commitment - // The only potential malleability would be if the G/H relationship is known, breaking the - // discrete log problem, which breaks everything already - } -} - -/// A channel to send the mask to use for the pseudo-out (rerandomized commitment) with. -/// -/// A mask must be sent along this channel before any preprocess addendums are handled. -#[derive(Debug)] -pub struct ClsagMultisigMaskSender { - buf: Arc>>, -} -#[derive(Debug)] -struct ClsagMultisigMaskReceiver { - buf: Arc>>, -} -impl ClsagMultisigMaskSender { - fn new() -> (ClsagMultisigMaskSender, ClsagMultisigMaskReceiver) { - let buf = Arc::new(Mutex::new(None)); - (ClsagMultisigMaskSender { buf: buf.clone() }, ClsagMultisigMaskReceiver { buf }) - } - - /// Send a mask to a CLSAG multisig instance. - pub fn send(self, mask: Scalar) { - // There is no risk this was prior set as this consumes `self`, which does not implement - // `Clone` - *self.buf.lock() = Some(mask); - } -} -impl ClsagMultisigMaskReceiver { - fn recv(self) -> Option { - *self.buf.lock() - } -} - -/// Addendum produced during the signing process. -#[derive(Clone, PartialEq, Eq, Zeroize, Debug)] -pub struct ClsagAddendum { - key_image_share: dfg::EdwardsPoint, -} - -impl ClsagAddendum { - /// The key image share within this addendum. 
- pub fn key_image_share(&self) -> dfg::EdwardsPoint { - self.key_image_share - } -} - -impl WriteAddendum for ClsagAddendum { - fn write(&self, writer: &mut W) -> io::Result<()> { - writer.write_all(self.key_image_share.compress().to_bytes().as_ref()) - } -} - -#[allow(non_snake_case)] -#[derive(Clone, PartialEq, Eq, Debug)] -struct Interim { - p: Scalar, - c: Scalar, - - clsag: Clsag, - pseudo_out: EdwardsPoint, -} - -/// FROST-inspired algorithm for producing a CLSAG signature. -/// -/// Before this has its `process_addendum` called, a mask must be set. -/// -/// The message signed is expected to be a 32-byte value. Per Monero, it's the keccak256 hash of -/// the transaction data which is signed. This will panic if the message is not a 32-byte value. -#[allow(non_snake_case)] -#[derive(Debug)] -pub struct ClsagMultisig { - transcript: RecommendedTranscript, - - key_image_generator: EdwardsPoint, - key_image_shares: HashMap<[u8; 32], dfg::EdwardsPoint>, - image: Option, - - context: ClsagContext, - - mask_recv: Option, - mask: Option, - - msg_hash: Option<[u8; 32]>, - interim: Option, -} - -impl ClsagMultisig { - /// Construct a new instance of multisignature CLSAG signing. - pub fn new( - transcript: RecommendedTranscript, - context: ClsagContext, - ) -> (ClsagMultisig, ClsagMultisigMaskSender) { - let (mask_send, mask_recv) = ClsagMultisigMaskSender::new(); - ( - ClsagMultisig { - transcript, - - key_image_generator: hash_to_point(context.decoys.signer_ring_members()[0].compress().0), - key_image_shares: HashMap::new(), - image: None, - - context, - - mask_recv: Some(mask_recv), - mask: None, - - msg_hash: None, - interim: None, - }, - mask_send, - ) - } - - /// The key image generator used by the signer. - pub fn key_image_generator(&self) -> EdwardsPoint { - self.key_image_generator - } -} - -impl Algorithm for ClsagMultisig { - type Transcript = RecommendedTranscript; - type Addendum = ClsagAddendum; - // We output the CLSAG and the key image, which requires an interactive protocol to obtain - type Signature = (Clsag, EdwardsPoint); - - // We need the nonce represented against both G and the key image generator - fn nonces(&self) -> Vec> { - vec![vec![dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.key_image_generator)]] - } - - // We also publish our share of the key image - fn preprocess_addendum( - &mut self, - _rng: &mut R, - keys: &ThresholdKeys, - ) -> ClsagAddendum { - ClsagAddendum { - key_image_share: dfg::EdwardsPoint(self.key_image_generator) * keys.secret_share().deref(), - } - } - - fn read_addendum(&self, reader: &mut R) -> io::Result { - let mut bytes = [0; 32]; - reader.read_exact(&mut bytes)?; - // dfg ensures the point is torsion free - let xH = Option::::from(dfg::EdwardsPoint::from_bytes(&bytes)) - .ok_or_else(|| io::Error::other("invalid key image"))?; - // Ensure this is a canonical point - if xH.to_bytes() != bytes { - Err(io::Error::other("non-canonical key image"))?; - } - - Ok(ClsagAddendum { key_image_share: xH }) - } - - fn process_addendum( - &mut self, - view: &ThresholdView, - l: Participant, - addendum: ClsagAddendum, - ) -> Result<(), FrostError> { - if self.image.is_none() { - self.transcript.domain_separate(b"CLSAG"); - // Transcript the ring - self.context.transcript(&mut self.transcript); - // Fetch the mask from the Mutex - // We set it to a variable to ensure our view of it is consistent - // It was this or a mpsc channel... 
std doesn't have oneshot :/ - self.mask = Some( - self - .mask_recv - .take() - .expect("image was none multiple times, despite setting to Some on first iteration") - .recv() - .ok_or(FrostError::InternalError("CLSAG mask was not provided"))?, - ); - // Transcript the mask - self.transcript.append_message(b"mask", self.mask.expect("mask wasn't set").to_bytes()); - - // Init the image to the offset - self.image = Some(dfg::EdwardsPoint(self.key_image_generator) * view.offset()); - } - - // Transcript this participant's contribution - self.transcript.append_message(b"participant", l.to_bytes()); - self - .transcript - .append_message(b"key_image_share", addendum.key_image_share.compress().to_bytes()); - - // Accumulate the interpolated share - let interpolated_key_image_share = - addendum.key_image_share * lagrange::(l, view.included()); - *self.image.as_mut().expect("image populated on first iteration wasn't Some") += - interpolated_key_image_share; - - self - .key_image_shares - .insert(view.verification_share(l).to_bytes(), interpolated_key_image_share); - - Ok(()) - } - - fn transcript(&mut self) -> &mut Self::Transcript { - &mut self.transcript - } - - fn sign_share( - &mut self, - view: &ThresholdView, - nonce_sums: &[Vec], - nonces: Vec>, - msg_hash: &[u8], - ) -> dfg::Scalar { - // Use the transcript to get a seeded random number generator - // - // The transcript contains private data, preventing passive adversaries from recreating this - // process even if they have access to the commitments/key image share broadcast so far - // - // Specifically, the transcript contains the signer's index within the ring, along with the - // opening of the commitment being re-randomized (and what it's re-randomized to) - let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses")); - - let msg_hash = msg_hash.try_into().expect("CLSAG message hash should be 32-bytes"); - self.msg_hash = Some(msg_hash); - - let sign_core = Clsag::sign_core( - &mut rng, - &self.image.expect("verifying a share despite never processing any addendums").0, - &self.context, - self.mask.expect("mask wasn't set"), - &msg_hash, - nonce_sums[0][0].0, - nonce_sums[0][1].0, - ); - self.interim = Some(Interim { - p: sign_core.key_challenge, - c: sign_core.challenged_mask, - clsag: sign_core.incomplete_clsag, - pseudo_out: sign_core.pseudo_out, - }); - - // r - p x, where p is the challenge for the keys - *nonces[0] - dfg::Scalar(sign_core.key_challenge) * view.secret_share().deref() - } - - #[must_use] - fn verify( - &self, - _: dfg::EdwardsPoint, - _: &[Vec], - sum: dfg::Scalar, - ) -> Option { - let interim = self.interim.as_ref().expect("verify called before sign_share"); - let mut clsag = interim.clsag.clone(); - // We produced shares as `r - p x`, yet the signature is actually `r - p x - c x` - // Substract `c x` (saved as `c`) now - clsag.s[usize::from(self.context.decoys.signer_index())] = sum.0 - interim.c; - if clsag - .verify( - self.context.decoys.ring(), - &self.image.expect("verifying a signature despite never processing any addendums").0, - &interim.pseudo_out, - self.msg_hash.as_ref().expect("verify called before sign_share"), - ) - .is_ok() - { - return Some((clsag, interim.pseudo_out)); - } - None - } - - fn verify_share( - &self, - verification_share: dfg::EdwardsPoint, - nonces: &[Vec], - share: dfg::Scalar, - ) -> Result, ()> { - let interim = self.interim.as_ref().expect("verify_share called before sign_share"); - - // For a share `r - p x`, the following two equalities should hold: - 
// - `(r - p x)G == R.0 - pV`, where `V = xG` - // - `(r - p x)H == R.1 - pK`, where `K = xH` (the key image share) - // - // This is effectively a discrete log equality proof for: - // V, K over G, H - // with nonces - // R.0, R.1 - // and solution - // s - // - // Which is a batch-verifiable rewrite of the traditional CP93 proof - // (and also writable as Generalized Schnorr Protocol) - // - // That means that given a proper challenge, this alone can be certainly argued to prove the - // key image share is well-formed and the provided signature so proves for that. - - // This is a bit funky as it doesn't prove the nonces are well-formed however. They're part of - // the prover data/transcript for a CP93/GSP proof, not part of the statement. This practically - // is fine, for a variety of reasons (given a consistent `x`, a consistent `r` can be - // extracted, and the nonces as used in CLSAG are also part of its prover data/transcript). - - let key_image_share = self.key_image_shares[&verification_share.to_bytes()]; - - // Hash every variable relevant here, using the hash output as the random weight - let mut weight_transcript = - RecommendedTranscript::new(b"monero-serai v0.1 ClsagMultisig::verify_share"); - weight_transcript.append_message(b"G", dfg::EdwardsPoint::generator().to_bytes()); - weight_transcript.append_message(b"H", self.key_image_generator.to_bytes()); - weight_transcript.append_message(b"xG", verification_share.to_bytes()); - weight_transcript.append_message(b"xH", key_image_share.to_bytes()); - weight_transcript.append_message(b"rG", nonces[0][0].to_bytes()); - weight_transcript.append_message(b"rH", nonces[0][1].to_bytes()); - weight_transcript.append_message(b"c", dfg::Scalar(interim.p).to_repr()); - weight_transcript.append_message(b"s", share.to_repr()); - let weight = weight_transcript.challenge(b"weight"); - let weight = dfg::Scalar(Scalar::from_bytes_mod_order_wide(&weight.into())); - - let part_one = vec![ - (share, dfg::EdwardsPoint::generator()), - // -(R.0 - pV) == -R.0 + pV - (-dfg::Scalar::ONE, nonces[0][0]), - (dfg::Scalar(interim.p), verification_share), - ]; - - let mut part_two = vec![ - (weight * share, dfg::EdwardsPoint(self.key_image_generator)), - // -(R.1 - pK) == -R.1 + pK - (-weight, nonces[0][1]), - (weight * dfg::Scalar(interim.p), key_image_share), - ]; - - let mut all = part_one; - all.append(&mut part_two); - Ok(all) - } -} diff --git a/networks/monero/ringct/clsag/src/tests.rs b/networks/monero/ringct/clsag/src/tests.rs deleted file mode 100644 index ff994445..00000000 --- a/networks/monero/ringct/clsag/src/tests.rs +++ /dev/null @@ -1,131 +0,0 @@ -use core::ops::Deref; - -use zeroize::Zeroizing; -use rand_core::{RngCore, OsRng}; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar}; - -#[cfg(feature = "multisig")] -use transcript::{Transcript, RecommendedTranscript}; -#[cfg(feature = "multisig")] -use frost::curve::Ed25519; - -use monero_generators::hash_to_point; -use monero_primitives::{Commitment, Decoys}; -use crate::{ClsagContext, Clsag}; -#[cfg(feature = "multisig")] -use crate::ClsagMultisig; - -#[cfg(feature = "multisig")] -use frost::{ - Participant, - sign::AlgorithmMachine, - tests::{key_gen, algorithm_machines_without_clone, sign_without_clone}, -}; - -const RING_LEN: u64 = 11; -const AMOUNT: u64 = 1337; - -#[cfg(feature = "multisig")] -const RING_INDEX: u8 = 3; - -#[test] -fn clsag() { - for real in 0 .. 
RING_LEN { - let msg_hash = [1; 32]; - - let mut secrets = (Zeroizing::new(Scalar::ZERO), Scalar::ZERO); - let mut ring = vec![]; - for i in 0 .. RING_LEN { - let dest = Zeroizing::new(Scalar::random(&mut OsRng)); - let mask = Scalar::random(&mut OsRng); - let amount; - if i == real { - secrets = (dest.clone(), mask); - amount = AMOUNT; - } else { - amount = OsRng.next_u64(); - } - ring - .push([dest.deref() * ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]); - } - - let (mut clsag, pseudo_out) = Clsag::sign( - &mut OsRng, - vec![( - secrets.0.clone(), - ClsagContext::new( - Decoys::new((1 ..= RING_LEN).collect(), u8::try_from(real).unwrap(), ring.clone()) - .unwrap(), - Commitment::new(secrets.1, AMOUNT), - ) - .unwrap(), - )], - Scalar::random(&mut OsRng), - msg_hash, - ) - .unwrap() - .swap_remove(0); - - let image = - hash_to_point((ED25519_BASEPOINT_TABLE * secrets.0.deref()).compress().0) * secrets.0.deref(); - clsag.verify(&ring, &image, &pseudo_out, &msg_hash).unwrap(); - - // make sure verification fails if we throw a random `c1` at it. - clsag.c1 = Scalar::random(&mut OsRng); - assert!(clsag.verify(&ring, &image, &pseudo_out, &msg_hash).is_err()); - } -} - -#[cfg(feature = "multisig")] -#[test] -fn clsag_multisig() { - let keys = key_gen::<_, Ed25519>(&mut OsRng); - - let randomness = Scalar::random(&mut OsRng); - let mut ring = vec![]; - for i in 0 .. RING_LEN { - let dest; - let mask; - let amount; - if i != u64::from(RING_INDEX) { - dest = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE; - mask = Scalar::random(&mut OsRng); - amount = OsRng.next_u64(); - } else { - dest = keys[&Participant::new(1).unwrap()].group_key().0; - mask = randomness; - amount = AMOUNT; - } - ring.push([dest, Commitment::new(mask, amount).calculate()]); - } - - let mask = Scalar::random(&mut OsRng); - let params = || { - let (algorithm, mask_send) = ClsagMultisig::new( - RecommendedTranscript::new(b"Monero Serai CLSAG Test"), - ClsagContext::new( - Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(), - Commitment::new(randomness, AMOUNT), - ) - .unwrap(), - ); - mask_send.send(mask); - algorithm - }; - - sign_without_clone( - &mut OsRng, - keys.clone(), - keys.values().map(|keys| (keys.params().i(), params())).collect(), - algorithm_machines_without_clone( - &mut OsRng, - &keys, - keys - .values() - .map(|keys| (keys.params().i(), AlgorithmMachine::new(params(), keys.clone()))) - .collect(), - ), - &[1; 32], - ); -} diff --git a/networks/monero/ringct/mlsag/Cargo.toml b/networks/monero/ringct/mlsag/Cargo.toml deleted file mode 100644 index d666ebfa..00000000 --- a/networks/monero/ringct/mlsag/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -name = "monero-mlsag" -version = "0.1.0" -description = "The MLSAG linkable ring signature, as defined by the Monero protocol" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/ringct/mlsag" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false } - -thiserror = { version = "1", default-features = false, optional = true } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -# Cryptographic dependencies -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", 
"zeroize"] } - -# Other Monero dependencies -monero-io = { path = "../../io", version = "0.1", default-features = false } -monero-generators = { path = "../../generators", version = "0.4", default-features = false } -monero-primitives = { path = "../../primitives", version = "0.1", default-features = false } - -[features] -std = [ - "std-shims/std", - - "thiserror", - - "zeroize/std", - - "monero-io/std", - "monero-generators/std", - "monero-primitives/std", -] -default = ["std"] diff --git a/networks/monero/ringct/mlsag/LICENSE b/networks/monero/ringct/mlsag/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/ringct/mlsag/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/ringct/mlsag/README.md b/networks/monero/ringct/mlsag/README.md deleted file mode 100644 index 40e979b6..00000000 --- a/networks/monero/ringct/mlsag/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Monero MLSAG - -The MLSAG linkable ring signature, as defined by the Monero protocol. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). diff --git a/networks/monero/ringct/mlsag/src/lib.rs b/networks/monero/ringct/mlsag/src/lib.rs deleted file mode 100644 index ac2e482f..00000000 --- a/networks/monero/ringct/mlsag/src/lib.rs +++ /dev/null @@ -1,242 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] -#![allow(non_snake_case)] - -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, Write}, -}; - -use zeroize::Zeroize; - -use curve25519_dalek::{traits::IsIdentity, Scalar, EdwardsPoint}; - -use monero_io::*; -use monero_generators::{H, hash_to_point}; -use monero_primitives::keccak256_to_scalar; - -/// Errors when working with MLSAGs. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum MlsagError { - /// Invalid ring (such as too small or too large). - #[cfg_attr(feature = "std", error("invalid ring"))] - InvalidRing, - /// Invalid amount of key images. - #[cfg_attr(feature = "std", error("invalid amount of key images"))] - InvalidAmountOfKeyImages, - /// Invalid ss matrix. - #[cfg_attr(feature = "std", error("invalid ss"))] - InvalidSs, - /// Invalid key image. 
- #[cfg_attr(feature = "std", error("invalid key image"))] - InvalidKeyImage, - /// Invalid ci vector. - #[cfg_attr(feature = "std", error("invalid ci"))] - InvalidCi, -} - -/// A vector of rings, forming a matrix, to verify the MLSAG with. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct RingMatrix { - matrix: Vec>, -} - -impl RingMatrix { - /// Construct a ring matrix from an already formatted series of points. - fn new(matrix: Vec>) -> Result { - // Monero requires that there is more than one ring member for MLSAG signatures: - // https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/ - // src/ringct/rctSigs.cpp#L462 - if matrix.len() < 2 { - Err(MlsagError::InvalidRing)?; - } - for member in &matrix { - if member.is_empty() || (member.len() != matrix[0].len()) { - Err(MlsagError::InvalidRing)?; - } - } - - Ok(RingMatrix { matrix }) - } - - /// Construct a ring matrix for an individual output. - pub fn individual( - ring: &[[EdwardsPoint; 2]], - pseudo_out: EdwardsPoint, - ) -> Result { - let mut matrix = Vec::with_capacity(ring.len()); - for ring_member in ring { - matrix.push(vec![ring_member[0], ring_member[1] - pseudo_out]); - } - RingMatrix::new(matrix) - } - - /// Iterate over the members of the matrix. - fn iter(&self) -> impl Iterator { - self.matrix.iter().map(AsRef::as_ref) - } - - /// Get the amount of members in the ring. - pub fn members(&self) -> usize { - self.matrix.len() - } - - /// Get the length of a ring member. - /// - /// A ring member is a vector of points for which the signer knows all of the discrete logarithms - /// of. - pub fn member_len(&self) -> usize { - // this is safe to do as the constructors don't allow empty rings - self.matrix[0].len() - } -} - -/// The MLSAG linkable ring signature, as used in Monero. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct Mlsag { - ss: Vec>, - cc: Scalar, -} - -impl Mlsag { - /// Write a MLSAG. - pub fn write(&self, w: &mut W) -> io::Result<()> { - for ss in &self.ss { - write_raw_vec(write_scalar, ss, w)?; - } - write_scalar(&self.cc, w) - } - - /// Read a MLSAG. - pub fn read(mixins: usize, ss_2_elements: usize, r: &mut R) -> io::Result { - Ok(Mlsag { - ss: (0 .. mixins) - .map(|_| read_raw_vec(read_scalar, ss_2_elements, r)) - .collect::>()?, - cc: read_scalar(r)?, - }) - } - - /// Verify a MLSAG. - /// - /// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which - /// makes assumptions on what has already been transcripted and bound to within `msg`. Do not use - /// this if you don't know what you're doing. - pub fn verify( - &self, - msg: &[u8; 32], - ring: &RingMatrix, - key_images: &[EdwardsPoint], - ) -> Result<(), MlsagError> { - // Mlsag allows for layers to not need linkability, hence they don't need key images - // Monero requires that there is always only 1 non-linkable layer - the amount commitments. 
- if ring.member_len() != (key_images.len() + 1) { - Err(MlsagError::InvalidAmountOfKeyImages)?; - } - - let mut buf = Vec::with_capacity(6 * 32); - buf.extend_from_slice(msg); - - let mut ci = self.cc; - - // This is an iterator over the key images as options with an added entry of `None` at the - // end for the non-linkable layer - let key_images_iter = key_images.iter().map(|ki| Some(*ki)).chain(core::iter::once(None)); - - if ring.matrix.len() != self.ss.len() { - Err(MlsagError::InvalidSs)?; - } - - for (ring_member, ss) in ring.iter().zip(&self.ss) { - if ring_member.len() != ss.len() { - Err(MlsagError::InvalidSs)?; - } - - for ((ring_member_entry, s), ki) in ring_member.iter().zip(ss).zip(key_images_iter.clone()) { - #[allow(non_snake_case)] - let L = EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, ring_member_entry, s); - - let compressed_ring_member_entry = ring_member_entry.compress(); - buf.extend_from_slice(compressed_ring_member_entry.as_bytes()); - buf.extend_from_slice(L.compress().as_bytes()); - - // Not all dimensions need to be linkable, e.g. commitments, and only linkable layers need - // to have key images. - if let Some(ki) = ki { - if ki.is_identity() || (!ki.is_torsion_free()) { - Err(MlsagError::InvalidKeyImage)?; - } - - #[allow(non_snake_case)] - let R = (s * hash_to_point(compressed_ring_member_entry.to_bytes())) + (ci * ki); - buf.extend_from_slice(R.compress().as_bytes()); - } - } - - ci = keccak256_to_scalar(&buf); - // keep the msg in the buffer. - buf.drain(msg.len() ..); - } - - if ci != self.cc { - Err(MlsagError::InvalidCi)? - } - Ok(()) - } -} - -/// Builder for a RingMatrix when using an aggregate signature. -/// -/// This handles the formatting as necessary. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct AggregateRingMatrixBuilder { - key_ring: Vec>, - amounts_ring: Vec, - sum_out: EdwardsPoint, -} - -impl AggregateRingMatrixBuilder { - /// Create a new AggregateRingMatrixBuilder. - /// - /// This takes in the transaction's outputs' commitments and fee used. - pub fn new(commitments: &[EdwardsPoint], fee: u64) -> Self { - AggregateRingMatrixBuilder { - key_ring: vec![], - amounts_ring: vec![], - sum_out: commitments.iter().sum::() + (*H * Scalar::from(fee)), - } - } - - /// Push a ring of [output key, commitment] to the matrix. - pub fn push_ring(&mut self, ring: &[[EdwardsPoint; 2]]) -> Result<(), MlsagError> { - if self.key_ring.is_empty() { - self.key_ring = vec![vec![]; ring.len()]; - // Now that we know the length of the ring, fill the `amounts_ring`. - self.amounts_ring = vec![-self.sum_out; ring.len()]; - } - - if (self.amounts_ring.len() != ring.len()) || ring.is_empty() { - // All the rings in an aggregate matrix must be the same length. - return Err(MlsagError::InvalidRing); - } - - for (i, ring_member) in ring.iter().enumerate() { - self.key_ring[i].push(ring_member[0]); - self.amounts_ring[i] += ring_member[1] - } - - Ok(()) - } - - /// Build and return the [`RingMatrix`]. 
- pub fn build(mut self) -> Result { - for (i, amount_commitment) in self.amounts_ring.drain(..).enumerate() { - self.key_ring[i].push(amount_commitment); - } - RingMatrix::new(self.key_ring) - } -} diff --git a/networks/monero/rpc/Cargo.toml b/networks/monero/rpc/Cargo.toml deleted file mode 100644 index e5e6a650..00000000 --- a/networks/monero/rpc/Cargo.toml +++ /dev/null @@ -1,47 +0,0 @@ -[package] -name = "monero-rpc" -version = "0.1.0" -description = "Trait for an RPC connection to a Monero daemon, built around monero-serai" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/rpc" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false } - -thiserror = { version = "1", default-features = false, optional = true } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } -hex = { version = "0.4", default-features = false, features = ["alloc"] } -serde = { version = "1", default-features = false, features = ["derive", "alloc"] } -serde_json = { version = "1", default-features = false, features = ["alloc"] } - -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -monero-serai = { path = "..", default-features = false } -monero-address = { path = "../wallet/address", default-features = false } - -[features] -std = [ - "std-shims/std", - - "thiserror", - - "zeroize/std", - "hex/std", - "serde/std", - "serde_json/std", - - "monero-serai/std", - "monero-address/std", -] -default = ["std"] diff --git a/networks/monero/rpc/LICENSE b/networks/monero/rpc/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/rpc/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/rpc/README.md b/networks/monero/rpc/README.md deleted file mode 100644 index 4badf1d8..00000000 --- a/networks/monero/rpc/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Monero RPC - -Trait for an RPC connection to a Monero daemon, built around monero-serai. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). 
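
The `monero-mlsag` crate deleted above only exposed verification (there is no signing code in the removed `lib.rs`). As a reference for what that API looked like, here is a sketch of verifying the signature for a single RingCT input, using the names from the deleted `lib.rs` and assuming external use as `monero-mlsag`; the ring, pseudo-out, key image, and serialized signature are assumed to come from an already-parsed transaction:

use curve25519_dalek::edwards::EdwardsPoint;

use monero_mlsag::{RingMatrix, Mlsag};

fn verify_input_mlsag(
  // [output key, commitment] for each ring member referenced by the input
  ring: &[[EdwardsPoint; 2]],
  pseudo_out: EdwardsPoint,
  key_image: EdwardsPoint,
  msg_hash: &[u8; 32],
  serialized_mlsag: &[u8],
) -> Result<(), Box<dyn std::error::Error>> {
  // Each row becomes [key, commitment - pseudo_out], making the commitment layer a
  // signature over the commitment difference (which needs no key image)
  let matrix = RingMatrix::individual(ring, pseudo_out)?;

  // Two scalars per ring member: one for the linkable key layer, one for the commitments
  let mut reader = serialized_mlsag;
  let mlsag = Mlsag::read(ring.len(), 2, &mut reader)?;

  // The sole key image corresponds to the key layer; the commitment layer has none
  mlsag.verify(msg_hash, &matrix, &[key_image])?;
  Ok(())
}
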
diff --git a/networks/monero/rpc/simple-request/Cargo.toml b/networks/monero/rpc/simple-request/Cargo.toml deleted file mode 100644 index 9698c7d6..00000000 --- a/networks/monero/rpc/simple-request/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "monero-simple-request-rpc" -version = "0.1.0" -description = "RPC connection to a Monero daemon via simple-request, built around monero-serai" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/rpc/simple-request" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -hex = { version = "0.4", default-features = false, features = ["alloc"] } -zeroize = { version = "^1.5", default-features = false, features = ["alloc", "std"] } -digest_auth = { version = "0.3", default-features = false } -simple-request = { path = "../../../../common/request", version = "0.1", default-features = false, features = ["tls"] } -tokio = { version = "1", default-features = false } - -monero-rpc = { path = "..", default-features = false, features = ["std"] } - -[dev-dependencies] -monero-address = { path = "../../wallet/address", default-features = false, features = ["std"] } - -tokio = { version = "1", default-features = false, features = ["macros"] } diff --git a/networks/monero/rpc/simple-request/LICENSE b/networks/monero/rpc/simple-request/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/rpc/simple-request/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/rpc/simple-request/README.md b/networks/monero/rpc/simple-request/README.md deleted file mode 100644 index 947e777e..00000000 --- a/networks/monero/rpc/simple-request/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Monero simple-request RPC - -RPC connection to a Monero daemon via simple-request, built around monero-serai. 
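
Before the removed `src/lib.rs` below, a brief usage sketch of this transport, based on the doc comments of the deleted constructors. The daemon URL, port, and the `get_height` route are illustrative only, and the crate is assumed to be imported as `monero_simple_request_rpc`; higher-level helpers live in the `monero-rpc` crate itself and aren't part of this patch:

use std::time::Duration;

use monero_rpc::{Rpc, RpcError};
use monero_simple_request_rpc::SimpleRequestRpc;

async fn daemon_height_sketch() -> Result<(), RpcError> {
  // Unauthenticated connection with the default 30-second request timeout
  let rpc = SimpleRequestRpc::new("http://127.0.0.1:18081".to_string()).await?;

  // A daemon behind digest-auth is reached by embedding credentials in the URL,
  // per the constructor's documentation, optionally with a custom timeout
  let _authed = SimpleRequestRpc::with_custom_timeout(
    "http://user:pass@127.0.0.1:18081".to_string(),
    Duration::from_secs(10),
  )
  .await?;

  // Raw POSTs go through the `Rpc` trait implementation defined below
  let res = rpc.post("get_height", vec![]).await?;
  println!("{}", String::from_utf8_lossy(&res));
  Ok(())
}
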
diff --git a/networks/monero/rpc/simple-request/src/lib.rs b/networks/monero/rpc/simple-request/src/lib.rs deleted file mode 100644 index 0b53e209..00000000 --- a/networks/monero/rpc/simple-request/src/lib.rs +++ /dev/null @@ -1,278 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] - -use core::future::Future; -use std::{sync::Arc, io::Read, time::Duration}; - -use tokio::sync::Mutex; - -use zeroize::Zeroizing; -use digest_auth::{WwwAuthenticateHeader, AuthContext}; -use simple_request::{ - hyper::{StatusCode, header::HeaderValue, Request}, - Response, Client, -}; - -use monero_rpc::{RpcError, Rpc}; - -const DEFAULT_TIMEOUT: Duration = Duration::from_secs(30); - -#[derive(Clone, Debug)] -enum Authentication { - // If unauthenticated, use a single client - Unauthenticated(Client), - // If authenticated, use a single client which supports being locked and tracks its nonce - // This ensures that if a nonce is requested, another caller doesn't make a request invalidating - // it - Authenticated { - username: Zeroizing, - password: Zeroizing, - #[allow(clippy::type_complexity)] - connection: Arc, Client)>>, - }, -} - -/// An HTTP(S) transport for the RPC. -/// -/// Requires tokio. -#[derive(Clone, Debug)] -pub struct SimpleRequestRpc { - authentication: Authentication, - url: String, - request_timeout: Duration, -} - -impl SimpleRequestRpc { - fn digest_auth_challenge( - response: &Response, - ) -> Result, RpcError> { - Ok(if let Some(header) = response.headers().get("www-authenticate") { - Some(( - digest_auth::parse(header.to_str().map_err(|_| { - RpcError::InvalidNode("www-authenticate header wasn't a string".to_string()) - })?) - .map_err(|_| RpcError::InvalidNode("invalid digest-auth response".to_string()))?, - 0, - )) - } else { - None - }) - } - - /// Create a new HTTP(S) RPC connection. - /// - /// A daemon requiring authentication can be used via including the username and password in the - /// URL. - pub async fn new(url: String) -> Result { - Self::with_custom_timeout(url, DEFAULT_TIMEOUT).await - } - - /// Create a new HTTP(S) RPC connection with a custom timeout. - /// - /// A daemon requiring authentication can be used via including the username and password in the - /// URL. 
- pub async fn with_custom_timeout( - mut url: String, - request_timeout: Duration, - ) -> Result { - let authentication = if url.contains('@') { - // Parse out the username and password - let url_clone = Zeroizing::new(url); - let split_url = url_clone.split('@').collect::>(); - if split_url.len() != 2 { - Err(RpcError::ConnectionError("invalid amount of login specifications".to_string()))?; - } - let mut userpass = split_url[0]; - url = split_url[1].to_string(); - - // If there was additionally a protocol string, restore that to the daemon URL - if userpass.contains("://") { - let split_userpass = userpass.split("://").collect::>(); - if split_userpass.len() != 2 { - Err(RpcError::ConnectionError("invalid amount of protocol specifications".to_string()))?; - } - url = split_userpass[0].to_string() + "://" + &url; - userpass = split_userpass[1]; - } - - let split_userpass = userpass.split(':').collect::>(); - if split_userpass.len() > 2 { - Err(RpcError::ConnectionError("invalid amount of passwords".to_string()))?; - } - - let client = Client::without_connection_pool(&url) - .map_err(|_| RpcError::ConnectionError("invalid URL".to_string()))?; - // Obtain the initial challenge, which also somewhat validates this connection - let challenge = Self::digest_auth_challenge( - &client - .request( - Request::post(url.clone()) - .body(vec![].into()) - .map_err(|e| RpcError::ConnectionError(format!("couldn't make request: {e:?}")))?, - ) - .await - .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?, - )?; - Authentication::Authenticated { - username: Zeroizing::new(split_userpass[0].to_string()), - password: Zeroizing::new((*split_userpass.get(1).unwrap_or(&"")).to_string()), - connection: Arc::new(Mutex::new((challenge, client))), - } - } else { - Authentication::Unauthenticated(Client::with_connection_pool()) - }; - - Ok(SimpleRequestRpc { authentication, url, request_timeout }) - } -} - -impl SimpleRequestRpc { - async fn inner_post(&self, route: &str, body: Vec) -> Result, RpcError> { - let request_fn = |uri| { - Request::post(uri) - .body(body.clone().into()) - .map_err(|e| RpcError::ConnectionError(format!("couldn't make request: {e:?}"))) - }; - - async fn body_from_response(response: Response<'_>) -> Result, RpcError> { - let mut res = Vec::with_capacity(128); - response - .body() - .await - .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))? - .read_to_end(&mut res) - .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?; - Ok(res) - } - - for attempt in 0 .. 2 { - return Ok(match &self.authentication { - Authentication::Unauthenticated(client) => { - body_from_response( - client - .request(request_fn(self.url.clone() + "/" + route)?) - .await - .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?, - ) - .await? 
- } - Authentication::Authenticated { username, password, connection } => { - let mut connection_lock = connection.lock().await; - - let mut request = request_fn("/".to_string() + route)?; - - // If we don't have an auth challenge, obtain one - if connection_lock.0.is_none() { - connection_lock.0 = Self::digest_auth_challenge( - &connection_lock - .1 - .request(request) - .await - .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?, - )?; - request = request_fn("/".to_string() + route)?; - } - - // Insert the challenge response, if we have a challenge - if let Some((challenge, cnonce)) = connection_lock.0.as_mut() { - // Update the cnonce - // Overflow isn't a concern as this is a u64 - *cnonce += 1; - - let mut context = AuthContext::new_post::<_, _, _, &[u8]>( - <_ as AsRef>::as_ref(username), - <_ as AsRef>::as_ref(password), - "/".to_string() + route, - None, - ); - context.set_custom_cnonce(hex::encode(cnonce.to_le_bytes())); - - request.headers_mut().insert( - "Authorization", - HeaderValue::from_str( - &challenge - .respond(&context) - .map_err(|_| { - RpcError::InvalidNode("couldn't respond to digest-auth challenge".to_string()) - })? - .to_header_string(), - ) - .map_err(|_| { - RpcError::InternalError( - "digest-auth challenge response wasn't a valid string for an HTTP header" - .to_string(), - ) - })?, - ); - } - - let response = connection_lock - .1 - .request(request) - .await - .map_err(|e| RpcError::ConnectionError(format!("{e:?}"))); - - let (error, is_stale) = match &response { - Err(e) => (Some(e.clone()), false), - Ok(response) => ( - None, - if response.status() == StatusCode::UNAUTHORIZED { - if let Some(header) = response.headers().get("www-authenticate") { - header - .to_str() - .map_err(|_| { - RpcError::InvalidNode("www-authenticate header wasn't a string".to_string()) - })? - .contains("stale") - } else { - false - } - } else { - false - }, - ), - }; - - // If the connection entered an error state, drop the cached challenge as challenges are - // per-connection - // We don't need to create a new connection as simple-request will for us - if error.is_some() || is_stale { - connection_lock.0 = None; - // If we're not already on our second attempt, move to the next loop iteration - // (retrying all of this once) - if attempt == 0 { - continue; - } - if let Some(e) = error { - Err(e)? - } else { - debug_assert!(is_stale); - Err(RpcError::InvalidNode( - "node claimed fresh connection had stale authentication".to_string(), - ))? - } - } else { - body_from_response(response.expect("no response yet also no error?")).await? - } - } - }); - } - - unreachable!() - } -} - -impl Rpc for SimpleRequestRpc { - fn post( - &self, - route: &str, - body: Vec, - ) -> impl Send + Future, RpcError>> { - async move { - tokio::time::timeout(self.request_timeout, self.inner_post(route, body)) - .await - .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))? 
- } - } -} diff --git a/networks/monero/rpc/simple-request/tests/tests.rs b/networks/monero/rpc/simple-request/tests/tests.rs deleted file mode 100644 index 2f7964d6..00000000 --- a/networks/monero/rpc/simple-request/tests/tests.rs +++ /dev/null @@ -1,144 +0,0 @@ -use std::sync::LazyLock; -use tokio::sync::Mutex; - -use monero_address::{Network, MoneroAddress}; - -// monero-rpc doesn't include a transport -// We can't include the simple-request crate there as then we'd have a cyclical dependency -// Accordingly, we test monero-rpc here (implicitly testing the simple-request transport) -use monero_simple_request_rpc::*; - -static SEQUENTIAL: LazyLock> = LazyLock::new(|| Mutex::new(())); - -const ADDRESS: &str = - "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey"; - -#[tokio::test] -async fn test_rpc() { - use monero_rpc::Rpc; - - let guard = SEQUENTIAL.lock().await; - - let rpc = - SimpleRequestRpc::new("http://serai:seraidex@127.0.0.1:18081".to_string()).await.unwrap(); - - { - // Test get_height - let height = rpc.get_height().await.unwrap(); - // The height should be the amount of blocks on chain - // The number of a block should be its zero-indexed position - // Accordingly, there should be no block whose number is the height - assert!(rpc.get_block_by_number(height).await.is_err()); - let block_number = height - 1; - // There should be a block just prior - let block = rpc.get_block_by_number(block_number).await.unwrap(); - - // Also test the block RPC routes are consistent - assert_eq!(block.number().unwrap(), block_number); - assert_eq!(rpc.get_block(block.hash()).await.unwrap(), block); - assert_eq!(rpc.get_block_hash(block_number).await.unwrap(), block.hash()); - - // And finally the hardfork version route - assert_eq!(rpc.get_hardfork_version().await.unwrap(), block.header.hardfork_version); - } - - // Test generate_blocks - for amount_of_blocks in [1, 5] { - let (blocks, number) = rpc - .generate_blocks( - &MoneroAddress::from_str(Network::Mainnet, ADDRESS).unwrap(), - amount_of_blocks, - ) - .await - .unwrap(); - let height = rpc.get_height().await.unwrap(); - assert_eq!(number, height - 1); - - let mut actual_blocks = Vec::with_capacity(amount_of_blocks); - for i in (height - amount_of_blocks) .. height { - actual_blocks.push(rpc.get_block_by_number(i).await.unwrap().hash()); - } - assert_eq!(blocks, actual_blocks); - } - - drop(guard); -} - -#[tokio::test] -async fn test_decoy_rpc() { - use monero_rpc::{Rpc, DecoyRpc}; - - let guard = SEQUENTIAL.lock().await; - - let rpc = - SimpleRequestRpc::new("http://serai:seraidex@127.0.0.1:18081".to_string()).await.unwrap(); - - // Ensure there's blocks on-chain - rpc - .generate_blocks(&MoneroAddress::from_str(Network::Mainnet, ADDRESS).unwrap(), 100) - .await - .unwrap(); - - // Test get_output_distribution - // Our documentation for our Rust fn defines it as taking two block numbers - { - let distribution_len = rpc.get_output_distribution_end_height().await.unwrap(); - assert_eq!(distribution_len, rpc.get_height().await.unwrap()); - - rpc.get_output_distribution(0 ..= distribution_len).await.unwrap_err(); - assert_eq!( - rpc.get_output_distribution(0 .. distribution_len).await.unwrap().len(), - distribution_len - ); - assert_eq!( - rpc.get_output_distribution(.. distribution_len).await.unwrap().len(), - distribution_len - ); - - assert_eq!( - rpc.get_output_distribution(.. 
(distribution_len - 1)).await.unwrap().len(), - distribution_len - 1 - ); - assert_eq!( - rpc.get_output_distribution(1 .. distribution_len).await.unwrap().len(), - distribution_len - 1 - ); - - assert_eq!(rpc.get_output_distribution(0 ..= 0).await.unwrap().len(), 1); - assert_eq!(rpc.get_output_distribution(0 ..= 1).await.unwrap().len(), 2); - assert_eq!(rpc.get_output_distribution(1 ..= 1).await.unwrap().len(), 1); - - rpc.get_output_distribution(0 .. 0).await.unwrap_err(); - #[allow(clippy::reversed_empty_ranges)] - rpc.get_output_distribution(1 .. 0).await.unwrap_err(); - } - - drop(guard); -} - -// This test passes yet requires a mainnet node, which we don't have reliable access to in CI. -/* -#[tokio::test] -async fn test_zero_out_tx_o_indexes() { - use monero_rpc::Rpc; - - let guard = SEQUENTIAL.lock().await; - - let rpc = SimpleRequestRpc::new("https://node.sethforprivacy.com".to_string()).await.unwrap(); - - assert_eq!( - rpc - .get_o_indexes( - hex::decode("17ce4c8feeb82a6d6adaa8a89724b32bf4456f6909c7f84c8ce3ee9ebba19163") - .unwrap() - .try_into() - .unwrap() - ) - .await - .unwrap(), - Vec::::new() - ); - - drop(guard); -} -*/ diff --git a/networks/monero/rpc/src/lib.rs b/networks/monero/rpc/src/lib.rs deleted file mode 100644 index d59ba821..00000000 --- a/networks/monero/rpc/src/lib.rs +++ /dev/null @@ -1,1310 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use core::{ - future::Future, - fmt::Debug, - ops::{Bound, RangeBounds}, -}; -use std_shims::{ - alloc::format, - vec, - vec::Vec, - io, - string::{String, ToString}, -}; - -use zeroize::Zeroize; - -use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint}; - -use serde::{Serialize, Deserialize, de::DeserializeOwned}; -use serde_json::{Value, json}; - -use monero_serai::{ - io::*, - transaction::{Input, Timelock, Pruned, Transaction}, - block::Block, - DEFAULT_LOCK_WINDOW, -}; -use monero_address::Address; - -// Number of blocks the fee estimate will be valid for -// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c -// /src/wallet/wallet2.cpp#L121 -const GRACE_BLOCKS_FOR_FEE_ESTIMATE: u64 = 10; - -// Monero errors if more than 100 is requested unless using a non-restricted RPC -// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 -// /src/rpc/core_rpc_server.cpp#L75 -const TXS_PER_REQUEST: usize = 100; - -/// An error from the RPC. -#[derive(Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum RpcError { - /// An internal error. - #[cfg_attr(feature = "std", error("internal error ({0})"))] - InternalError(String), - /// A connection error with the node. - #[cfg_attr(feature = "std", error("connection error ({0})"))] - ConnectionError(String), - /// The node is invalid per the expected protocol. - #[cfg_attr(feature = "std", error("invalid node ({0})"))] - InvalidNode(String), - /// Requested transactions weren't found. - #[cfg_attr(feature = "std", error("transactions not found"))] - TransactionsNotFound(Vec<[u8; 32]>), - /// The transaction was pruned. - /// - /// Pruned transactions are not supported at this time. - #[cfg_attr(feature = "std", error("pruned transaction"))] - PrunedTransaction, - /// A transaction (sent or received) was invalid. - #[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))] - InvalidTransaction([u8; 32]), - /// The returned fee was unusable. 
- #[cfg_attr(feature = "std", error("unexpected fee response"))] - InvalidFee, - /// The priority intended for use wasn't usable. - #[cfg_attr(feature = "std", error("invalid priority"))] - InvalidPriority, -} - -/// A block which is able to be scanned. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct ScannableBlock { - /// The block which is being scanned. - pub block: Block, - /// The non-miner transactions within this block. - pub transactions: Vec>, - /// The output index for the first RingCT output within this block. - /// - /// None if there are no RingCT outputs within this block, Some otherwise. - pub output_index_for_first_ringct_output: Option, -} - -/// A struct containing a fee rate. -/// -/// The fee rate is defined as a per-weight cost, along with a mask for rounding purposes. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub struct FeeRate { - /// The fee per-weight of the transaction. - per_weight: u64, - /// The mask to round with. - mask: u64, -} - -impl FeeRate { - /// Construct a new fee rate. - pub fn new(per_weight: u64, mask: u64) -> Result { - if (per_weight == 0) || (mask == 0) { - Err(RpcError::InvalidFee)?; - } - Ok(FeeRate { per_weight, mask }) - } - - /// Write the FeeRate. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> { - w.write_all(&self.per_weight.to_le_bytes())?; - w.write_all(&self.mask.to_le_bytes()) - } - - /// Serialize the FeeRate to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(16); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read a FeeRate. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn read(r: &mut impl io::Read) -> io::Result { - let per_weight = read_u64(r)?; - let mask = read_u64(r)?; - FeeRate::new(per_weight, mask).map_err(io::Error::other) - } - - /// Calculate the fee to use from the weight. - /// - /// This function may panic upon overflow. - pub fn calculate_fee_from_weight(&self, weight: usize) -> u64 { - let fee = - self.per_weight * u64::try_from(weight).expect("couldn't convert weight (usize) to u64"); - let fee = fee.div_ceil(self.mask) * self.mask; - debug_assert_eq!( - Some(weight), - self.calculate_weight_from_fee(fee), - "Miscalculated weight from fee" - ); - fee - } - - /// Calculate the weight from the fee. - /// - /// Returns `None` if the weight would not fit within a `usize`. - pub fn calculate_weight_from_fee(&self, fee: u64) -> Option { - usize::try_from(fee / self.per_weight).ok() - } -} - -/// The priority for the fee. -/// -/// Higher-priority transactions will be included in blocks earlier. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[allow(non_camel_case_types)] -pub enum FeePriority { - /// The `Unimportant` priority, as defined by Monero. - Unimportant, - /// The `Normal` priority, as defined by Monero. - Normal, - /// The `Elevated` priority, as defined by Monero. - Elevated, - /// The `Priority` priority, as defined by Monero. - Priority, - /// A custom priority. - Custom { - /// The numeric representation of the priority, as used within the RPC. 
- priority: u32, - }, -} - -/// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/ -/// src/simplewallet/simplewallet.cpp#L161 -impl FeePriority { - pub(crate) fn fee_priority(&self) -> u32 { - match self { - FeePriority::Unimportant => 1, - FeePriority::Normal => 2, - FeePriority::Elevated => 3, - FeePriority::Priority => 4, - FeePriority::Custom { priority, .. } => *priority, - } - } -} - -#[derive(Debug, Deserialize)] -struct JsonRpcResponse { - result: T, -} - -#[derive(Debug, Deserialize)] -struct TransactionResponse { - tx_hash: String, - as_hex: String, - pruned_as_hex: String, -} -#[derive(Debug, Deserialize)] -struct TransactionsResponse { - #[serde(default)] - missed_tx: Vec, - txs: Vec, -} - -/// The response to an query for the information of a RingCT output. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct OutputInformation { - /// The block number of the block this output was added to the chain in. - /// - /// This is equivalent to he height of the blockchain at the time the block was added. - pub height: usize, - /// If the output is unlocked, per the node's local view. - pub unlocked: bool, - /// The output's key. - /// - /// This is a CompressedEdwardsY, not an EdwardsPoint, as it may be invalid. CompressedEdwardsY - /// only asserts validity on decompression and allows representing compressed types. - pub key: CompressedEdwardsY, - /// The output's commitment. - pub commitment: EdwardsPoint, - /// The transaction which created this output. - pub transaction: [u8; 32], -} - -fn rpc_hex(value: &str) -> Result, RpcError> { - hex::decode(value).map_err(|_| RpcError::InvalidNode("expected hex wasn't hex".to_string())) -} - -fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> { - rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode("hash wasn't 32-bytes".to_string())) -} - -fn rpc_point(point: &str) -> Result { - decompress_point( - rpc_hex(point)? - .try_into() - .map_err(|_| RpcError::InvalidNode(format!("invalid point: {point}")))?, - ) - .ok_or_else(|| RpcError::InvalidNode(format!("invalid point: {point}"))) -} - -/// An RPC connection to a Monero daemon. -/// -/// This is abstract such that users can use an HTTP library (which being their choice), a -/// Tor/i2p-based transport, or even a memory buffer an external service somehow routes. -/// -/// While no implementors are directly provided, [monero-simple-request-rpc]( -/// https://github.com/serai-dex/serai/tree/develop/networks/monero/rpc/simple-request -/// ) is recommended. -pub trait Rpc: Sync + Clone { - /// Perform a POST request to the specified route with the specified body. - /// - /// The implementor is left to handle anything such as authentication. - fn post( - &self, - route: &str, - body: Vec, - ) -> impl Send + Future, RpcError>>; - - /// Perform a RPC call to the specified route with the provided parameters. - /// - /// This is NOT a JSON-RPC call. They use a route of "json_rpc" and are available via - /// `json_rpc_call`. - fn rpc_call( - &self, - route: &str, - params: Option, - ) -> impl Send + Future> { - async move { - let res = self - .post( - route, - if let Some(params) = params.as_ref() { - serde_json::to_string(params) - .map_err(|e| { - RpcError::InternalError(format!( - "couldn't convert parameters ({params:?}) to JSON: {e:?}" - )) - })? 
- .into_bytes() - } else { - vec![] - }, - ) - .await?; - let res_str = std_shims::str::from_utf8(&res) - .map_err(|_| RpcError::InvalidNode("response wasn't utf-8".to_string()))?; - serde_json::from_str(res_str) - .map_err(|_| RpcError::InvalidNode(format!("response wasn't the expected json: {res_str}"))) - } - } - - /// Perform a JSON-RPC call with the specified method with the provided parameters. - fn json_rpc_call( - &self, - method: &str, - params: Option, - ) -> impl Send + Future> { - async move { - let mut req = json!({ "method": method }); - if let Some(params) = params { - req - .as_object_mut() - .expect("accessing object as object failed?") - .insert("params".into(), params); - } - Ok(self.rpc_call::<_, JsonRpcResponse>("json_rpc", Some(req)).await?.result) - } - } - - /// Perform a binary call to the specified route with the provided parameters. - fn bin_call( - &self, - route: &str, - params: Vec, - ) -> impl Send + Future, RpcError>> { - async move { self.post(route, params).await } - } - - /// Get the active blockchain protocol version. - /// - /// This is specifically the major version within the most recent block header. - fn get_hardfork_version(&self) -> impl Send + Future> { - async move { - #[derive(Debug, Deserialize)] - struct HeaderResponse { - major_version: u8, - } - - #[derive(Debug, Deserialize)] - struct LastHeaderResponse { - block_header: HeaderResponse, - } - - Ok( - self - .json_rpc_call::("get_last_block_header", None) - .await? - .block_header - .major_version, - ) - } - } - - /// Get the height of the Monero blockchain. - /// - /// The height is defined as the amount of blocks on the blockchain. For a blockchain with only - /// its genesis block, the height will be 1. - fn get_height(&self) -> impl Send + Future> { - async move { - #[derive(Debug, Deserialize)] - struct HeightResponse { - height: usize, - } - let res = self.rpc_call::, HeightResponse>("get_height", None).await?.height; - if res == 0 { - Err(RpcError::InvalidNode("node responded with 0 for the height".to_string()))?; - } - Ok(res) - } - } - - /// Get the specified transactions. - /// - /// The received transactions will be hashed in order to verify the correct transactions were - /// returned. - fn get_transactions( - &self, - hashes: &[[u8; 32]], - ) -> impl Send + Future, RpcError>> { - async move { - if hashes.is_empty() { - return Ok(vec![]); - } - - let mut hashes_hex = hashes.iter().map(hex::encode).collect::>(); - let mut all_txs = Vec::with_capacity(hashes.len()); - while !hashes_hex.is_empty() { - let this_count = TXS_PER_REQUEST.min(hashes_hex.len()); - - let txs: TransactionsResponse = self - .rpc_call( - "get_transactions", - Some(json!({ - "txs_hashes": hashes_hex.drain(.. 
this_count).collect::>(), - })), - ) - .await?; - - if !txs.missed_tx.is_empty() { - Err(RpcError::TransactionsNotFound( - txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::>()?, - ))?; - } - if txs.txs.len() != this_count { - Err(RpcError::InvalidNode( - "not missing any transactions yet didn't return all transactions".to_string(), - ))?; - } - - all_txs.extend(txs.txs); - } - - all_txs - .iter() - .enumerate() - .map(|(i, res)| { - // https://github.com/monero-project/monero/issues/8311 - let buf = rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?; - let mut buf = buf.as_slice(); - let tx = Transaction::read(&mut buf).map_err(|_| match hash_hex(&res.tx_hash) { - Ok(hash) => RpcError::InvalidTransaction(hash), - Err(err) => err, - })?; - if !buf.is_empty() { - Err(RpcError::InvalidNode("transaction had extra bytes after it".to_string()))?; - } - - // We check this to ensure we didn't read a pruned transaction when we meant to read an - // actual transaction. That shouldn't be possible, as they have different serializations, - // yet it helps to ensure that if we applied the above exception (using the pruned data), - // it was for the right reason - if res.as_hex.is_empty() { - match tx.prefix().inputs.first() { - Some(Input::Gen { .. }) => (), - _ => Err(RpcError::PrunedTransaction)?, - } - } - - // This does run a few keccak256 hashes, which is pointless if the node is trusted - // In exchange, this provides resilience against invalid/malicious nodes - if tx.hash() != hashes[i] { - Err(RpcError::InvalidNode( - "replied with transaction wasn't the requested transaction".to_string(), - ))?; - } - - Ok(tx) - }) - .collect() - } - } - - /// Get the specified transactions in their pruned format. - fn get_pruned_transactions( - &self, - hashes: &[[u8; 32]], - ) -> impl Send + Future>, RpcError>> { - async move { - if hashes.is_empty() { - return Ok(vec![]); - } - - let mut hashes_hex = hashes.iter().map(hex::encode).collect::>(); - let mut all_txs = Vec::with_capacity(hashes.len()); - while !hashes_hex.is_empty() { - let this_count = TXS_PER_REQUEST.min(hashes_hex.len()); - - let txs: TransactionsResponse = self - .rpc_call( - "get_transactions", - Some(json!({ - "txs_hashes": hashes_hex.drain(.. this_count).collect::>(), - "prune": true, - })), - ) - .await?; - - if !txs.missed_tx.is_empty() { - Err(RpcError::TransactionsNotFound( - txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::>()?, - ))?; - } - - all_txs.extend(txs.txs); - } - - all_txs - .iter() - .map(|res| { - let buf = rpc_hex(&res.pruned_as_hex)?; - let mut buf = buf.as_slice(); - let tx = - Transaction::::read(&mut buf).map_err(|_| match hash_hex(&res.tx_hash) { - Ok(hash) => RpcError::InvalidTransaction(hash), - Err(err) => err, - })?; - if !buf.is_empty() { - Err(RpcError::InvalidNode("pruned transaction had extra bytes after it".to_string()))?; - } - Ok(tx) - }) - .collect() - } - } - - /// Get the specified transaction. - /// - /// The received transaction will be hashed in order to verify the correct transaction was - /// returned. - fn get_transaction( - &self, - tx: [u8; 32], - ) -> impl Send + Future> { - async move { self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0)) } - } - - /// Get the specified transaction in its pruned format. 
- fn get_pruned_transaction( - &self, - tx: [u8; 32], - ) -> impl Send + Future, RpcError>> { - async move { self.get_pruned_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0)) } - } - - /// Get the hash of a block from the node. - /// - /// `number` is the block's zero-indexed position on the blockchain (`0` for the genesis block, - /// `height - 1` for the latest block). - fn get_block_hash( - &self, - number: usize, - ) -> impl Send + Future> { - async move { - #[derive(Debug, Deserialize)] - struct BlockHeaderResponse { - hash: String, - } - #[derive(Debug, Deserialize)] - struct BlockHeaderByHeightResponse { - block_header: BlockHeaderResponse, - } - - let header: BlockHeaderByHeightResponse = - self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?; - hash_hex(&header.block_header.hash) - } - } - - /// Get a block from the node by its hash. - /// - /// The received block will be hashed in order to verify the correct block was returned. - fn get_block(&self, hash: [u8; 32]) -> impl Send + Future> { - async move { - #[derive(Debug, Deserialize)] - struct BlockResponse { - blob: String, - } - - let res: BlockResponse = - self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?; - - let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()) - .map_err(|_| RpcError::InvalidNode("invalid block".to_string()))?; - if block.hash() != hash { - Err(RpcError::InvalidNode("different block than requested (hash)".to_string()))?; - } - Ok(block) - } - } - - /// Get a block from the node by its number. - /// - /// `number` is the block's zero-indexed position on the blockchain (`0` for the genesis block, - /// `height - 1` for the latest block). - fn get_block_by_number( - &self, - number: usize, - ) -> impl Send + Future> { - async move { - #[derive(Debug, Deserialize)] - struct BlockResponse { - blob: String, - } - - let res: BlockResponse = - self.json_rpc_call("get_block", Some(json!({ "height": number }))).await?; - - let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()) - .map_err(|_| RpcError::InvalidNode("invalid block".to_string()))?; - - // Make sure this is actually the block for this number - match block.miner_transaction.prefix().inputs.first() { - Some(Input::Gen(actual)) => { - if *actual == number { - Ok(block) - } else { - Err(RpcError::InvalidNode("different block than requested (number)".to_string())) - } - } - _ => Err(RpcError::InvalidNode( - "block's miner_transaction didn't have an input of kind Input::Gen".to_string(), - )), - } - } - } - - /// Get a block's scannable form. - fn get_scannable_block( - &self, - block: Block, - ) -> impl Send + Future> { - async move { - let transactions = self.get_pruned_transactions(&block.transactions).await?; - - /* - Requesting the output index for each output we sucessfully scan would cause a loss of - privacy. We could instead request the output indexes for all outputs we scan, yet this - would notably increase the amount of RPC calls we make. - - We solve this by requesting the output index for the first RingCT output in the block, which - should be within the miner transaction. Then, as we scan transactions, we update the output - index ourselves. - - Please note we only will scan RingCT outputs so we only need to track the RingCT output - index. This decision was made due to spending CN outputs potentially having burdensome - requirements (the need to make a v1 TX due to insufficient decoys). 
- - We bound ourselves to only scanning RingCT outputs by only scanning v2 transactions. This is - safe and correct since: - - 1) v1 transactions cannot create RingCT outputs. - - https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - /src/cryptonote_basic/cryptonote_format_utils.cpp#L866-L869 - - 2) v2 miner transactions implicitly create RingCT outputs. - - https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - /src/blockchain_db/blockchain_db.cpp#L232-L241 - - 3) v2 transactions must create RingCT outputs. - - https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45 - /src/cryptonote_core/blockchain.cpp#L3055-L3065 - - That does bound on the hard fork version being >= 3, yet all v2 TXs have a hard fork - version > 3. - - https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - /src/cryptonote_core/blockchain.cpp#L3417 - */ - - // Get the index for the first output - let mut output_index_for_first_ringct_output = None; - let miner_tx_hash = block.miner_transaction.hash(); - let miner_tx = Transaction::::from(block.miner_transaction.clone()); - for (hash, tx) in core::iter::once((&miner_tx_hash, &miner_tx)) - .chain(block.transactions.iter().zip(&transactions)) - { - // If this isn't a RingCT output, or there are no outputs, move to the next TX - if (!matches!(tx, Transaction::V2 { .. })) || tx.prefix().outputs.is_empty() { - continue; - } - - let index = *self.get_o_indexes(*hash).await?.first().ok_or_else(|| { - RpcError::InvalidNode( - "requested output indexes for a TX with outputs and got none".to_string(), - ) - })?; - output_index_for_first_ringct_output = Some(index); - break; - } - - Ok(ScannableBlock { block, transactions, output_index_for_first_ringct_output }) - } - } - - /// Get a block's scannable form by its hash. - // TODO: get_blocks.bin - fn get_scannable_block_by_hash( - &self, - hash: [u8; 32], - ) -> impl Send + Future> { - async move { self.get_scannable_block(self.get_block(hash).await?).await } - } - - /// Get a block's scannable form by its number. - // TODO: get_blocks_by_height.bin - fn get_scannable_block_by_number( - &self, - number: usize, - ) -> impl Send + Future> { - async move { self.get_scannable_block(self.get_block_by_number(number).await?).await } - } - - /// Get the currently estimated fee rate from the node. - /// - /// This may be manipulated to unsafe levels and MUST be sanity checked. - /// - /// This MUST NOT be expected to be deterministic in any way. 
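The fee rate returned by `get_fee_rate` below is applied per the `FeeRate` rounding documented earlier: the per-weight fee is rounded up to the next multiple of the quantization mask. A standalone arithmetic sketch with illustrative numbers (the real logic lives in `FeeRate::calculate_fee_from_weight` above):

fn fee_from_weight(per_weight: u64, mask: u64, weight: u64) -> u64 {
    // Same arithmetic as FeeRate::calculate_fee_from_weight: multiply, then round the
    // total up to a multiple of the mask
    let fee = per_weight * weight;
    fee.div_ceil(mask) * mask
}

fn main() {
    // e.g. 20 atomic units per byte, a mask of 10_000, and a 1503-byte transaction
    assert_eq!(fee_from_weight(20, 10_000, 1_503), 40_000);
    // exact multiples are left untouched
    assert_eq!(fee_from_weight(20, 10_000, 1_500), 30_000);
}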
- fn get_fee_rate( - &self, - priority: FeePriority, - ) -> impl Send + Future> { - async move { - #[derive(Debug, Deserialize)] - struct FeeResponse { - status: String, - fees: Option>, - fee: u64, - quantization_mask: u64, - } - - let res: FeeResponse = self - .json_rpc_call( - "get_fee_estimate", - Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })), - ) - .await?; - - if res.status != "OK" { - Err(RpcError::InvalidFee)?; - } - - if let Some(fees) = res.fees { - // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/ - // src/wallet/wallet2.cpp#L7615-L7620 - let priority_idx = usize::try_from(if priority.fee_priority() >= 4 { - 3 - } else { - priority.fee_priority().saturating_sub(1) - }) - .map_err(|_| RpcError::InvalidPriority)?; - - if priority_idx >= fees.len() { - Err(RpcError::InvalidPriority) - } else { - FeeRate::new(fees[priority_idx], res.quantization_mask) - } - } else { - // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/ - // src/wallet/wallet2.cpp#L7569-L7584 - // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/ - // src/wallet/wallet2.cpp#L7660-L7661 - let priority_idx = usize::try_from(if priority.fee_priority() == 0 { - 1 - } else { - priority.fee_priority() - 1 - }) - .map_err(|_| RpcError::InvalidPriority)?; - let multipliers = [1, 5, 25, 1000]; - if priority_idx >= multipliers.len() { - // though not an RPC error, it seems sensible to treat as such - Err(RpcError::InvalidPriority)?; - } - let fee_multiplier = multipliers[priority_idx]; - - FeeRate::new(res.fee * fee_multiplier, res.quantization_mask) - } - } - } - - /// Publish a transaction. - fn publish_transaction( - &self, - tx: &Transaction, - ) -> impl Send + Future> { - async move { - #[allow(dead_code)] - #[derive(Debug, Deserialize)] - struct SendRawResponse { - status: String, - double_spend: bool, - fee_too_low: bool, - invalid_input: bool, - invalid_output: bool, - low_mixin: bool, - not_relayed: bool, - overspend: bool, - too_big: bool, - too_few_outputs: bool, - reason: String, - } - - let res: SendRawResponse = self - .rpc_call( - "send_raw_transaction", - Some(json!({ "tx_as_hex": hex::encode(tx.serialize()), "do_sanity_checks": false })), - ) - .await?; - - if res.status != "OK" { - Err(RpcError::InvalidTransaction(tx.hash()))?; - } - - Ok(()) - } - } - - /// Generate blocks, with the specified address receiving the block reward. - /// - /// Returns the hashes of the generated blocks and the last block's number. - fn generate_blocks( - &self, - address: &Address, - block_count: usize, - ) -> impl Send + Future, usize), RpcError>> { - async move { - #[derive(Debug, Deserialize)] - struct BlocksResponse { - blocks: Vec, - height: usize, - } - - let res = self - .json_rpc_call::( - "generateblocks", - Some(json!({ - "wallet_address": address.to_string(), - "amount_of_blocks": block_count - })), - ) - .await?; - - let mut blocks = Vec::with_capacity(res.blocks.len()); - for block in res.blocks { - blocks.push(hash_hex(&block)?); - } - Ok((blocks, res.height)) - } - } - - /// Get the output indexes of the specified transaction. 
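A small sketch of the legacy multiplier path taken by `get_fee_rate` above when the node doesn't return a per-priority fee vector; the mapping mirrors the wallet2 behaviour referenced in the comments, with out-of-range priorities rejected. The helper name is illustrative, not part of the crate.

fn legacy_fee_multiplier(fee_priority: u32) -> Option<u64> {
    let multipliers = [1u64, 5, 25, 1000];
    // Priority 0 falls back to the Normal index; otherwise the priority is 1-indexed
    let idx = if fee_priority == 0 { 1 } else { (fee_priority - 1) as usize };
    multipliers.get(idx).copied()
}

fn main() {
    assert_eq!(legacy_fee_multiplier(0), Some(5)); // default maps to Normal
    assert_eq!(legacy_fee_multiplier(2), Some(5)); // Normal
    assert_eq!(legacy_fee_multiplier(4), Some(1000)); // Priority
    assert_eq!(legacy_fee_multiplier(9), None); // out of range, treated as InvalidPriority
}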
- fn get_o_indexes( - &self, - hash: [u8; 32], - ) -> impl Send + Future, RpcError>> { - async move { - // Given the immaturity of Rust epee libraries, this is a homegrown one which is only - // validated to work against this specific function - - // Header for EPEE, an 8-byte magic and a version - const EPEE_HEADER: &[u8] = b"\x01\x11\x01\x01\x01\x01\x02\x01\x01"; - - // Read an EPEE VarInt, distinct from the VarInts used throughout the rest of the protocol - fn read_epee_vi(reader: &mut R) -> io::Result { - let vi_start = read_byte(reader)?; - let len = match vi_start & 0b11 { - 0 => 1, - 1 => 2, - 2 => 4, - 3 => 8, - _ => unreachable!(), - }; - let mut vi = u64::from(vi_start >> 2); - for i in 1 .. len { - vi |= u64::from(read_byte(reader)?) << (((i - 1) * 8) + 6); - } - Ok(vi) - } - - let mut request = EPEE_HEADER.to_vec(); - // Number of fields (shifted over 2 bits as the 2 LSBs are reserved for metadata) - request.push(1 << 2); - // Length of field name - request.push(4); - // Field name - request.extend(b"txid"); - // Type of field - request.push(10); - // Length of string, since this byte array is technically a string - request.push(32 << 2); - // The "string" - request.extend(hash); - - let indexes_buf = self.bin_call("get_o_indexes.bin", request).await?; - let mut indexes: &[u8] = indexes_buf.as_ref(); - - (|| { - let mut res = None; - let mut has_status = false; - - if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER { - Err(io::Error::other("invalid header"))?; - } - - let read_object = |reader: &mut &[u8]| -> io::Result> { - // Read the amount of fields - let fields = read_byte(reader)? >> 2; - - for _ in 0 .. fields { - // Read the length of the field's name - let name_len = read_byte(reader)?; - // Read the name of the field - let name = read_raw_vec(read_byte, name_len.into(), reader)?; - - let type_with_array_flag = read_byte(reader)?; - // The type of this field, without the potentially set array flag - let kind = type_with_array_flag & (!0x80); - let has_array_flag = type_with_array_flag != kind; - - // Read this many instances of the field - let iters = if has_array_flag { read_epee_vi(reader)? } else { 1 }; - - // Check the field type - { - #[allow(clippy::match_same_arms)] - let (expected_type, expected_array_flag) = match name.as_slice() { - b"o_indexes" => (5, true), - b"status" => (10, false), - b"untrusted" => (11, false), - b"credits" => (5, false), - b"top_hash" => (10, false), - // On-purposely prints name as a byte vector to prevent printing arbitrary strings - // This is a self-describing format so we don't have to error here, yet we don't - // claim this to be a complete deserialization function - // To ensure it works for this specific use case, it's best to ensure it's limited - // to this specific use case (ensuring we have less variables to deal with) - _ => { - Err(io::Error::other(format!("unrecognized field in get_o_indexes: {name:?}")))? 
- } - }; - if (expected_type != kind) || (expected_array_flag != has_array_flag) { - let fmt_array_bool = |array_bool| if array_bool { "array" } else { "not array" }; - Err(io::Error::other(format!( - "field {name:?} was {kind} ({}), expected {expected_type} ({})", - fmt_array_bool(has_array_flag), - fmt_array_bool(expected_array_flag) - )))?; - } - } - - let read_field_as_bytes = match kind { - /* - // i64 - 1 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader), - // i32 - 2 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader), - // i16 - 3 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader), - // i8 - 4 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader), - */ - // u64 - 5 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader), - /* - // u32 - 6 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader), - // u16 - 7 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader), - // u8 - 8 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader), - // double - 9 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader), - */ - // string, or any collection of bytes - 10 => |reader: &mut &[u8]| { - let len = read_epee_vi(reader)?; - read_raw_vec( - read_byte, - len.try_into().map_err(|_| io::Error::other("u64 length exceeded usize"))?, - reader, - ) - }, - // bool - 11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader), - /* - // object, errors here as it shouldn't be used on this call - 12 => { - |_: &mut &[u8]| Err(io::Error::other("node used object in reply to get_o_indexes")) - } - // array, so far unused - 13 => |_: &mut &[u8]| Err(io::Error::other("node used the unused array type")), - */ - _ => |_: &mut &[u8]| Err(io::Error::other("node used an invalid type")), - }; - - let mut bytes_res = vec![]; - for _ in 0 .. iters { - bytes_res.push(read_field_as_bytes(reader)?); - } - - let mut actual_res = Vec::with_capacity(bytes_res.len()); - match name.as_slice() { - b"o_indexes" => { - for o_index in bytes_res { - actual_res.push(read_u64(&mut o_index.as_slice())?); - } - res = Some(actual_res); - } - b"status" => { - if bytes_res - .first() - .ok_or_else(|| io::Error::other("status was a 0-length array"))? - .as_slice() != - b"OK" - { - Err(io::Error::other("response wasn't OK"))?; - } - has_status = true; - } - b"untrusted" | b"credits" | b"top_hash" => continue, - _ => Err(io::Error::other("unrecognized field in get_o_indexes"))?, - } - } - - if !has_status { - Err(io::Error::other("response didn't contain a status"))?; - } - - // If the Vec was empty, it would've been omitted, hence the unwrap_or - Ok(res.unwrap_or(vec![])) - }; - - read_object(&mut indexes) - })() - .map_err(|e| RpcError::InvalidNode(format!("invalid binary response: {e:?}"))) - } - } -} - -/// A trait for any object which can be used to select RingCT decoys. -/// -/// An implementation is provided for any satisfier of `Rpc`. It is not recommended to use an `Rpc` -/// object to satisfy this. This should be satisfied by a local store of the output distribution, -/// both for performance and to prevent potential attacks a remote node can perform. -pub trait DecoyRpc: Sync { - /// Get the height the output distribution ends at. - /// - /// This is equivalent to the height of the blockchain it's for. This is intended to be cheaper - /// than fetching the entire output distribution. - fn get_output_distribution_end_height( - &self, - ) -> impl Send + Future>; - - /// Get the RingCT (zero-amount) output distribution. - /// - /// `range` is in terms of block numbers. 
The result may be smaller than the requested range if - /// the range starts before RingCT outputs were created on-chain. - fn get_output_distribution( - &self, - range: impl Send + RangeBounds, - ) -> impl Send + Future, RpcError>>; - - /// Get the specified outputs from the RingCT (zero-amount) pool. - fn get_outs( - &self, - indexes: &[u64], - ) -> impl Send + Future, RpcError>>; - - /// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their - /// timelock has been satisfied. - /// - /// The timelock being satisfied is distinct from being free of the 10-block lock applied to all - /// Monero transactions. - /// - /// The node is trusted for if the output is unlocked unless `fingerprintable_deterministic` is - /// set to true. If `fingerprintable_deterministic` is set to true, the node's local view isn't - /// used, yet the transaction's timelock is checked to be unlocked at the specified `height`. - /// This offers a deterministic decoy selection, yet is fingerprintable as time-based timelocks - /// aren't evaluated (and considered locked, preventing their selection). - fn get_unlocked_outputs( - &self, - indexes: &[u64], - height: usize, - fingerprintable_deterministic: bool, - ) -> impl Send + Future>, RpcError>>; -} - -impl DecoyRpc for R { - fn get_output_distribution_end_height( - &self, - ) -> impl Send + Future> { - async move { ::get_height(self).await } - } - - fn get_output_distribution( - &self, - range: impl Send + RangeBounds, - ) -> impl Send + Future, RpcError>> { - async move { - #[derive(Default, Debug, Deserialize)] - struct Distribution { - distribution: Vec, - // A blockchain with just its genesis block has a height of 1 - start_height: usize, - } - - #[derive(Debug, Deserialize)] - struct Distributions { - distributions: [Distribution; 1], - status: String, - } - - let from = match range.start_bound() { - Bound::Included(from) => *from, - Bound::Excluded(from) => from.checked_add(1).ok_or_else(|| { - RpcError::InternalError("range's from wasn't representable".to_string()) - })?, - Bound::Unbounded => 0, - }; - let to = match range.end_bound() { - Bound::Included(to) => *to, - Bound::Excluded(to) => to - .checked_sub(1) - .ok_or_else(|| RpcError::InternalError("range's to wasn't representable".to_string()))?, - Bound::Unbounded => self.get_height().await? 
- 1, - }; - if from > to { - Err(RpcError::InternalError(format!( - "malformed range: inclusive start {from}, inclusive end {to}" - )))?; - } - - let zero_zero_case = (from == 0) && (to == 0); - let distributions: Distributions = self - .json_rpc_call( - "get_output_distribution", - Some(json!({ - "binary": false, - "amounts": [0], - "cumulative": true, - // These are actually block numbers, not heights - "from_height": from, - "to_height": if zero_zero_case { 1 } else { to }, - })), - ) - .await?; - - if distributions.status != "OK" { - Err(RpcError::ConnectionError( - "node couldn't service this request for the output distribution".to_string(), - ))?; - } - - let mut distributions = distributions.distributions; - let Distribution { start_height, mut distribution } = core::mem::take(&mut distributions[0]); - // start_height is also actually a block number, and it should be at least `from` - // It may be after depending on when these outputs first appeared on the blockchain - // Unfortunately, we can't validate without a binary search to find the RingCT activation - // block and an iterative search from there, so we solely sanity check it - if start_height < from { - Err(RpcError::InvalidNode(format!( - "requested distribution from {from} and got from {start_height}" - )))?; - } - // It shouldn't be after `to` though - if start_height > to { - Err(RpcError::InvalidNode(format!( - "requested distribution to {to} and got from {start_height}" - )))?; - } - - let expected_len = if zero_zero_case { - 2 - } else { - (to - start_height).checked_add(1).ok_or_else(|| { - RpcError::InternalError("expected length of distribution exceeded usize".to_string()) - })? - }; - // Yet this is actually a height - if expected_len != distribution.len() { - Err(RpcError::InvalidNode(format!( - "distribution length ({}) wasn't of the requested length ({})", - distribution.len(), - expected_len - )))?; - } - // Requesting to = 0 returns the distribution for the entire chain - // We work around this by requesting 0, 1 (yielding two blocks), then popping the second - // block - if zero_zero_case { - distribution.pop(); - } - - // Check the distribution monotonically increases - { - let mut monotonic = 0; - for d in &distribution { - if *d < monotonic { - Err(RpcError::InvalidNode( - "received output distribution didn't increase monotonically".to_string(), - ))?; - } - monotonic = *d; - } - } - - Ok(distribution) - } - } - - fn get_outs( - &self, - indexes: &[u64], - ) -> impl Send + Future, RpcError>> { - async move { - #[derive(Debug, Deserialize)] - struct OutputResponse { - height: usize, - unlocked: bool, - key: String, - mask: String, - txid: String, - } - - #[derive(Debug, Deserialize)] - struct OutsResponse { - status: String, - outs: Vec, - } - - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/rpc/core_rpc_server.cpp#L67 - const MAX_OUTS: usize = 5000; - - let mut res = Vec::with_capacity(indexes.len()); - for indexes in indexes.chunks(MAX_OUTS) { - let rpc_res: OutsResponse = self - .rpc_call( - "get_outs", - Some(json!({ - "get_txid": true, - "outputs": indexes.iter().map(|o| json!({ - "amount": 0, - "index": o - })).collect::>() - })), - ) - .await?; - - if rpc_res.status != "OK" { - Err(RpcError::InvalidNode("bad response to get_outs".to_string()))?; - } - - res.extend( - rpc_res - .outs - .into_iter() - .map(|output| { - Ok(OutputInformation { - height: output.height, - unlocked: output.unlocked, - key: CompressedEdwardsY( - rpc_hex(&output.key)? 
- .try_into() - .map_err(|_| RpcError::InvalidNode("output key wasn't 32 bytes".to_string()))?, - ), - commitment: rpc_point(&output.mask)?, - transaction: hash_hex(&output.txid)?, - }) - }) - .collect::, RpcError>>()?, - ); - } - - Ok(res) - } - } - - fn get_unlocked_outputs( - &self, - indexes: &[u64], - height: usize, - fingerprintable_deterministic: bool, - ) -> impl Send + Future>, RpcError>> { - async move { - let outs = self.get_outs(indexes).await?; - - // Only need to fetch txs to do deterministic check on timelock - let txs = if fingerprintable_deterministic { - self.get_transactions(&outs.iter().map(|out| out.transaction).collect::>()).await? - } else { - vec![] - }; - - // TODO: https://github.com/serai-dex/serai/issues/104 - outs - .iter() - .enumerate() - .map(|(i, out)| { - // Allow keys to be invalid, though if they are, return None to trigger selection of a - // new decoy - // Only valid keys can be used in CLSAG proofs, hence the need for re-selection, yet - // invalid keys may honestly exist on the blockchain - let Some(key) = out.key.decompress() else { - return Ok(None); - }; - Ok(Some([key, out.commitment]).filter(|_| { - if fingerprintable_deterministic { - // https://github.com/monero-project/monero/blob - // /cc73fe71162d564ffda8e549b79a350bca53c454/src/cryptonote_core - // /blockchain.cpp#L90 - const ACCEPTED_TIMELOCK_DELTA: usize = 1; - - // https://github.com/monero-project/monero/blob - // /cc73fe71162d564ffda8e549b79a350bca53c454/src/cryptonote_core - // /blockchain.cpp#L3836 - out.height.checked_add(DEFAULT_LOCK_WINDOW).is_some_and(|locked| locked <= height) && - (Timelock::Block(height.wrapping_add(ACCEPTED_TIMELOCK_DELTA - 1)) >= - txs[i].prefix().additional_timelock) - } else { - out.unlocked - } - })) - }) - .collect() - } - } -} diff --git a/networks/monero/src/block.rs b/networks/monero/src/block.rs deleted file mode 100644 index 5ab85cc7..00000000 --- a/networks/monero/src/block.rs +++ /dev/null @@ -1,165 +0,0 @@ -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, Write}, -}; - -use crate::{ - io::*, - primitives::keccak256, - merkle::merkle_root, - transaction::{Input, Transaction}, -}; - -const CORRECT_BLOCK_HASH_202612: [u8; 32] = - hex_literal::hex!("426d16cff04c71f8b16340b722dc4010a2dd3831c22041431f772547ba6e331a"); -const EXISTING_BLOCK_HASH_202612: [u8; 32] = - hex_literal::hex!("bbd604d2ba11ba27935e006ed39c9bfdd99b76bf4a50654bc1e1e61217962698"); - -/// A Monero block's header. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct BlockHeader { - /// The hard fork of the protocol this block follows. - /// - /// Per the C++ codebase, this is the `major_version`. - pub hardfork_version: u8, - /// A signal for a proposed hard fork. - /// - /// Per the C++ codebase, this is the `minor_version`. - pub hardfork_signal: u8, - /// Seconds since the epoch. - pub timestamp: u64, - /// The previous block's hash. - pub previous: [u8; 32], - /// The nonce used to mine the block. - /// - /// Miners should increment this while attempting to find a block with a hash satisfying the PoW - /// rules. - pub nonce: u32, -} - -impl BlockHeader { - /// Write the BlockHeader. - pub fn write(&self, w: &mut W) -> io::Result<()> { - write_varint(&self.hardfork_version, w)?; - write_varint(&self.hardfork_signal, w)?; - write_varint(&self.timestamp, w)?; - w.write_all(&self.previous)?; - w.write_all(&self.nonce.to_le_bytes()) - } - - /// Serialize the BlockHeader to a `Vec`. 
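A round-trip sketch of the `BlockHeader` encoding defined here, assuming monero-serai with its std feature; the field values are arbitrary placeholders.

use monero_serai::block::BlockHeader;

fn main() {
    let header = BlockHeader {
        hardfork_version: 16,
        hardfork_signal: 16,
        timestamp: 1_700_000_000,
        previous: [0; 32],
        nonce: 0,
    };
    // write() emits varints for the versions/timestamp and raw bytes for the rest,
    // so reading the serialization back yields the same header
    let bytes = header.serialize();
    assert_eq!(BlockHeader::read(&mut bytes.as_slice()).unwrap(), header);
}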
- pub fn serialize(&self) -> Vec { - let mut serialized = vec![]; - self.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - /// Read a BlockHeader. - pub fn read(r: &mut R) -> io::Result { - Ok(BlockHeader { - hardfork_version: read_varint(r)?, - hardfork_signal: read_varint(r)?, - timestamp: read_varint(r)?, - previous: read_bytes(r)?, - nonce: read_bytes(r).map(u32::from_le_bytes)?, - }) - } -} - -/// A Monero block. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Block { - /// The block's header. - pub header: BlockHeader, - /// The miner's transaction. - pub miner_transaction: Transaction, - /// The transactions within this block. - pub transactions: Vec<[u8; 32]>, -} - -impl Block { - /// The zero-indexed position of this block within the blockchain. - /// - /// This information comes from the Block's miner transaction. If the miner transaction isn't - /// structed as expected, this will return None. This will return Some for any Block which would - /// pass the consensus rules. - // https://github.com/monero-project/monero/blob/a1dc85c5373a30f14aaf7dcfdd95f5a7375d3623 - // /src/cryptonote_core/blockchain.cpp#L1365-L1382 - pub fn number(&self) -> Option { - match &self.miner_transaction { - Transaction::V1 { prefix, .. } | Transaction::V2 { prefix, .. } => { - match prefix.inputs.first() { - Some(Input::Gen(number)) => Some(*number), - _ => None, - } - } - } - } - - /// Write the Block. - pub fn write(&self, w: &mut W) -> io::Result<()> { - self.header.write(w)?; - self.miner_transaction.write(w)?; - write_varint(&self.transactions.len(), w)?; - for tx in &self.transactions { - w.write_all(tx)?; - } - Ok(()) - } - - /// Serialize the Block to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut serialized = vec![]; - self.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - /// Serialize the block as required for the proof of work hash. - /// - /// This is distinct from the serialization required for the block hash. To get the block hash, - /// use the [`Block::hash`] function. - pub fn serialize_pow_hash(&self) -> Vec { - let mut blob = self.header.serialize(); - blob.extend_from_slice(&merkle_root(self.miner_transaction.hash(), &self.transactions)); - write_varint( - &(1 + - u64::try_from(self.transactions.len()) - .expect("amount of transactions in block exceeded u64::MAX")), - &mut blob, - ) - .expect("write failed but doesn't fail"); - blob - } - - /// Get the hash of this block. - pub fn hash(&self) -> [u8; 32] { - let mut hashable = self.serialize_pow_hash(); - // Monero pre-appends a VarInt of the block-to-hash'ss length before getting the block hash, - // but doesn't do this when getting the proof of work hash :) - let mut hashing_blob = Vec::with_capacity(9 + hashable.len()); - write_varint( - &u64::try_from(hashable.len()).expect("length of block hash's preimage exceeded u64::MAX"), - &mut hashing_blob, - ) - .expect("write failed but doesn't fail"); - hashing_blob.append(&mut hashable); - - let hash = keccak256(hashing_blob); - if hash == CORRECT_BLOCK_HASH_202612 { - return EXISTING_BLOCK_HASH_202612; - }; - hash - } - - /// Read a Block. - pub fn read(r: &mut R) -> io::Result { - Ok(Block { - header: BlockHeader::read(r)?, - miner_transaction: Transaction::read(r)?, - transactions: (0_usize .. read_varint(r)?) 
- .map(|_| read_bytes(r)) - .collect::>()?, - }) - } -} diff --git a/networks/monero/src/lib.rs b/networks/monero/src/lib.rs deleted file mode 100644 index 2eaa63e1..00000000 --- a/networks/monero/src/lib.rs +++ /dev/null @@ -1,39 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use monero_io as io; -pub use monero_generators as generators; -pub use monero_primitives as primitives; - -mod merkle; - -/// Ring Signature structs and functionality. -pub mod ring_signatures; - -/// RingCT structs and functionality. -pub mod ringct; - -/// Transaction structs and functionality. -pub mod transaction; -/// Block structs and functionality. -pub mod block; - -#[cfg(test)] -mod tests; - -/// The minimum amount of blocks an output is locked for. -/// -/// If Monero suffered a re-organization, any transactions which selected decoys belonging to -/// recent blocks would become invalidated. Accordingly, transactions must use decoys which are -/// presumed to not be invalidated in the future. If wallets only selected n-block-old outputs as -/// decoys, then any ring member within the past n blocks would have to be the real spend. -/// Preventing this at the consensus layer ensures privacy and integrity. -pub const DEFAULT_LOCK_WINDOW: usize = 10; - -/// The minimum amount of blocks a coinbase output is locked for. -pub const COINBASE_LOCK_WINDOW: usize = 60; - -/// Monero's block time target, in seconds. -pub const BLOCK_TIME: usize = 120; diff --git a/networks/monero/src/merkle.rs b/networks/monero/src/merkle.rs deleted file mode 100644 index 2be31df2..00000000 --- a/networks/monero/src/merkle.rs +++ /dev/null @@ -1,55 +0,0 @@ -use std_shims::vec::Vec; - -use crate::primitives::keccak256; - -pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] { - match leafs.len() { - 0 => root, - 1 => keccak256([root, leafs[0]].concat()), - _ => { - let mut hashes = Vec::with_capacity(1 + leafs.len()); - hashes.push(root); - hashes.extend(leafs); - - // Monero preprocess this so the length is a power of 2 - let mut high_pow_2 = 4; // 4 is the lowest value this can be - while high_pow_2 < hashes.len() { - high_pow_2 *= 2; - } - let low_pow_2 = high_pow_2 / 2; - - // Merge right-most hashes until we're at the low_pow_2 - { - let overage = hashes.len() - low_pow_2; - let mut rightmost = hashes.drain((low_pow_2 - overage) ..); - // This is true since we took overage from beneath and above low_pow_2, taking twice as - // many elements as overage - debug_assert_eq!(rightmost.len() % 2, 0); - - let mut paired_hashes = Vec::with_capacity(overage); - while let Some(left) = rightmost.next() { - let right = rightmost.next().expect("rightmost is of even length"); - paired_hashes.push(keccak256([left.as_ref(), &right].concat())); - } - drop(rightmost); - - hashes.extend(paired_hashes); - assert_eq!(hashes.len(), low_pow_2); - } - - // Do a traditional pairing off - let mut new_hashes = Vec::with_capacity(hashes.len() / 2); - while hashes.len() > 1 { - let mut i = 0; - while i < hashes.len() { - new_hashes.push(keccak256([hashes[i], hashes[i + 1]].concat())); - i += 2; - } - - hashes = new_hashes; - new_hashes = Vec::with_capacity(hashes.len() / 2); - } - hashes[0] - } - } -} diff --git a/networks/monero/src/ring_signatures.rs b/networks/monero/src/ring_signatures.rs deleted file mode 100644 index c76d8d89..00000000 --- a/networks/monero/src/ring_signatures.rs +++ /dev/null @@ -1,101 +0,0 @@ 
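The power-of-two preprocessing in `merkle_root` above can be summarized by the counts involved. This sketch models only how many hashes remain and how many rightmost pairs get merged, not the keccak256 hashing itself; the function name is illustrative.

fn power_of_two_layout(hashes: usize) -> (usize, usize) {
    assert!(hashes > 2, "0, 1, and 2 hashes are special-cased by merkle_root");
    // Find the smallest power of two (at least 4) not less than the hash count
    let mut high_pow_2 = 4;
    while high_pow_2 < hashes {
        high_pow_2 *= 2;
    }
    let low_pow_2 = high_pow_2 / 2;
    // The 2 * overage rightmost hashes are paired off so low_pow_2 hashes remain
    (low_pow_2, hashes - low_pow_2)
}

fn main() {
    // A root plus 5 transaction hashes: 2 hashes stay as-is, 4 collapse into 2 pairs
    assert_eq!(power_of_two_layout(6), (4, 2));
    // A root plus 8 transaction hashes: only the rightmost pair is merged
    assert_eq!(power_of_two_layout(9), (8, 1));
}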
-use std_shims::{
-  io::{self, *},
-  vec::Vec,
-};
-
-use zeroize::Zeroize;
-
-use curve25519_dalek::{EdwardsPoint, Scalar};
-
-use crate::{io::*, generators::hash_to_point, primitives::keccak256_to_scalar};
-
-#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
-pub(crate) struct Signature {
-  #[cfg(test)]
-  pub(crate) c: Scalar,
-  #[cfg(test)]
-  pub(crate) s: Scalar,
-  #[cfg(not(test))]
-  c: Scalar,
-  #[cfg(not(test))]
-  s: Scalar,
-}
-
-impl Signature {
-  fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
-    write_scalar(&self.c, w)?;
-    write_scalar(&self.s, w)?;
-    Ok(())
-  }
-
-  fn read<R: Read>(r: &mut R) -> io::Result<Signature> {
-    Ok(Signature { c: read_scalar(r)?, s: read_scalar(r)? })
-  }
-}
-
-/// A ring signature.
-///
-/// This was used by the original Cryptonote transaction protocol and was deprecated with RingCT.
-#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
-pub struct RingSignature {
-  #[cfg(test)]
-  pub(crate) sigs: Vec<Signature>,
-  #[cfg(not(test))]
-  sigs: Vec<Signature>,
-}
-
-impl RingSignature {
-  /// Write the RingSignature.
-  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
-    for sig in &self.sigs {
-      sig.write(w)?;
-    }
-    Ok(())
-  }
-
-  /// Read a RingSignature.
-  pub fn read<R: Read>(members: usize, r: &mut R) -> io::Result<RingSignature> {
-    Ok(RingSignature { sigs: read_raw_vec(Signature::read, members, r)? })
-  }
-
-  /// Verify the ring signature.
-  pub fn verify(&self, msg: &[u8; 32], ring: &[EdwardsPoint], key_image: &EdwardsPoint) -> bool {
-    if ring.len() != self.sigs.len() {
-      return false;
-    }
-
-    let mut buf = Vec::with_capacity(32 + (2 * 32 * ring.len()));
-    buf.extend_from_slice(msg);
-
-    let mut sum = Scalar::ZERO;
-    for (ring_member, sig) in ring.iter().zip(&self.sigs) {
-      /*
-        The traditional Schnorr signature is:
-          r = sample()
-          c = H(r G || m)
-          s = r - c x
-        Verified as:
-          s G + c A == R
-
-        Each ring member here performs a dual-Schnorr signature for:
-          s G + c A
-          s HtP(A) + c K
-        Where both of these values, r G and r HtP(A) for the real spend, are pushed to the
-        transcript. This also serves as a DLEq proof between the key and the key image.
-
-        Checking sum(c) == H(transcript) acts as a disjunction, where any one of the `c`s can be
-        modified to cause the intended sum, if and only if a corresponding `s` value is known.
-      */
-
-      #[allow(non_snake_case)]
-      let Li = EdwardsPoint::vartime_double_scalar_mul_basepoint(&sig.c, ring_member, &sig.s);
-      buf.extend_from_slice(Li.compress().as_bytes());
-      #[allow(non_snake_case)]
-      let Ri = (sig.s * hash_to_point(ring_member.compress().to_bytes())) + (sig.c * key_image);
-      buf.extend_from_slice(Ri.compress().as_bytes());
-
-      sum += sig.c;
-    }
-    sum == keccak256_to_scalar(buf)
-  }
-}
diff --git a/networks/monero/src/ringct.rs b/networks/monero/src/ringct.rs
deleted file mode 100644
index 220f289d..00000000
--- a/networks/monero/src/ringct.rs
+++ /dev/null
@@ -1,478 +0,0 @@
-use std_shims::{
-  vec,
-  vec::Vec,
-  io::{self, Read, Write},
-};
-
-use zeroize::Zeroize;
-
-use curve25519_dalek::edwards::EdwardsPoint;
-
-pub use monero_mlsag as mlsag;
-pub use monero_clsag as clsag;
-pub use monero_borromean as borromean;
-pub use monero_bulletproofs as bulletproofs;
-
-use crate::{
-  io::*,
-  ringct::{mlsag::Mlsag, clsag::Clsag, borromean::BorromeanRange, bulletproofs::Bulletproof},
-};
-
-/// An encrypted amount.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum EncryptedAmount {
-  /// The original format for encrypted amounts.
-  Original {
-    /// A mask used with a mask derived from the shared secret to encrypt the amount.
-    mask: [u8; 32],
-    /// The amount, as a scalar, encrypted.
- amount: [u8; 32], - }, - /// The "compact" format for encrypted amounts. - Compact { - /// The amount, as a u64, encrypted. - amount: [u8; 8], - }, -} - -impl EncryptedAmount { - /// Read an EncryptedAmount from a reader. - pub fn read(compact: bool, r: &mut R) -> io::Result { - Ok(if !compact { - EncryptedAmount::Original { mask: read_bytes(r)?, amount: read_bytes(r)? } - } else { - EncryptedAmount::Compact { amount: read_bytes(r)? } - }) - } - - /// Write the EncryptedAmount to a writer. - pub fn write(&self, w: &mut W) -> io::Result<()> { - match self { - EncryptedAmount::Original { mask, amount } => { - w.write_all(mask)?; - w.write_all(amount) - } - EncryptedAmount::Compact { amount } => w.write_all(amount), - } - } -} - -/// The type of the RingCT data. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub enum RctType { - /// One MLSAG for multiple inputs and Borromean range proofs. - /// - /// This aligns with RCTTypeFull. - AggregateMlsagBorromean, - // One MLSAG for each input and a Borromean range proof. - /// - /// This aligns with RCTTypeSimple. - MlsagBorromean, - // One MLSAG for each input and a Bulletproof. - /// - /// This aligns with RCTTypeBulletproof. - MlsagBulletproofs, - /// One MLSAG for each input and a Bulletproof, yet using EncryptedAmount::Compact. - /// - /// This aligns with RCTTypeBulletproof2. - MlsagBulletproofsCompactAmount, - /// One CLSAG for each input and a Bulletproof. - /// - /// This aligns with RCTTypeCLSAG. - ClsagBulletproof, - /// One CLSAG for each input and a Bulletproof+. - /// - /// This aligns with RCTTypeBulletproofPlus. - ClsagBulletproofPlus, -} - -impl From for u8 { - fn from(rct_type: RctType) -> u8 { - match rct_type { - RctType::AggregateMlsagBorromean => 1, - RctType::MlsagBorromean => 2, - RctType::MlsagBulletproofs => 3, - RctType::MlsagBulletproofsCompactAmount => 4, - RctType::ClsagBulletproof => 5, - RctType::ClsagBulletproofPlus => 6, - } - } -} - -impl TryFrom for RctType { - type Error = (); - fn try_from(byte: u8) -> Result { - Ok(match byte { - 1 => RctType::AggregateMlsagBorromean, - 2 => RctType::MlsagBorromean, - 3 => RctType::MlsagBulletproofs, - 4 => RctType::MlsagBulletproofsCompactAmount, - 5 => RctType::ClsagBulletproof, - 6 => RctType::ClsagBulletproofPlus, - _ => Err(())?, - }) - } -} - -impl RctType { - /// True if this RctType uses compact encrypted amounts, false otherwise. - pub fn compact_encrypted_amounts(&self) -> bool { - match self { - RctType::AggregateMlsagBorromean | RctType::MlsagBorromean | RctType::MlsagBulletproofs => { - false - } - RctType::MlsagBulletproofsCompactAmount | - RctType::ClsagBulletproof | - RctType::ClsagBulletproofPlus => true, - } - } - - /// True if this RctType uses a Bulletproof, false otherwise. - pub(crate) fn bulletproof(&self) -> bool { - match self { - RctType::MlsagBulletproofs | - RctType::MlsagBulletproofsCompactAmount | - RctType::ClsagBulletproof => true, - RctType::AggregateMlsagBorromean | - RctType::MlsagBorromean | - RctType::ClsagBulletproofPlus => false, - } - } - - /// True if this RctType uses a Bulletproof+, false otherwise. - pub(crate) fn bulletproof_plus(&self) -> bool { - match self { - RctType::ClsagBulletproofPlus => true, - RctType::AggregateMlsagBorromean | - RctType::MlsagBorromean | - RctType::MlsagBulletproofs | - RctType::MlsagBulletproofsCompactAmount | - RctType::ClsagBulletproof => false, - } - } -} - -/// The base of the RingCT data. 
-/// -/// This excludes all proofs (which once initially verified do not need to be kept around) and -/// solely keeps data which either impacts the effects of the transactions or is needed to scan it. -/// -/// The one exception for this is `pseudo_outs`, which was originally present here yet moved to -/// RctPrunable in a later hard fork (causing it to be present in both). -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct RctBase { - /// The fee used by this transaction. - pub fee: u64, - /// The re-randomized amount commitments used within inputs. - /// - /// This field was deprecated and is empty for modern RctTypes. - pub pseudo_outs: Vec, - /// The encrypted amounts for the recipients to decrypt. - pub encrypted_amounts: Vec, - /// The output commitments. - pub commitments: Vec, -} - -impl RctBase { - /// Write the RctBase. - pub fn write(&self, w: &mut W, rct_type: RctType) -> io::Result<()> { - w.write_all(&[u8::from(rct_type)])?; - - write_varint(&self.fee, w)?; - if rct_type == RctType::MlsagBorromean { - write_raw_vec(write_point, &self.pseudo_outs, w)?; - } - for encrypted_amount in &self.encrypted_amounts { - encrypted_amount.write(w)?; - } - write_raw_vec(write_point, &self.commitments, w) - } - - /// Read a RctBase. - pub fn read( - inputs: usize, - outputs: usize, - r: &mut R, - ) -> io::Result> { - let rct_type = read_byte(r)?; - if rct_type == 0 { - return Ok(None); - } - let rct_type = - RctType::try_from(rct_type).map_err(|()| io::Error::other("invalid RCT type"))?; - - match rct_type { - RctType::AggregateMlsagBorromean | RctType::MlsagBorromean => {} - RctType::MlsagBulletproofs | - RctType::MlsagBulletproofsCompactAmount | - RctType::ClsagBulletproof | - RctType::ClsagBulletproofPlus => { - if outputs == 0 { - // Because the Bulletproofs(+) layout must be canonical, there must be 1 Bulletproof if - // Bulletproofs are in use - // If there are Bulletproofs, there must be a matching amount of outputs, implicitly - // banning 0 outputs - // Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced - Err(io::Error::other("RCT with Bulletproofs(+) had 0 outputs"))?; - } - } - } - - Ok(Some(( - rct_type, - RctBase { - fee: read_varint(r)?, - // Only read pseudo_outs if they have yet to be moved to RctPrunable - // This would apply to AggregateMlsagBorromean and MlsagBorromean, except - // AggregateMlsagBorromean doesn't use pseudo_outs due to using the sum of the output - // commitments directly as the effective singular pseudo-out - pseudo_outs: if rct_type == RctType::MlsagBorromean { - read_raw_vec(read_point, inputs, r)? - } else { - vec![] - }, - encrypted_amounts: (0 .. outputs) - .map(|_| EncryptedAmount::read(rct_type.compact_encrypted_amounts(), r)) - .collect::>()?, - commitments: read_raw_vec(read_point, outputs, r)?, - }, - ))) - } -} - -/// The prunable part of the RingCT data. -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum RctPrunable { - /// An aggregate MLSAG with Borromean range proofs. - AggregateMlsagBorromean { - /// The aggregate MLSAG ring signature. - mlsag: Mlsag, - /// The Borromean range proofs for each output. - borromean: Vec, - }, - /// MLSAGs with Borromean range proofs. - MlsagBorromean { - /// The MLSAG ring signatures for each input. - mlsags: Vec, - /// The Borromean range proofs for each output. - borromean: Vec, - }, - /// MLSAGs with Bulletproofs. - MlsagBulletproofs { - /// The MLSAG ring signatures for each input. - mlsags: Vec, - /// The re-blinded commitments for the outputs being spent. 
- pseudo_outs: Vec, - /// The aggregate Bulletproof, proving the outputs are within range. - bulletproof: Bulletproof, - }, - /// MLSAGs with Bulletproofs and compact encrypted amounts. - /// - /// This has an identical layout to MlsagBulletproofs and is interpreted the exact same way. It's - /// only differentiated to ensure discovery of the correct RctType. - MlsagBulletproofsCompactAmount { - /// The MLSAG ring signatures for each input. - mlsags: Vec, - /// The re-blinded commitments for the outputs being spent. - pseudo_outs: Vec, - /// The aggregate Bulletproof, proving the outputs are within range. - bulletproof: Bulletproof, - }, - /// CLSAGs with Bulletproofs(+). - Clsag { - /// The CLSAGs for each input. - clsags: Vec, - /// The re-blinded commitments for the outputs being spent. - pseudo_outs: Vec, - /// The aggregate Bulletproof(+), proving the outputs are within range. - bulletproof: Bulletproof, - }, -} - -impl RctPrunable { - /// Write the RctPrunable. - pub fn write(&self, w: &mut W, rct_type: RctType) -> io::Result<()> { - match self { - RctPrunable::AggregateMlsagBorromean { borromean, mlsag } => { - write_raw_vec(BorromeanRange::write, borromean, w)?; - mlsag.write(w) - } - RctPrunable::MlsagBorromean { borromean, mlsags } => { - write_raw_vec(BorromeanRange::write, borromean, w)?; - write_raw_vec(Mlsag::write, mlsags, w) - } - RctPrunable::MlsagBulletproofs { bulletproof, mlsags, pseudo_outs } | - RctPrunable::MlsagBulletproofsCompactAmount { bulletproof, mlsags, pseudo_outs } => { - if rct_type == RctType::MlsagBulletproofs { - w.write_all(&1u32.to_le_bytes())?; - } else { - w.write_all(&[1])?; - } - bulletproof.write(w)?; - - write_raw_vec(Mlsag::write, mlsags, w)?; - write_raw_vec(write_point, pseudo_outs, w) - } - RctPrunable::Clsag { bulletproof, clsags, pseudo_outs } => { - w.write_all(&[1])?; - bulletproof.write(w)?; - - write_raw_vec(Clsag::write, clsags, w)?; - write_raw_vec(write_point, pseudo_outs, w) - } - } - } - - /// Serialize the RctPrunable to a `Vec`. - pub fn serialize(&self, rct_type: RctType) -> Vec { - let mut serialized = vec![]; - self - .write(&mut serialized, rct_type) - .expect("write failed but doesn't fail"); - serialized - } - - /// Read a RctPrunable. - pub fn read( - rct_type: RctType, - ring_length: usize, - inputs: usize, - outputs: usize, - r: &mut R, - ) -> io::Result { - Ok(match rct_type { - RctType::AggregateMlsagBorromean => RctPrunable::AggregateMlsagBorromean { - borromean: read_raw_vec(BorromeanRange::read, outputs, r)?, - mlsag: Mlsag::read( - ring_length, - inputs.checked_add(1).ok_or_else(|| { - io::Error::other("reading a MLSAG for more inputs than representable") - })?, - r, - )?, - }, - RctType::MlsagBorromean => RctPrunable::MlsagBorromean { - borromean: read_raw_vec(BorromeanRange::read, outputs, r)?, - mlsags: (0 .. inputs).map(|_| Mlsag::read(ring_length, 2, r)).collect::>()?, - }, - RctType::MlsagBulletproofs | RctType::MlsagBulletproofsCompactAmount => { - let bulletproof = { - if (if rct_type == RctType::MlsagBulletproofs { - u64::from(read_u32(r)?) - } else { - read_varint(r)? - }) != 1 - { - Err(io::Error::other("n bulletproofs instead of one"))?; - } - Bulletproof::read(r)? - }; - let mlsags = - (0 .. 
inputs).map(|_| Mlsag::read(ring_length, 2, r)).collect::>()?; - let pseudo_outs = read_raw_vec(read_point, inputs, r)?; - if rct_type == RctType::MlsagBulletproofs { - RctPrunable::MlsagBulletproofs { bulletproof, mlsags, pseudo_outs } - } else { - debug_assert_eq!(rct_type, RctType::MlsagBulletproofsCompactAmount); - RctPrunable::MlsagBulletproofsCompactAmount { bulletproof, mlsags, pseudo_outs } - } - } - RctType::ClsagBulletproof | RctType::ClsagBulletproofPlus => RctPrunable::Clsag { - bulletproof: { - if read_varint::<_, u64>(r)? != 1 { - Err(io::Error::other("n bulletproofs instead of one"))?; - } - (if rct_type == RctType::ClsagBulletproof { - Bulletproof::read - } else { - Bulletproof::read_plus - })(r)? - }, - clsags: (0 .. inputs).map(|_| Clsag::read(ring_length, r)).collect::>()?, - pseudo_outs: read_raw_vec(read_point, inputs, r)?, - }, - }) - } - - /// Write the RctPrunable as necessary for signing the signature. - pub(crate) fn signature_write(&self, w: &mut W) -> io::Result<()> { - match self { - RctPrunable::AggregateMlsagBorromean { borromean, .. } | - RctPrunable::MlsagBorromean { borromean, .. } => { - borromean.iter().try_for_each(|rs| rs.write(w)) - } - RctPrunable::MlsagBulletproofs { bulletproof, .. } | - RctPrunable::MlsagBulletproofsCompactAmount { bulletproof, .. } | - RctPrunable::Clsag { bulletproof, .. } => bulletproof.signature_write(w), - } - } -} - -/// The RingCT proofs. -/// -/// This contains both the RctBase and RctPrunable structs. -/// -/// The C++ codebase refers to this as rct_signatures. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct RctProofs { - /// The data necessary for handling this transaction. - pub base: RctBase, - /// The data necessary for verifying this transaction. - pub prunable: RctPrunable, -} - -impl RctProofs { - /// RctType for a given RctProofs struct. - pub fn rct_type(&self) -> RctType { - match &self.prunable { - RctPrunable::AggregateMlsagBorromean { .. } => RctType::AggregateMlsagBorromean, - RctPrunable::MlsagBorromean { .. } => RctType::MlsagBorromean, - RctPrunable::MlsagBulletproofs { .. } => RctType::MlsagBulletproofs, - RctPrunable::MlsagBulletproofsCompactAmount { .. } => RctType::MlsagBulletproofsCompactAmount, - RctPrunable::Clsag { bulletproof, .. } => { - if matches!(bulletproof, Bulletproof::Original { .. }) { - RctType::ClsagBulletproof - } else { - RctType::ClsagBulletproofPlus - } - } - } - } - - /// Write the RctProofs. - pub fn write(&self, w: &mut W) -> io::Result<()> { - let rct_type = self.rct_type(); - self.base.write(w, rct_type)?; - self.prunable.write(w, rct_type) - } - - /// Serialize the RctProofs to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut serialized = vec![]; - self.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - /// Read a RctProofs. - pub fn read( - ring_length: usize, - inputs: usize, - outputs: usize, - r: &mut R, - ) -> io::Result> { - let Some((rct_type, base)) = RctBase::read(inputs, outputs, r)? else { return Ok(None) }; - Ok(Some(RctProofs { - base, - prunable: RctPrunable::read(rct_type, ring_length, inputs, outputs, r)?, - })) - } -} - -/// A pruned set of RingCT proofs. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct PrunedRctProofs { - /// The type of RctProofs this used to be. - pub rct_type: RctType, - /// The data necessary for handling this transaction. 
- pub base: RctBase, -} diff --git a/networks/monero/src/tests/mod.rs b/networks/monero/src/tests/mod.rs deleted file mode 100644 index 8d760e2a..00000000 --- a/networks/monero/src/tests/mod.rs +++ /dev/null @@ -1 +0,0 @@ -mod transaction; diff --git a/networks/monero/src/tests/transaction.rs b/networks/monero/src/tests/transaction.rs deleted file mode 100644 index 165a36c3..00000000 --- a/networks/monero/src/tests/transaction.rs +++ /dev/null @@ -1,287 +0,0 @@ -use curve25519_dalek::{ - edwards::{CompressedEdwardsY, EdwardsPoint}, - scalar::Scalar, -}; - -use serde_json::Value; - -use crate::{ - ringct::RctPrunable, - transaction::{NotPruned, Transaction, Timelock, Input}, -}; - -const TRANSACTIONS: &str = include_str!("./vectors/transactions.json"); -const CLSAG_TX: &str = include_str!("./vectors/clsag_tx.json"); -const RING_DATA: &str = include_str!("./vectors/ring_data.json"); - -#[derive(serde::Deserialize)] -struct Vector { - id: String, - hex: String, - signature_hash: String, - tx: Value, -} - -fn tx_vectors() -> Vec { - serde_json::from_str(TRANSACTIONS).unwrap() -} - -fn point(hex: &Value) -> EdwardsPoint { - CompressedEdwardsY(hex::decode(hex.as_str().unwrap()).unwrap().try_into().unwrap()) - .decompress() - .unwrap() -} - -fn scalar(hex: &Value) -> Scalar { - Scalar::from_canonical_bytes(hex::decode(hex.as_str().unwrap()).unwrap().try_into().unwrap()) - .unwrap() -} - -fn point_vector(val: &Value) -> Vec { - let mut v = vec![]; - for hex in val.as_array().unwrap() { - v.push(point(hex)); - } - v -} - -fn scalar_vector(val: &Value) -> Vec { - let mut v = vec![]; - for hex in val.as_array().unwrap() { - v.push(scalar(hex)); - } - v -} - -#[test] -fn parse() { - for v in tx_vectors() { - let tx = - Transaction::::read(&mut hex::decode(v.hex.clone()).unwrap().as_slice()).unwrap(); - - // check version - assert_eq!(tx.version(), v.tx["version"]); - - // check unlock time - match tx.prefix().additional_timelock { - Timelock::None => assert_eq!(0, v.tx["unlock_time"]), - Timelock::Block(h) => assert_eq!(h, v.tx["unlock_time"]), - Timelock::Time(t) => assert_eq!(t, v.tx["unlock_time"]), - } - - // check inputs - let inputs = v.tx["vin"].as_array().unwrap(); - assert_eq!(tx.prefix().inputs.len(), inputs.len()); - for (i, input) in tx.prefix().inputs.iter().enumerate() { - match input { - Input::Gen(h) => assert_eq!(*h, inputs[i]["gen"]["height"]), - Input::ToKey { amount, key_offsets, key_image } => { - let key = &inputs[i]["key"]; - assert_eq!(amount.unwrap_or(0), key["amount"]); - assert_eq!(*key_image, point(&key["k_image"])); - assert_eq!(key_offsets, key["key_offsets"].as_array().unwrap()); - } - } - } - - // check outputs - let outputs = v.tx["vout"].as_array().unwrap(); - assert_eq!(tx.prefix().outputs.len(), outputs.len()); - for (i, output) in tx.prefix().outputs.iter().enumerate() { - assert_eq!(output.amount.unwrap_or(0), outputs[i]["amount"]); - if output.view_tag.is_some() { - assert_eq!(output.key, point(&outputs[i]["target"]["tagged_key"]["key"]).compress()); - let view_tag = - hex::decode(outputs[i]["target"]["tagged_key"]["view_tag"].as_str().unwrap()).unwrap(); - assert_eq!(view_tag.len(), 1); - assert_eq!(output.view_tag.unwrap(), view_tag[0]); - } else { - assert_eq!(output.key, point(&outputs[i]["target"]["key"]).compress()); - } - } - - // check extra - assert_eq!(tx.prefix().extra, v.tx["extra"].as_array().unwrap().as_slice()); - - match &tx { - Transaction::V1 { signatures, .. 
} => { - // check signatures for v1 txs - let sigs_array = v.tx["signatures"].as_array().unwrap(); - for (i, sig) in signatures.iter().enumerate() { - let tx_sig = hex::decode(sigs_array[i].as_str().unwrap()).unwrap(); - for (i, sig) in sig.sigs.iter().enumerate() { - let start = i * 64; - let c: [u8; 32] = tx_sig[start .. (start + 32)].try_into().unwrap(); - let s: [u8; 32] = tx_sig[(start + 32) .. (start + 64)].try_into().unwrap(); - assert_eq!(sig.c, Scalar::from_canonical_bytes(c).unwrap()); - assert_eq!(sig.s, Scalar::from_canonical_bytes(s).unwrap()); - } - } - } - Transaction::V2 { proofs: None, .. } => assert_eq!(v.tx["rct_signatures"]["type"], 0), - Transaction::V2 { proofs: Some(proofs), .. } => { - // check rct signatures - let rct = &v.tx["rct_signatures"]; - assert_eq!(u8::from(proofs.rct_type()), rct["type"]); - - assert_eq!(proofs.base.fee, rct["txnFee"]); - assert_eq!(proofs.base.commitments, point_vector(&rct["outPk"])); - let ecdh_info = rct["ecdhInfo"].as_array().unwrap(); - assert_eq!(proofs.base.encrypted_amounts.len(), ecdh_info.len()); - for (i, ecdh) in proofs.base.encrypted_amounts.iter().enumerate() { - let mut buf = vec![]; - ecdh.write(&mut buf).unwrap(); - assert_eq!(buf, hex::decode(ecdh_info[i]["amount"].as_str().unwrap()).unwrap()); - } - - // check ringct prunable - match &proofs.prunable { - RctPrunable::Clsag { bulletproof: _, clsags, pseudo_outs } => { - // check bulletproofs - /* TODO - for (i, bp) in bulletproofs.iter().enumerate() { - match bp { - Bulletproof::Original(o) => { - let bps = v.tx["rctsig_prunable"]["bp"].as_array().unwrap(); - assert_eq!(bulletproofs.len(), bps.len()); - assert_eq!(o.A, point(&bps[i]["A"])); - assert_eq!(o.S, point(&bps[i]["S"])); - assert_eq!(o.T1, point(&bps[i]["T1"])); - assert_eq!(o.T2, point(&bps[i]["T2"])); - assert_eq!(o.taux, scalar(&bps[i]["taux"])); - assert_eq!(o.mu, scalar(&bps[i]["mu"])); - assert_eq!(o.L, point_vector(&bps[i]["L"])); - assert_eq!(o.R, point_vector(&bps[i]["R"])); - assert_eq!(o.a, scalar(&bps[i]["a"])); - assert_eq!(o.b, scalar(&bps[i]["b"])); - assert_eq!(o.t, scalar(&bps[i]["t"])); - } - Bulletproof::Plus(p) => { - let bps = v.tx["rctsig_prunable"]["bpp"].as_array().unwrap(); - assert_eq!(bulletproofs.len(), bps.len()); - assert_eq!(p.A, point(&bps[i]["A"])); - assert_eq!(p.A1, point(&bps[i]["A1"])); - assert_eq!(p.B, point(&bps[i]["B"])); - assert_eq!(p.r1, scalar(&bps[i]["r1"])); - assert_eq!(p.s1, scalar(&bps[i]["s1"])); - assert_eq!(p.d1, scalar(&bps[i]["d1"])); - assert_eq!(p.L, point_vector(&bps[i]["L"])); - assert_eq!(p.R, point_vector(&bps[i]["R"])); - } - } - } - */ - - // check clsags - let cls = v.tx["rctsig_prunable"]["CLSAGs"].as_array().unwrap(); - for (i, cl) in clsags.iter().enumerate() { - assert_eq!(cl.D, point(&cls[i]["D"])); - assert_eq!(cl.c1, scalar(&cls[i]["c1"])); - assert_eq!(cl.s, scalar_vector(&cls[i]["s"])); - } - - // check pseudo outs - assert_eq!(pseudo_outs, &point_vector(&v.tx["rctsig_prunable"]["pseudoOuts"])); - } - // TODO: Add - _ => panic!("non-null/CLSAG test vector"), - } - } - } - - // check serialized hex - let mut buf = Vec::new(); - tx.write(&mut buf).unwrap(); - let serialized_tx = hex::encode(&buf); - assert_eq!(serialized_tx, v.hex); - } -} - -#[test] -fn signature_hash() { - for v in tx_vectors() { - let tx = Transaction::read(&mut hex::decode(v.hex.clone()).unwrap().as_slice()).unwrap(); - // check for signature hashes - if let Some(sig_hash) = tx.signature_hash() { - assert_eq!(sig_hash, 
hex::decode(v.signature_hash.clone()).unwrap().as_slice()); - } else { - // make sure it is a miner tx. - assert!(matches!(tx.prefix().inputs[0], Input::Gen(_))); - } - } -} - -#[test] -fn hash() { - for v in &tx_vectors() { - let tx = Transaction::read(&mut hex::decode(v.hex.clone()).unwrap().as_slice()).unwrap(); - assert_eq!(tx.hash(), hex::decode(v.id.clone()).unwrap().as_slice()); - } -} - -#[test] -fn clsag() { - /* - // following keys belong to the wallet that created the CLSAG_TX, and to the - // CLSAG_TX itself and here for debug purposes in case this test unexpectedly fails some day. - let view_key = "9df81dd2e369004d3737850e4f0abaf2111720f270b174acf8e08547e41afb0b"; - let spend_key = "25f7339ce03a0206129c0bdd78396f80bf28183ccd16084d4ab1cbaf74f0c204"; - let tx_key = "650c8038e5c6f1c533cacc1713ac27ef3ec70d7feedde0c5b37556d915b4460c"; - */ - - #[derive(serde::Deserialize)] - struct TxData { - hex: String, - tx: Value, - } - #[derive(serde::Deserialize)] - struct OutData { - key: Value, - mask: Value, - } - let tx_data = serde_json::from_str::(CLSAG_TX).unwrap(); - let out_data = serde_json::from_str::>>(RING_DATA).unwrap(); - let tx = - Transaction::::read(&mut hex::decode(tx_data.hex).unwrap().as_slice()).unwrap(); - - // gather rings - let mut rings = vec![]; - for data in out_data { - let mut ring = vec![]; - for out in &data { - ring.push([point(&out.key), point(&out.mask)]); - } - rings.push(ring) - } - - // gather key images - let mut key_images = vec![]; - let inputs = tx_data.tx["vin"].as_array().unwrap(); - for input in inputs { - key_images.push(point(&input["key"]["k_image"])); - } - - // gather pseudo_outs - let mut pseudo_outs = vec![]; - let pouts = tx_data.tx["rctsig_prunable"]["pseudoOuts"].as_array().unwrap(); - for po in pouts { - pseudo_outs.push(point(po)); - } - - // verify clsags - match tx { - Transaction::V2 { proofs: Some(ref proofs), .. } => match &proofs.prunable { - RctPrunable::Clsag { bulletproof: _, clsags, .. 
} => { - for (i, cls) in clsags.iter().enumerate() { - cls - .verify(&rings[i], &key_images[i], &pseudo_outs[i], &tx.signature_hash().unwrap()) - .unwrap(); - } - } - // TODO: Add - _ => panic!("non-CLSAG test vector"), - }, - // TODO: Add - _ => panic!("non-CLSAG test vector"), - } -} diff --git a/networks/monero/src/tests/vectors/clsag_tx.json b/networks/monero/src/tests/vectors/clsag_tx.json deleted file mode 100644 index b41b5a15..00000000 --- a/networks/monero/src/tests/vectors/clsag_tx.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "hex": "020002020010020102010101010302010c0201060103d8c6f077bb201ffdc16407df206cb5962ec635a4a4c9cd7551b88698d1bef497020010000402040801010303030101020104018267c18a435f4a5dea50ad0f10755a4fd7783340beb3a3903a67fa14938edf420200039716cdbae38def9a74e7df5402c108270a1d5fc87c7e5ebaaaed68aae77701e3cf0003082e27ca8af2b9e3004156c152aa98503b548b1591fdcd839ab550612ae6c9dc7e2c01a57c93fb0ca77ab96b7dfd7380c4842d1e58c055430e0d425cd1c76c578cca390209019519f8c1ce5e20300680e5a0da09acd081c0dd2c7178a341382720ada87588a96ac5cff1623fd2e4aaf56ed395a325393fbd950428a3ff7e6dc6c559669c8d5e8fb80d5e979c8a81c89754201d4bd094c37c143759260e282555dfed3100013256ca0156c1c34dc569565039c27f784b45ec50ba816f69b54ae3df98d841070f51aec2a8afd4991d5bbf50b785d0bdc2a6491c5ab45795d7ce3b08d63282907c52f9951e711cb6a2cba1aba1f7849a669345711263cc736e2d4e1c7308c5e7cb97e948ed647f89fc9869fb9c9a5a742e5e7be419cce7a5e99a5b21cb491f00003ec1da7e8cec39b709d46fab65f59f5f6147c1e4429d18d8bf6e3e62639102a300ce6006a20403ef021a197b6c632ac280e674c7aad08290424271dec4de010710ee7895389150dd15017cfd5f47ea9dddd11e218251433906f62aff6b8cb2b5f8cca25add297da40d7cddbea718703ff9ad3795fcdc172a34c73179326c16f5274de69073281f3276d800fe7fbd01a07d14a42ce367c32727a9f0bc8c8d6ab4b3b17dd981bdc522595fc1bfe83ad3976876fb3bb2e4bd4392ac1a94ac22cbcc326ede82d1af2f1ec9d4ac596b22d035c7f1ac11d8ace7c5a70b30e39596ded794077ae55144e3f4b0c17cbc4f5a960129eb5321077bb7e2b9e4621e17fbcf2960abae1e1a9f89af21cc2fcce410a839186b8da92966415d6dd3ad772d652cbe075af46b97ae7062ccbaa328e371a351492f6860832c5bfdd7b77e8611b7441ecfa0967e66c13cb9ab348bf78a15bbd2d9bec6b8ec5cdd5f84a91580758247da84afed22ec2cf89d632e406fdc927e48ebfaacd0a0b715a968c9cfc74fff611f4cda4b6cb9eb1e044a71c58a832c5ae7551833c0ba2ab6f9d1e466e5757c230157cd3099686bf89e8f9eb822ea702e13e38f669603dd3c7c8be90daf192de689ab2078d16cf489f3782e70469fbe01f918297e0db6cef3bf48e0293b6856d348fda3a2d76bf899432acef74aa42961be28635d1899509b9d368bc42a18e08d2b94b055da149139c347f7c0b2a381dfaa12aaaabe076f38fe12372d1ba17cd0d808ed5b4b911f8cee2e45841a4c879f40968e455ba5a796b27c968be0f7e88daf0b766fcf2c5986fbe14b2e0433cecb04af100ec81d03e2875d25483d0a9dc9dc0a42150a64e894af1655e9ab99f629826f63c01e44b366c5fe2959c7396450360a3156ad081764b5904a7654fe82a2b1d52db46361c0b08dfeee383165641e6e0e5733e5fb99fc8c75ba5cf230518b1e384d4441251840e810aed950eb27899809711d42c54f8fc0647537e249e510738412c399b915ff923e9209cdd12820720b8b07086f3361d6b95934f994a8ac4fb6a9598f11d54bbbcfc33e71b9f73570012b3520914dfab3f3fe15abad981d8ed71dab71ac8f45f187f62ad440a83d000e08fc039ece25e7eadd0ce169ccda8182321cd73eba6f6d0e4f482a061eb4190fe4051e6988a47165cb2cf39973b1a555cc92d662f4e856a91c0cd51a486b960cfc850c4fc854f9a4aade4336942cb50cb50ae3bc31d3da50b719196d5fd40f02b1addad16de443e825bf7177beaac79adc6b198115f408a391a94a8517b7e50fd57663df52309c0a00b0b61373f895206771be8b185c54da6f805b561264aa019ef3bd1dcded26fc45a6a0e39cbb7bc6a7025ab858bc8e54a99da3aedce68f00bacc83a7eb3553ac626881188329b6ba86a53aaaaed9bd9efb0528f08c649c093f005dd0fa9620b0a40fc3f248c1d0edb8f70ff05c7254de0f8faab8315443021b3d279f5a4218c3126de
e5d6eceae1c49eabdd04d8a0cdb6814c422b3ea69b3be3794f42081e65dc47b1d2fc2f5705cef816596416c373bd60abc4ff06b3f02ef34dc290f987607bdb16c1650307ea3bc0fc7a62ce86e7129293d7530c3cf09dc731e22c18daec3c639575421b079fa57be56693278125b2aa50c299ac4f8020714c6ac666b7fb7471c63adda93f1fa6733729f7b6e326ac04744f9c3223d0456ee515d0bfe27101f907cca958dddb90717bed5229c1a02928fab9e7be4e4012c96d3acda0ebca72e63f41efdad5c9baa19bffd1216e4c3e2e5564e823b57054a3a2cf2c3318f214d23f24304655e73d5001518633757f6cbe6711f2a5f2601df20a753caaa87a32fe627b6ce7573ce77957c7b6401959824fd49bc7063670fb18fcc1f2de113affd868eb76c7fbe12997024dc493b6a26563a80574a52760a7b384fd2f9d23d8dfe4d226b15086751d4f383d4bca7cf080fd471b8a218b709b539f4e5417677f43627ef06b70c24edacce80bdd10ca2ac9af8aa3f6453cc08da75ee99409447225843c143fca551167a4aa5fd2354a5420c35c0006731950d6c356218d8cf365e084d9bb52c793322aa2d8d05c4164d9ffe81ce09e4f17802efa7461d375a5cff4c17ab0cdc5767a8f7d34091921fd4620660470ea9305f00dd9e6ee5ca4054ac0b36d4e2b58006224559cc19a3a4e48f66aa596295541007f2524b2198f3c0c688fbbc38590f59674b25e528ac2115a0f7da805d9c5810065f95c7c7ece23d2de922e55a77f967baab6d9db543e49734a8c4bc23c5ae640edb904851b4856c5a1ce4729957f4d000e70cb88c56d80bf6e693a5c67d5661911374d7aa7f6e6f4a5b340a9954d9cf8bd5d2f4b4a37f946e15bca800978ae745eec2096b3def10f9703a6e2040df0d8a89bf1562bb29d3a13df2f9a77c3e064e", - "tx": { - "version": 2, - "unlock_time": 0, - "vin": [ { - "key": { - "amount": 0, - "key_offsets": [ 2, 1, 2, 1, 1, 1, 1, 3, 2, 1, 12, 2, 1, 6, 1, 3], - "k_image": "d8c6f077bb201ffdc16407df206cb5962ec635a4a4c9cd7551b88698d1bef497" - } - }, { - "key": { - "amount": 0, - "key_offsets": [ 0, 4, 2, 4, 8, 1, 1, 3, 3, 3, 1, 1, 2, 1, 4, 1 - ], - "k_image": "8267c18a435f4a5dea50ad0f10755a4fd7783340beb3a3903a67fa14938edf42" - } - } - ], - "vout": [ { - "amount": 0, - "target": { - "tagged_key": { - "key": "9716cdbae38def9a74e7df5402c108270a1d5fc87c7e5ebaaaed68aae77701e3", - "view_tag": "cf" - } - } - }, { - "amount": 0, - "target": { - "tagged_key": { - "key": "082e27ca8af2b9e3004156c152aa98503b548b1591fdcd839ab550612ae6c9dc", - "view_tag": "7e" - } - } - } - ], - "extra": [ 1, 165, 124, 147, 251, 12, 167, 122, 185, 107, 125, 253, 115, 128, 196, 132, 45, 30, 88, 192, 85, 67, 14, 13, 66, 92, 209, 199, 108, 87, 140, 202, 57, 2, 9, 1, 149, 25, 248, 193, 206, 94, 32, 48 - ], - "rct_signatures": { - "type": 6, - "txnFee": 2605200000, - "ecdhInfo": [ { - "amount": "acd081c0dd2c7178" - }, { - "amount": "a341382720ada875" - }], - "outPk": [ "88a96ac5cff1623fd2e4aaf56ed395a325393fbd950428a3ff7e6dc6c559669c", "8d5e8fb80d5e979c8a81c89754201d4bd094c37c143759260e282555dfed3100"] - }, - "rctsig_prunable": { - "nbp": 1, - "bpp": [ { - "A": "3256ca0156c1c34dc569565039c27f784b45ec50ba816f69b54ae3df98d84107", - "A1": "0f51aec2a8afd4991d5bbf50b785d0bdc2a6491c5ab45795d7ce3b08d6328290", - "B": "7c52f9951e711cb6a2cba1aba1f7849a669345711263cc736e2d4e1c7308c5e7", - "r1": "cb97e948ed647f89fc9869fb9c9a5a742e5e7be419cce7a5e99a5b21cb491f00", - "s1": "003ec1da7e8cec39b709d46fab65f59f5f6147c1e4429d18d8bf6e3e62639102", - "d1": "a300ce6006a20403ef021a197b6c632ac280e674c7aad08290424271dec4de01", - "L": [ "10ee7895389150dd15017cfd5f47ea9dddd11e218251433906f62aff6b8cb2b5", "f8cca25add297da40d7cddbea718703ff9ad3795fcdc172a34c73179326c16f5", "274de69073281f3276d800fe7fbd01a07d14a42ce367c32727a9f0bc8c8d6ab4", "b3b17dd981bdc522595fc1bfe83ad3976876fb3bb2e4bd4392ac1a94ac22cbcc", "326ede82d1af2f1ec9d4ac596b22d035c7f1ac11d8ace7c5a70b30e39596ded7", "94077ae55144e3f4b0c17cbc4f5a960129eb5321077bb7e2b9e4621e17fbcf29", 
"60abae1e1a9f89af21cc2fcce410a839186b8da92966415d6dd3ad772d652cbe" - ], - "R": [ "5af46b97ae7062ccbaa328e371a351492f6860832c5bfdd7b77e8611b7441ecf", "a0967e66c13cb9ab348bf78a15bbd2d9bec6b8ec5cdd5f84a91580758247da84", "afed22ec2cf89d632e406fdc927e48ebfaacd0a0b715a968c9cfc74fff611f4c", "da4b6cb9eb1e044a71c58a832c5ae7551833c0ba2ab6f9d1e466e5757c230157", "cd3099686bf89e8f9eb822ea702e13e38f669603dd3c7c8be90daf192de689ab", "2078d16cf489f3782e70469fbe01f918297e0db6cef3bf48e0293b6856d348fd", "a3a2d76bf899432acef74aa42961be28635d1899509b9d368bc42a18e08d2b94" - ] - } - ], - "CLSAGs": [ { - "s": [ "b055da149139c347f7c0b2a381dfaa12aaaabe076f38fe12372d1ba17cd0d808", "ed5b4b911f8cee2e45841a4c879f40968e455ba5a796b27c968be0f7e88daf0b", "766fcf2c5986fbe14b2e0433cecb04af100ec81d03e2875d25483d0a9dc9dc0a", "42150a64e894af1655e9ab99f629826f63c01e44b366c5fe2959c7396450360a", "3156ad081764b5904a7654fe82a2b1d52db46361c0b08dfeee383165641e6e0e", "5733e5fb99fc8c75ba5cf230518b1e384d4441251840e810aed950eb27899809", "711d42c54f8fc0647537e249e510738412c399b915ff923e9209cdd12820720b", "8b07086f3361d6b95934f994a8ac4fb6a9598f11d54bbbcfc33e71b9f7357001", "2b3520914dfab3f3fe15abad981d8ed71dab71ac8f45f187f62ad440a83d000e", "08fc039ece25e7eadd0ce169ccda8182321cd73eba6f6d0e4f482a061eb4190f", "e4051e6988a47165cb2cf39973b1a555cc92d662f4e856a91c0cd51a486b960c", "fc850c4fc854f9a4aade4336942cb50cb50ae3bc31d3da50b719196d5fd40f02", "b1addad16de443e825bf7177beaac79adc6b198115f408a391a94a8517b7e50f", "d57663df52309c0a00b0b61373f895206771be8b185c54da6f805b561264aa01", "9ef3bd1dcded26fc45a6a0e39cbb7bc6a7025ab858bc8e54a99da3aedce68f00", "bacc83a7eb3553ac626881188329b6ba86a53aaaaed9bd9efb0528f08c649c09"], - "c1": "3f005dd0fa9620b0a40fc3f248c1d0edb8f70ff05c7254de0f8faab831544302", - "D": "1b3d279f5a4218c3126dee5d6eceae1c49eabdd04d8a0cdb6814c422b3ea69b3" - }, { - "s": [ "be3794f42081e65dc47b1d2fc2f5705cef816596416c373bd60abc4ff06b3f02", "ef34dc290f987607bdb16c1650307ea3bc0fc7a62ce86e7129293d7530c3cf09", "dc731e22c18daec3c639575421b079fa57be56693278125b2aa50c299ac4f802", "0714c6ac666b7fb7471c63adda93f1fa6733729f7b6e326ac04744f9c3223d04", "56ee515d0bfe27101f907cca958dddb90717bed5229c1a02928fab9e7be4e401", "2c96d3acda0ebca72e63f41efdad5c9baa19bffd1216e4c3e2e5564e823b5705", "4a3a2cf2c3318f214d23f24304655e73d5001518633757f6cbe6711f2a5f2601", "df20a753caaa87a32fe627b6ce7573ce77957c7b6401959824fd49bc7063670f", "b18fcc1f2de113affd868eb76c7fbe12997024dc493b6a26563a80574a52760a", "7b384fd2f9d23d8dfe4d226b15086751d4f383d4bca7cf080fd471b8a218b709", "b539f4e5417677f43627ef06b70c24edacce80bdd10ca2ac9af8aa3f6453cc08", "da75ee99409447225843c143fca551167a4aa5fd2354a5420c35c0006731950d", "6c356218d8cf365e084d9bb52c793322aa2d8d05c4164d9ffe81ce09e4f17802", "efa7461d375a5cff4c17ab0cdc5767a8f7d34091921fd4620660470ea9305f00", "dd9e6ee5ca4054ac0b36d4e2b58006224559cc19a3a4e48f66aa596295541007", "f2524b2198f3c0c688fbbc38590f59674b25e528ac2115a0f7da805d9c581006"], - "c1": "5f95c7c7ece23d2de922e55a77f967baab6d9db543e49734a8c4bc23c5ae640e", - "D": "db904851b4856c5a1ce4729957f4d000e70cb88c56d80bf6e693a5c67d566191" - }], - "pseudoOuts": [ "1374d7aa7f6e6f4a5b340a9954d9cf8bd5d2f4b4a37f946e15bca800978ae745", "eec2096b3def10f9703a6e2040df0d8a89bf1562bb29d3a13df2f9a77c3e064e"] - } - } -} diff --git a/networks/monero/src/tests/vectors/ring_data.json b/networks/monero/src/tests/vectors/ring_data.json deleted file mode 100644 index 21601245..00000000 --- a/networks/monero/src/tests/vectors/ring_data.json +++ /dev/null @@ -1,134 +0,0 @@ -[ - [ - { - "key": 
"a1abc026eb4a18ca197ca7dbd32f7a4e66cda075a7c07ee6cbe68639a4b4ee46", - "mask": "48d7f0b8796720c7edef5e3797135b3e5ad2ae23db1d934bcf6d6bc396b8ed47" - }, - { - "key": "a374121e22ed620248c970e7f32ea7598b054f73c1edec33c4e1b18a73c35c14", - "mask": "15beeeedc9b33615097e0fac0acc6a0984e139fa2b4196896877a8cc3ebc3590" - }, - { - "key": "e2ac4d36f9567092563a09c7a19c5e21c39598f5d9d9dd8733b61cebb3ea8662", - "mask": "3d9105f85f9edd3f7f72b62385bb9a42d549331d3babea6cf73bbbcde8e4f53c" - }, - { - "key": "68c08bbbfdb3ad736dfed5854264a3b410de40d8f3d02b22f5cf75f69f6e2e1f", - "mask": "36c39958ddcad401d85d63883da510505650321ad7a26859e8b1b6c28204d274" - }, - { - "key": "7b8b580f7a2288040a0755810c5708c5a8277d139762545082785260275678e4", - "mask": "498105ec1dc7559becfb833140c5049382b846eff812616a2414494d7a46930d" - }, - { - "key": "348d9be3f2b42686c2a919ba1515c5a540c5ffb4c1762e4a371b42643ff69b3b", - "mask": "eeca9ed04ba72a89dbd85564cf3084daad577634db09d048895524f1ded26b19" - }, - { - "key": "91a59666453bcc55d2a02480dfe2029082e24548cdfd7d614be31657fdd75357", - "mask": "ae7f14cbb31d24b727d8680fbd03bcc177fc67b982edeca54e6b2b47d6b8d012" - }, - { - "key": "9868cb5201d4b00e5a3552a7f485662dfb3ca74b79f6bd069ee0a4650597abbc", - "mask": "570e3b126e429022177d22fd09d73c6950676c82a4872addb3afa950646c5f1d" - }, - { - "key": "56d05fced0eb9dda981a26fdd4170f46de2b0a35c70f02ceae23ad9f2ed8a5b0", - "mask": "a0e20ecd8526bd2a640c4df42c187fcf75d05660ba61262c93b19384b8fad49b" - }, - { - "key": "9e82f65349da1e0dacf5d96a9c0f80c0c5fd0fc2437cafbcc38b2f20e721abc5", - "mask": "e83344061c0632631eec627bb2103898cfc230b35e0177681e48f0ee4b6d37c8" - }, - { - "key": "2590a255607ab619fcd62142f4b002818f2d55dbb5b8665500854203b83e5c86", - "mask": "e9c103485b3f4dadab560e8efc67c594ba11f16513685f0faff78c6fdf4de061" - }, - { - "key": "c0e22332d897f0637440ad151089652e59dcbf27dc84b11c2efbe686a9e7afb5", - "mask": "363d5dcbc765854e830dc52762e24f71d7c85f6095227551f3ef6ada6aa25964" - }, - { - "key": "360e4efb484e8d419bdda5f581703de716671e3516d1c9deb97204f9b4c9c0d4", - "mask": "29ef141fa24ef86af35af48094928392543a9e7e7726ae92a9da322178e680ad" - }, - { - "key": "5bb515d131f03bbb3be4e710b83589f62f07f185b9ad344095df47092f41b8e0", - "mask": "94fd6083b669533eebfa49a1cb47b94555e8be7d5f84573354b0201229d07bed" - }, - { - "key": "5ce647c3017ec3c36a2385e2b11fb9a452a5766987d80531bec75952924ed896", - "mask": "8f61d7be3b4f2252810fbade3bbac970ccff55c453e34405836545f3e49be6f5" - }, - { - "key": "dbc787f7ca41996a981a0ebb498a8d565dfa62a3b3b169c4c3018fff2233a757", - "mask": "9bb749be705747d9c28168c0446d589b3ac18949fa0087e230805aaff5a9982f" - } - ], - [ - { - "key": "d10621b38fbc5237061b2d3503866f0be46aaa0694c9f9d747f7ed19acebe8ef", - "mask": "a1a7a42155f0abff0353a6008eda2a9b16d9ffcf7584a38933cce3e3976987cd" - }, - { - "key": "a9afb71ae2db057049131df856d246f7088a656cc85297ce7e1ef339bd6e0c96", - "mask": "96e9dc7a96a19c9ebaeb33ab94e7e9d86d88df1c1b11006b297b74f529f37f5a" - }, - { - "key": "68c08bbbfdb3ad736dfed5854264a3b410de40d8f3d02b22f5cf75f69f6e2e1f", - "mask": "36c39958ddcad401d85d63883da510505650321ad7a26859e8b1b6c28204d274" - }, - { - "key": "74193737897162c8b2c380ff34674e3bfbfb2ac7e1c7aacbb13f2a3a8fb2b043", - "mask": "8157e47f9998f4afdce72a328eb9e897a57a5819b838ed1b517ea2c938e0c94f" - }, - { - "key": "96e002055aafbfdd1136cc587543e5c0e51da0d9682879c107abab3cdcdb9479", - "mask": "f76929f6dba6d75bec713a02677aa7ad39dd4319077bfa7189fe65fe86b2ee9a" - }, - { - "key": "2a72f3b2cb3e10727fbfc09d2c726763000a92f77f2f000c63dee714a6c7424d", - "mask": 
"db459ca84da12ebab294b31961838c43cee1868f0690d143c93da1f2f825d07f" - }, - { - "key": "797f5f3a30ce8d4b19305ca9d8193033d649f0a74705203da9f3f106ad60dfb4", - "mask": "39339ac52a1194790b1bb5db0b119d403a1d5dcc4db4f8819fca4d425d5b2614" - }, - { - "key": "b0c42947607815eba320f97e7c9ecd092fe187fb67d7263540015e6308f6dc1a", - "mask": "6b92c8c269319192298307feb26a7b64fb78d877ac2e49a594650227f26e64bc" - }, - { - "key": "59015cfd533a742857454dce9d82846fce08ab7d96c5583640cf6e38ecf0445e", - "mask": "cf375f037e253ab6f52699fbba73f796ee2140e546710a1faa3c9f09b4f570ac" - }, - { - "key": "c0e22332d897f0637440ad151089652e59dcbf27dc84b11c2efbe686a9e7afb5", - "mask": "363d5dcbc765854e830dc52762e24f71d7c85f6095227551f3ef6ada6aa25964" - }, - { - "key": "360e4efb484e8d419bdda5f581703de716671e3516d1c9deb97204f9b4c9c0d4", - "mask": "29ef141fa24ef86af35af48094928392543a9e7e7726ae92a9da322178e680ad" - }, - { - "key": "92619df80e988c0b2dfb63dd6324ff2979ca319bf8200260b28944753dda4ac1", - "mask": "0a574b0aca86da38dd7aeb58d92550dc558c680deaa63c69e31e9a78e88a3559" - }, - { - "key": "0ac7e630a04be92b1f3c821c50ec80a2813f7bee4c1ab117967bc26263d4fd84", - "mask": "ed0bd4d707ab3deaf18437ae9d945da2d3f2c6e758068ce57972d676da2a24bf" - }, - { - "key": "b97300cdb6ef63a6990686521138b5c7c80cf6c9a8844518352f3ef1130d413d", - "mask": "690c312586bbdf123d9e34ad7955e1c2ae5259cd3effd0b08b19cb556d65ec25" - }, - { - "key": "1a62237b77e28713e5a47129f1ba18be27a5139d6f1e6d6d38c78705143b3ea5", - "mask": "39f6ba6d816695f20212042b1048301cd637161f685d7c2b61379b907b7b4c59" - }, - { - "key": "ffca492152d8206bb7f215d2408669856203edffd424f4fc6a0304def2195717", - "mask": "cd7684b7c32531b363784d86bee71731c113c545c67103ec1265c362de7e5555" - } - ] -] diff --git a/networks/monero/src/tests/vectors/transactions.json b/networks/monero/src/tests/vectors/transactions.json deleted file mode 100644 index 1c6cb3c3..00000000 --- a/networks/monero/src/tests/vectors/transactions.json +++ /dev/null @@ -1,324 +0,0 @@ -[ - { - "id": "373a2ace627debaf8bfd493155fd3c00c5c2fc164400ec22e79ee79a1ac487c4", - "hex": "02f78dae0101ffbb8dae0101e0b2d2b9c21103e6854544fbb66d55fc3546f4d3e69f8234257b69fa2237712af3b058a5f01ba14a340173f263b8a4bbc46dfb6f29e0584adbfffdf7a47c929d77c2d0c142afea2b05300211000000f7eeeb3f0e00000000000000000000", - "signature_hash": "", - "tx": { - "version": 2, - "unlock_time": 2852599, - "vin": [ - { - "gen": { - "height": 2852539 - } - } - ], - "vout": [ - { - "amount": 601953180000, - "target": { - "tagged_key": { - "key": "e6854544fbb66d55fc3546f4d3e69f8234257b69fa2237712af3b058a5f01ba1", - "view_tag": "4a" - } - } - } - ], - "extra": [ 1, 115, 242, 99, 184, 164, 187, 196, 109, 251, 111, 41, 224, 88, 74, 219, 255, 253, 247, 164, 124, 146, 157, 119, 194, 208, 193, 66, 175, 234, 43, 5, 48, 2, 17, 0, 0, 0, 247, 238, 235, 63, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0 ], - "rct_signatures": { - "type": 0 - } - } - }, - { - "id": "c39652b79beb888464525fee06c3d078463af5b76d493785f8903cae93405603", - "hex": 
"02000102000be9aac314d8e710844d8d258133d9f701b0649e568b0cb50b1dea8103138a37c5543f3c632ef80331940cabeba29b758045db328d8d8a99de380200025155f659da61b507b0b8591cbef0ba1534b9db29a69be4a933f7897e58870b250002002de4643160fb8351a841f8079aa5af2ac62c9631bb2d5a48fcdb564cb699d12c01eaaa5acba0bc44657da783903d3de7febf7124ae03cf578938244068b475d906020901da0190b93466979e0580efcf037eafd43a457b940e592f261bd165059a9d3b92aa3baaed18346157d1361e30c553df17940985b375fc89b9c09d613d5daf007ed1ce05128cfc4d37181a5f2d6f56690357032dbcc6e85424e5ad79338801df166e5b93e427f8b86079e0de0695d24a4ac5098d98876da5f892359c982015fc0461847fd2955f6099cfc37d348bdd003f064a45c6959478807f7cb6263fc539b2616e13117fc82303aba9afeed67104c344f971a182e741436bcc040468fa6307bb478ed8ef47397e603fe7d50d3c56f5c50c4410db470e21c04b5b883a94c78bc19587d8a611c701d51b956ae06e92987ecdcd5237a65725fdcf7b52a9085831c7214b1282fbd1faf81277a6519c941907f0ef838576e610272512d1860a07a19ac9aec33168b06c937d1ead2e63e58345252be0a5fe15d38a371a6347355d16104b19f1d4a4c18e07dba72bbf8c8150bade347831b446f2ad9f5af4aa91cd35ddf3c64b3f0690c77634efd4ef07b817c6bb90a29f917769cd2978d37a14ed61b437858c93100f55b82ac7f94c0f83ac68a54df1e4d2c54b9c96661f7a41ed9fff8b28fc31949a07a7b298b5819ac7fe7c9490b20a1107ba6f6b94e6d1a58fab09bed6d096edaaea887addea1b7735228bdff409674c0f8bfceb815cdbae6fc5de41379ff842eebe205611c58d80371feb97b1f2a23fb65a474665f991113407da1c0a6f6d1fa7b6ad659f45354178270f5541eb948cbc861c53705982fa67f88c70cb936e86bf45bb17d6214ab716dc0c5fa3b7f3a181e23fad244741c1b6c44c557cf0e4dfe7a1acf75d39bfb810a798b33bcbfc73d9a11b0c648c84ed6e28b27e4aa8681aa33c0779e01ac2d16f58031d92fae578d92b6ab5d7c98a47b64ead0ad4f036922b4604d1b23dd4f4b798bae917294fc458000e7ce1449060a98c62bb1d86a0841f26e70550778103fcc3d70b9adcdb32cc750016b88178513e94d61eb65c8381532feb3368f3db60638f3b88575dded95eb92910d16c51351e7379e5df4f1fd2ffe1b80b958ef6f189ff0483a802bd1c49ed56131e1cc02dbe0949251adbc980b5a7f45317bab8be87961b6b9496d24779023f77dd1b4552fc0b522e857f38e9c63349955e3be4b3a15e86aa96943900797b8139cbfdcd940903ce41249b9e7e4a933b88a204b1ded0e2ba0fa6a1cf3ba4c3ffcc0ca96b06b400051d66ede7013ccc453e4f042e17e1091a536456460fe3551f36c99c05d59105802ee93d645167ee586e71fcc23c33d2362754065955eca45f294bb28f38960826166dfdde409f76f75366234b0a08f6e49caa1c2169e8d9376673e9835dc709bb94e4d8a164ee6278998e891e05086ad8996e496e833d975cb3d4fceb3f7203a7b365891227448b2e968b5bb653b29741bad9ba107b417354e6e3e5acf5710a346e232e9f02606a31bd8b692a67578637a5ff82339dde31425ad59ada17360184f24e6b542810ec43ef11d6a8b2eba53b288ca445d1e1a0daf059320978160eb610631a9b382291418309fc3ceecf3a08900ff5d301044a3cf565567129b10dc2aa970f43493920c4ef3df1c408e12bbcfb01573b79e296e34912cde62716033f73cf74f42b0fde655f2848bec39fd63f3485499147fe170861e9e0eae0e90d400328b595456b6451dc07eac7c8f6849dc065bb7f5ac49cff1530658bcc4d09d03e2f9611a5561dc3a90b3b2f3d933dc110db73904aeee5abcd2362a0e1b045c4cf22334e32fd33193fbfa1c4cb9e3182fb2357cf7a800f552b87579cc41d99", - "signature_hash": "686cc5232f8d0d90c6a447b10b5296c98b0b4ad5e2f88f278a6bd8f3eeb13dbf", - "tx": { - "version": 2, - "unlock_time": 0, - "vin": [ - { - "key": { - "amount": 0, - "key_offsets": [43046249, 275416, 9860, 4749, 6529, 31705, 12848, 11038, 1547, 1461, 29], - "k_image": "ea8103138a37c5543f3c632ef80331940cabeba29b758045db328d8d8a99de38" - } - } - ], - "vout": [ - { - "amount": 0, - "target": { - "key": "5155f659da61b507b0b8591cbef0ba1534b9db29a69be4a933f7897e58870b25" - } - }, - { - "amount": 0, - "target": { - "key": "002de4643160fb8351a841f8079aa5af2ac62c9631bb2d5a48fcdb564cb699d1" - } - } - ], - "extra": [ 1, 234, 170, 90, 203, 160, 
188, 68, 101, 125, 167, 131, 144, 61, 61, 231, 254, 191, 113, 36, 174, 3, 207, 87, 137, 56, 36, 64, 104, 180, 117, 217, 6, 2, 9, 1, 218, 1, 144, 185, 52, 102, 151, 158], - "rct_signatures": { - "type": 5, - "txnFee": 7600000, - "ecdhInfo": [ { - "amount": "7eafd43a457b940e" - }, { - "amount": "592f261bd165059a" - }], - "outPk": [ "9d3b92aa3baaed18346157d1361e30c553df17940985b375fc89b9c09d613d5d", "af007ed1ce05128cfc4d37181a5f2d6f56690357032dbcc6e85424e5ad793388"] - }, - "rctsig_prunable": { - "nbp": 1, - "bp": [ { - "A": "df166e5b93e427f8b86079e0de0695d24a4ac5098d98876da5f892359c982015", - "S": "fc0461847fd2955f6099cfc37d348bdd003f064a45c6959478807f7cb6263fc5", - "T1": "39b2616e13117fc82303aba9afeed67104c344f971a182e741436bcc040468fa", - "T2": "6307bb478ed8ef47397e603fe7d50d3c56f5c50c4410db470e21c04b5b883a94", - "taux": "c78bc19587d8a611c701d51b956ae06e92987ecdcd5237a65725fdcf7b52a908", - "mu": "5831c7214b1282fbd1faf81277a6519c941907f0ef838576e610272512d1860a", - "L": [ "a19ac9aec33168b06c937d1ead2e63e58345252be0a5fe15d38a371a6347355d", "16104b19f1d4a4c18e07dba72bbf8c8150bade347831b446f2ad9f5af4aa91cd", "35ddf3c64b3f0690c77634efd4ef07b817c6bb90a29f917769cd2978d37a14ed", "61b437858c93100f55b82ac7f94c0f83ac68a54df1e4d2c54b9c96661f7a41ed", "9fff8b28fc31949a07a7b298b5819ac7fe7c9490b20a1107ba6f6b94e6d1a58f", "ab09bed6d096edaaea887addea1b7735228bdff409674c0f8bfceb815cdbae6f", "c5de41379ff842eebe205611c58d80371feb97b1f2a23fb65a474665f9911134"], - "R": [ "da1c0a6f6d1fa7b6ad659f45354178270f5541eb948cbc861c53705982fa67f8", "8c70cb936e86bf45bb17d6214ab716dc0c5fa3b7f3a181e23fad244741c1b6c4", "4c557cf0e4dfe7a1acf75d39bfb810a798b33bcbfc73d9a11b0c648c84ed6e28", "b27e4aa8681aa33c0779e01ac2d16f58031d92fae578d92b6ab5d7c98a47b64e", "ad0ad4f036922b4604d1b23dd4f4b798bae917294fc458000e7ce1449060a98c", "62bb1d86a0841f26e70550778103fcc3d70b9adcdb32cc750016b88178513e94", "d61eb65c8381532feb3368f3db60638f3b88575dded95eb92910d16c51351e73"], - "a": "79e5df4f1fd2ffe1b80b958ef6f189ff0483a802bd1c49ed56131e1cc02dbe09", - "b": "49251adbc980b5a7f45317bab8be87961b6b9496d24779023f77dd1b4552fc0b", - "t": "522e857f38e9c63349955e3be4b3a15e86aa96943900797b8139cbfdcd940903" - }], - "CLSAGs": [ { - "s": [ "ce41249b9e7e4a933b88a204b1ded0e2ba0fa6a1cf3ba4c3ffcc0ca96b06b400", "051d66ede7013ccc453e4f042e17e1091a536456460fe3551f36c99c05d59105", "802ee93d645167ee586e71fcc23c33d2362754065955eca45f294bb28f389608", "26166dfdde409f76f75366234b0a08f6e49caa1c2169e8d9376673e9835dc709", "bb94e4d8a164ee6278998e891e05086ad8996e496e833d975cb3d4fceb3f7203", "a7b365891227448b2e968b5bb653b29741bad9ba107b417354e6e3e5acf5710a", "346e232e9f02606a31bd8b692a67578637a5ff82339dde31425ad59ada173601", "84f24e6b542810ec43ef11d6a8b2eba53b288ca445d1e1a0daf059320978160e", "b610631a9b382291418309fc3ceecf3a08900ff5d301044a3cf565567129b10d", "c2aa970f43493920c4ef3df1c408e12bbcfb01573b79e296e34912cde6271603", "3f73cf74f42b0fde655f2848bec39fd63f3485499147fe170861e9e0eae0e90d"], - "c1": "400328b595456b6451dc07eac7c8f6849dc065bb7f5ac49cff1530658bcc4d09", - "D": "d03e2f9611a5561dc3a90b3b2f3d933dc110db73904aeee5abcd2362a0e1b045" - }], - "pseudoOuts": [ "c4cf22334e32fd33193fbfa1c4cb9e3182fb2357cf7a800f552b87579cc41d99"] - } - } - }, - { - "id": "2f650db5bafd37ce8982f37ee443f2ecf0a8f08f639591583aecb6cd74d5a80c", - "hex": 
"020001020010d6f68721ea820c88d539a68f0b84af09d19401c08a02f0ee048250c219958401a49f02b33fa321a527dd227f94e759b07b2c025ce22a57db0cb062bfd1f50f6086b14ca3742730c7fa9e5d040003fcdf91296bb4560335835fda30804a7d8d200acfabe4e98a0c425d38556dac06870003d66821247fe13266bad423e445ddd6a1b51a86198e38049e2c8039ab6d5dc8b485000393ae131b8c649288a9fb61ebffa8ecb0fababa8f5159286f895f5bed10bad6388600038abfdfa2d445934fe750607f9654e02389c056644453c942d1841bbf418d94e22021014004716b1c1ffb8447e0c1d27f147a4691ed393fdf2eadb225ebfd54ffdf872f0680d5f814756596945ca3852476b456ac3c9942c978d0a3bcd9e6c236efadcdf54e6ed0db9c4bc6ac562b6859a40ad8f3bc85ca35c98badb4b4c5d43832f330d6fedb08e8f9e2acd339c648bf03957cb02aa69b8ab15326e3bbe1ce35df677306edabd89e5635f226a743068500e25028fbdbf1ea19d0921a27c8baec842b753080f407ee4b9a87f2c525e9bfb61fb4d14187c0577e799bf20e53a86359cb75f40ee4d291017c2b59e7278c94b6296dee9ac65ed5ccf61a77ba4f1b3edfb13c5d02748763b23a6bac2f6a891b474d55b625030b35f9b7b564e747afd4cb8e1ce830a9bc59fd6e146443965494f94a8433de054080fcb71f8d48803598cc91db3c7b3fd190ea8ff5f67980a63de4cb9cd06568a9b27aa994992bc33d70990225acb09faf68066aa27c1118c685cb8f3516c3b664450fabdced384de01650d6455287bc0f210aaa5c173c491844155736a64d7cbdda79f0c8a5ccc07d187ca112664a0e6eb500087178983179f1ba2ffb030d577638001b58f5e621b4723e5b0bd0853fb430113d03efd026660a18a23c7582e9788f770212b604759aa242b35b3ca4a835bb18881c8593ac4247ba51ea95946cf079721588bac494f563a687fe1010818caa94583969b0f4a4a40eeee395cbb7881a53d98cad51b1e5d12c7071a7424b4c534e32c53a31b11e6151edd0a13ef9695021bff9bd4c62df9a62d9e0fbd01e750d0b6abc56cc96d55ef06f6428b42fc63f6610633ecf023211e64a1ff89dcabfeeb4b938e64312dcc849929e8d4a290eff601e06dc65141665d7b312ac1f0f859a00fd6d6ccf7dc695e7ae3cf44bed1d9c8659ee3451dd3498f462912ba881a473c9bc0866e4fb33114b2ef7c25869f9cc3c40a06fc2407e2c678126ff1c38a35c5c949bc219fb33ba15730510c41554c727d5adfce33a518148234e0aa5411cb20c115e749792ad47ee19e9f1544dba61593d95cb98d4720a8ae6e60146416d673e5707c3de31d91043422ab848d4676a6845ced6e7075c5a09bc8b4e0ad706c8c07bda527a7325771438e04f37517f3ca5262ef2ddfc9e13db988a90c50be5422a83ad75b93f4faae980d6e6a3abfd0e96387121101afaf55f425dc876d9a8735c1e29d823e19fee5e502c18d16ec9225f232cfbc3dcd143aaa1904f42e880b612beeea3e5a745a7f32e6b2135a75f71117e2947c99647f14702417a9a76f6130b5d62fd149a606061709a86253c3c2a30c8ccc0e2b5ee636bda81973b011fa8b96e0f9149e7d02d903e982b025e0944029423ba9318637387d6f0a8a75f1fa957950ce6661368738251a418968ae390143e596a77bef7de4008ca66ed28b82e044d0ab293f792e8b1e9c1bc24b14ee53539f535b05f2f336c1b7698ca3cb1dc8a3a09568c6841724a19d412d4313760e3560616df7f5b2250b1a52bf32922b3964309b0bedb645579ee09d87959f4e997e4792ac9fa26858ef1aa1dbf7b10da08e7092cb200369d75f3d2b81ad2c237954cdfea1d173f84122ce4cf82a9ebaa04650a69f3675f2155bbb7ce508fdd6a328492b8788e37809f2accf082387b97a7660d427cac9eb93ceacda0cdb9db95a2d6c6fa9ca86276acce2cb8e432b14efb4d0e8a1f3cbc8534c5dfb9a42f7b0d5c212928115cb2c5b905c650b5325e2a849109c60329dcc20f1c1f10d9f6a87d17359938c520e00dd3f5e1857b5af502cc590cad89abca61f4a94513d8e42db9e7223b5d97afd80f490155bf49b79c7ea5c10d6cb74ba10211d6ec75458436a08794164d16bcb4d092274061449418d9fc3d0a9947a8854a399c7e77a49568676ff8df07c3aa21ca90a611dcdfe0c6bd44690a43a3263237f1def6658ba936e2f17c3853fdcd2c0e24cc0b26c59abb47031e00992ca59657da958b48d21d12ae0a93a68596b72c6cc826fd8e079de67b0539026a24c5dcea4875f16cd0722352424493647f7ad3b3148bcdf6c8504c25bbbb07a8b01a6352cb602a1964c02e7e10601644cee41c2bdbb39a9687fdd78dca919726312d076b9e7a4e5b0324e305b99bb1c3ea40bd2296de41f2fc43f668e1a9fb", - "signature_hash": 
"9c13c702e03b54a3000a008e4deb1763d7e232c3378bf928df1e2e976f5ba9c5", - "tx": { - "version": 2, - "unlock_time": 0, - "vin": [ { - "key": { - "amount": 0, - "key_offsets": [69335894, 196970, 944776, 182182, 153476, 19025, 34112, 79728, 10242, 3266, 16917, 36772, 8115, 4259, 5029, 4445], - "k_image": "7f94e759b07b2c025ce22a57db0cb062bfd1f50f6086b14ca3742730c7fa9e5d" - } - }], - "vout": [ - { - "amount": 0, - "target": { - "tagged_key": { - "key": "fcdf91296bb4560335835fda30804a7d8d200acfabe4e98a0c425d38556dac06", - "view_tag": "87" - } - } - }, - { - "amount": 0, - "target": { - "tagged_key": { - "key": "d66821247fe13266bad423e445ddd6a1b51a86198e38049e2c8039ab6d5dc8b4", - "view_tag": "85" - } - } - }, - { - "amount": 0, - "target": { - "tagged_key": { - "key": "93ae131b8c649288a9fb61ebffa8ecb0fababa8f5159286f895f5bed10bad638", - "view_tag": "86" - } - } - }, { - "amount": 0, - "target": { - "tagged_key": { - "key": "8abfdfa2d445934fe750607f9654e02389c056644453c942d1841bbf418d94e2", - "view_tag": "20" - } - } - } - ], - "extra": [ 1, 64, 4, 113, 107, 28, 31, 251, 132, 71, 224, 193, 210, 127, 20, 122, 70, 145, 237, 57, 63, 223, 46, 173, 178, 37, 235, 253, 84, 255, 223, 135, 47], - "rct_signatures": { - "type": 6, - "txnFee": 43920000, - "ecdhInfo": [ { - "amount": "756596945ca38524" - }, { - "amount": "76b456ac3c9942c9" - }, { - "amount": "78d0a3bcd9e6c236" - }, { - "amount": "efadcdf54e6ed0db" - }], - "outPk": [ "9c4bc6ac562b6859a40ad8f3bc85ca35c98badb4b4c5d43832f330d6fedb08e8", "f9e2acd339c648bf03957cb02aa69b8ab15326e3bbe1ce35df677306edabd89e", "5635f226a743068500e25028fbdbf1ea19d0921a27c8baec842b753080f407ee", "4b9a87f2c525e9bfb61fb4d14187c0577e799bf20e53a86359cb75f40ee4d291"] - }, - "rctsig_prunable": { - "nbp": 1, - "bpp": [ { - "A": "7c2b59e7278c94b6296dee9ac65ed5ccf61a77ba4f1b3edfb13c5d02748763b2", - "A1": "3a6bac2f6a891b474d55b625030b35f9b7b564e747afd4cb8e1ce830a9bc59fd", - "B": "6e146443965494f94a8433de054080fcb71f8d48803598cc91db3c7b3fd190ea", - "r1": "8ff5f67980a63de4cb9cd06568a9b27aa994992bc33d70990225acb09faf6806", - "s1": "6aa27c1118c685cb8f3516c3b664450fabdced384de01650d6455287bc0f210a", - "d1": "aa5c173c491844155736a64d7cbdda79f0c8a5ccc07d187ca112664a0e6eb500", - "L": [ "7178983179f1ba2ffb030d577638001b58f5e621b4723e5b0bd0853fb430113d", "03efd026660a18a23c7582e9788f770212b604759aa242b35b3ca4a835bb1888", "1c8593ac4247ba51ea95946cf079721588bac494f563a687fe1010818caa9458", "3969b0f4a4a40eeee395cbb7881a53d98cad51b1e5d12c7071a7424b4c534e32", "c53a31b11e6151edd0a13ef9695021bff9bd4c62df9a62d9e0fbd01e750d0b6a", "bc56cc96d55ef06f6428b42fc63f6610633ecf023211e64a1ff89dcabfeeb4b9", "38e64312dcc849929e8d4a290eff601e06dc65141665d7b312ac1f0f859a00fd", "6d6ccf7dc695e7ae3cf44bed1d9c8659ee3451dd3498f462912ba881a473c9bc" - ], - "R": [ "66e4fb33114b2ef7c25869f9cc3c40a06fc2407e2c678126ff1c38a35c5c949b", "c219fb33ba15730510c41554c727d5adfce33a518148234e0aa5411cb20c115e", "749792ad47ee19e9f1544dba61593d95cb98d4720a8ae6e60146416d673e5707", "c3de31d91043422ab848d4676a6845ced6e7075c5a09bc8b4e0ad706c8c07bda", "527a7325771438e04f37517f3ca5262ef2ddfc9e13db988a90c50be5422a83ad", "75b93f4faae980d6e6a3abfd0e96387121101afaf55f425dc876d9a8735c1e29", "d823e19fee5e502c18d16ec9225f232cfbc3dcd143aaa1904f42e880b612beee", "a3e5a745a7f32e6b2135a75f71117e2947c99647f14702417a9a76f6130b5d62" - ] - }], - "CLSAGs": [ { - "s": [ "fd149a606061709a86253c3c2a30c8ccc0e2b5ee636bda81973b011fa8b96e0f", "9149e7d02d903e982b025e0944029423ba9318637387d6f0a8a75f1fa957950c", 
"e6661368738251a418968ae390143e596a77bef7de4008ca66ed28b82e044d0a", "b293f792e8b1e9c1bc24b14ee53539f535b05f2f336c1b7698ca3cb1dc8a3a09", "568c6841724a19d412d4313760e3560616df7f5b2250b1a52bf32922b3964309", "b0bedb645579ee09d87959f4e997e4792ac9fa26858ef1aa1dbf7b10da08e709", "2cb200369d75f3d2b81ad2c237954cdfea1d173f84122ce4cf82a9ebaa04650a", "69f3675f2155bbb7ce508fdd6a328492b8788e37809f2accf082387b97a7660d", "427cac9eb93ceacda0cdb9db95a2d6c6fa9ca86276acce2cb8e432b14efb4d0e", "8a1f3cbc8534c5dfb9a42f7b0d5c212928115cb2c5b905c650b5325e2a849109", "c60329dcc20f1c1f10d9f6a87d17359938c520e00dd3f5e1857b5af502cc590c", "ad89abca61f4a94513d8e42db9e7223b5d97afd80f490155bf49b79c7ea5c10d", "6cb74ba10211d6ec75458436a08794164d16bcb4d092274061449418d9fc3d0a", "9947a8854a399c7e77a49568676ff8df07c3aa21ca90a611dcdfe0c6bd44690a", "43a3263237f1def6658ba936e2f17c3853fdcd2c0e24cc0b26c59abb47031e00", "992ca59657da958b48d21d12ae0a93a68596b72c6cc826fd8e079de67b053902"], - "c1": "6a24c5dcea4875f16cd0722352424493647f7ad3b3148bcdf6c8504c25bbbb07", - "D": "a8b01a6352cb602a1964c02e7e10601644cee41c2bdbb39a9687fdd78dca9197" - }], - "pseudoOuts": [ "26312d076b9e7a4e5b0324e305b99bb1c3ea40bd2296de41f2fc43f668e1a9fb"] - } - } - }, - { - "id": "f66f36be5a6b340bc8515d3606d4beceb20611dddb1802b387fbaba30c5c98d3", - "hex": "02000102000bf59ea50bf48bfb08e1d6a1039843f7ee0597d002ba3ca603de3be263ca194830cafb5a73ad93cd2fe5271505596a75d7cabb01ced2bb608028245ea73bb8020002fc3f396be673a4957fbc1976601941d225ffdbec54bc06461698d14fda7c8b1f00022757dd54027e93c917251de2cc6777f7a3fa484f5b244ab54bf8783e7da80c362c01959377b2cc5b76f40886262064cc71324414c2996720dcbea25eae8b8faf4f9802090126a37bb1d1414ab705c0cef71cf0704cd0b1fcde1d656737377f5106deb167d7cde7206c17c8f2f25a508be29e1ad78bb792c3fedcbdd9ce95815c59a5fd98ff5b251f105f3d51067fb90cb9f1e0b6138dfda82aad4906472cf7f8c1b501c786dc1c545d39b00502134e4a2935b9b81f420f4d926bed61b2dde30fde4a464d85cbfda1df5a07da2d4d135ff618e5cf4d6b22238b913af712dd59cb228fec35fc0639d3b54edf518e507034b5be35523ac3c98396bd9cc6e6a59e1c6eda6e93f9e84c4fdade80a6f449ac6ccf8d6583fcd495b78c53a43321210d73370f86999fcc79761f1810514b50b8a1fee1288289b64718d54bcef42abfe61fb8fb0f60373d190d80ad65cabd36d9600d6253f8d343961367526122ca0c5b0acdc60c071f5ab47025d4d89568a2f0b56fc73c6488cc4500e398d2b3059e0d35cede35c33b473e6e57121629db88f0f8c15e03036eae2887f1d9d76d90a0b5a5caa01272347c96e88e461acb04a6be5624c4a6742dd8a0d36ac75e7056ffb3a3a7ae68dda87894c7f503a794dc4458ff058e6f7cd903662e5961d5eeb052b8f075a5ad5cb84407f96ce47a5793ad0c8e4060ee4d90c9946c54b83e91737ccf71acc00045a919866586941d8deeb467a7d83335f84a10d66ba1f51afdf961649ac95ad97dd24d553c1c1004a73332d225d29d0c62bee22e2ea81ceffb02f27ff05a12144a076ed84bce66dd6e84d56fedf06c180a504706977084ff0a74174da623fcb03a88b0246b5a76de445334f447c4525e524e0d3e6b8751417249e0eb6f498ba12ccee562dfddb048ddbd5131263e210d54f04ec38a41c64f7a5812c2083b9fc46fecc2de7f0d1d0aeaaa2f2a9d956879f66e563b48ff9476d67ab98f6956019bf0a36428b0361b28383e7ef2b90bad6a66ae286af4753e54fe6be3131fdc9986ccdf04cbc167abdfd181b326a1d8b30990b9df0b1e702c7bab83c199aa1341c6b4ae1e227b831f0d068e619bb73ed5de32b0bfda62c5203be7f2a6a37c3feb4663da413387e0255ec0204e469fed5e0fde75662cf3a90ae0044778a5a75a319dec07d2d84c70c9b51c9a31f9bc7f25f1a89033dbd23072ac38eb59e08d6ad4f321011a08df0f0559d3b6dd8a22042b40be532d2fc811e42774cd129dc9c9e671600c3a7fe60336836a1e71f2bfeee69d2ae5e647b54dc1c54e9993a64c0f42daf70aaab2387b9a0fdcc82a7e2f3b52ed2a8135b55f166cb49fb6b1d34a64d30f370d8271408d0e4db75616b758014671d321c8c5086a8d7b1bedd44bcb75b382c60bffb7c7726232426ea19c9bd622ee096e772a4c5ab6305a6b2f27f
b4a5f60e40737a4cc043ea061755ace64393a0af82ea8088307426acb33a34de95ea7252c01a5b07847f707777b8cd64cc73364a8e65181227ba1ba5aa63161408a7265980b6b6c18079d195a12ec7af4404ff61d3c756aa35b88e4fe4bd72c8b22298b1601b1d04f9861e7118f10808505812809d54d85aa79f4ceb905c8e87b1a5801c60bb12b0f3fdc0e5e0afb7839fd51742aa88ef4466bc1e1f9c1b6f978f736b1880c9a902621eaca740aa21cfaf36931e09b7cc3b28223ad6c2398bd828c7270460d78dd6dd5d9c181aed832e62d56b00e870961b0b6a3a77eb4604cef64f69898046ba7157673909c3b2cb3bedef665e83c364475d482dfa46e717c4c5fa29b7f09dda97110c8a66b48e8fab4cbff2578e4cb85e6b353bfe8f78b6f9182711de13c7093d2007f1dbdfe5f46332b797376af80efc67ab028c597d528461384683dae", - "signature_hash": "8cb405e1460df8134032db1430e1cfffb8f707c9de43ba1f68100f2af8a5e6b1", - "tx": { - "version": 2, - "unlock_time": 0, - "vin": [ { - "key": { - "amount": 0, - "key_offsets": [ 23678837, 18793972, 6843233, 8600, 96119, 43031, 7738, 422, 7646, 12770, 3274], - "k_image": "4830cafb5a73ad93cd2fe5271505596a75d7cabb01ced2bb608028245ea73bb8" - } - }], - "vout": [ - { - "amount": 0, - "target": { - "key": "fc3f396be673a4957fbc1976601941d225ffdbec54bc06461698d14fda7c8b1f" - } - }, - { - "amount": 0, - "target": { - "key": "2757dd54027e93c917251de2cc6777f7a3fa484f5b244ab54bf8783e7da80c36" - } - } - ], - "extra": [ 1, 149, 147, 119, 178, 204, 91, 118, 244, 8, 134, 38, 32, 100, 204, 113, 50, 68, 20, 194, 153, 103, 32, 220, 190, 162, 94, 174, 139, 143, 175, 79, 152, 2, 9, 1, 38, 163, 123, 177, 209, 65, 74, 183], - "rct_signatures": { - "type": 5, - "txnFee": 60680000, - "ecdhInfo": [ { - "amount": "f0704cd0b1fcde1d" - }, { - "amount": "656737377f5106de" - }], - "outPk": [ "b167d7cde7206c17c8f2f25a508be29e1ad78bb792c3fedcbdd9ce95815c59a5", "fd98ff5b251f105f3d51067fb90cb9f1e0b6138dfda82aad4906472cf7f8c1b5"] - }, - "rctsig_prunable": { - "nbp": 1, - "bp": [ { - "A": "c786dc1c545d39b00502134e4a2935b9b81f420f4d926bed61b2dde30fde4a46", - "S": "4d85cbfda1df5a07da2d4d135ff618e5cf4d6b22238b913af712dd59cb228fec", - "T1": "35fc0639d3b54edf518e507034b5be35523ac3c98396bd9cc6e6a59e1c6eda6e", - "T2": "93f9e84c4fdade80a6f449ac6ccf8d6583fcd495b78c53a43321210d73370f86", - "taux": "999fcc79761f1810514b50b8a1fee1288289b64718d54bcef42abfe61fb8fb0f", - "mu": "60373d190d80ad65cabd36d9600d6253f8d343961367526122ca0c5b0acdc60c", - "L": [ "1f5ab47025d4d89568a2f0b56fc73c6488cc4500e398d2b3059e0d35cede35c3", "3b473e6e57121629db88f0f8c15e03036eae2887f1d9d76d90a0b5a5caa01272", "347c96e88e461acb04a6be5624c4a6742dd8a0d36ac75e7056ffb3a3a7ae68dd", "a87894c7f503a794dc4458ff058e6f7cd903662e5961d5eeb052b8f075a5ad5c", "b84407f96ce47a5793ad0c8e4060ee4d90c9946c54b83e91737ccf71acc00045", "a919866586941d8deeb467a7d83335f84a10d66ba1f51afdf961649ac95ad97d", "d24d553c1c1004a73332d225d29d0c62bee22e2ea81ceffb02f27ff05a12144a" - ], - "R": [ "6ed84bce66dd6e84d56fedf06c180a504706977084ff0a74174da623fcb03a88", "b0246b5a76de445334f447c4525e524e0d3e6b8751417249e0eb6f498ba12cce", "e562dfddb048ddbd5131263e210d54f04ec38a41c64f7a5812c2083b9fc46fec", "c2de7f0d1d0aeaaa2f2a9d956879f66e563b48ff9476d67ab98f6956019bf0a3", "6428b0361b28383e7ef2b90bad6a66ae286af4753e54fe6be3131fdc9986ccdf", "04cbc167abdfd181b326a1d8b30990b9df0b1e702c7bab83c199aa1341c6b4ae", "1e227b831f0d068e619bb73ed5de32b0bfda62c5203be7f2a6a37c3feb4663da" - ], - "a": "413387e0255ec0204e469fed5e0fde75662cf3a90ae0044778a5a75a319dec07", - "b": "d2d84c70c9b51c9a31f9bc7f25f1a89033dbd23072ac38eb59e08d6ad4f32101", - "t": "1a08df0f0559d3b6dd8a22042b40be532d2fc811e42774cd129dc9c9e671600c" - }], - "CLSAGs": [ { - "s": [ 
"3a7fe60336836a1e71f2bfeee69d2ae5e647b54dc1c54e9993a64c0f42daf70a", "aab2387b9a0fdcc82a7e2f3b52ed2a8135b55f166cb49fb6b1d34a64d30f370d", "8271408d0e4db75616b758014671d321c8c5086a8d7b1bedd44bcb75b382c60b", "ffb7c7726232426ea19c9bd622ee096e772a4c5ab6305a6b2f27fb4a5f60e407", "37a4cc043ea061755ace64393a0af82ea8088307426acb33a34de95ea7252c01", "a5b07847f707777b8cd64cc73364a8e65181227ba1ba5aa63161408a7265980b", "6b6c18079d195a12ec7af4404ff61d3c756aa35b88e4fe4bd72c8b22298b1601", "b1d04f9861e7118f10808505812809d54d85aa79f4ceb905c8e87b1a5801c60b", "b12b0f3fdc0e5e0afb7839fd51742aa88ef4466bc1e1f9c1b6f978f736b1880c", "9a902621eaca740aa21cfaf36931e09b7cc3b28223ad6c2398bd828c7270460d", "78dd6dd5d9c181aed832e62d56b00e870961b0b6a3a77eb4604cef64f6989804"], - "c1": "6ba7157673909c3b2cb3bedef665e83c364475d482dfa46e717c4c5fa29b7f09", - "D": "dda97110c8a66b48e8fab4cbff2578e4cb85e6b353bfe8f78b6f9182711de13c" - }], - "pseudoOuts": [ "7093d2007f1dbdfe5f46332b797376af80efc67ab028c597d528461384683dae"] - } - } - }, - { - "id": "55ba10662968c57fc8fed2c82a99d6fd9516730c245f58e9e87bb9a35378014a", - "hex": "01000302b0f9cf0e0100e53d3d97d11974ccf49d23513b9465bc139bda14b8207288e41557707e59c2dc02c08092de06010133e69f524f1989738827c9fb9087d45d7b6865645453f620189939d926735b3902b0b2c4f62201001d680e360c156c7cc952b0c4fc39a39c95a766423109766ae3dcf0c5cf8abf9e06a0f736026bcac41f5468fd1bcf8994e1a4282aecb420005ec294324faec5c6f46806614780c8afa025027998ef0ac319d96b224fa3c33fe12677ef06f4aed80916f28e3f7a684a1c89d78094ebdc03028e0004b98f7c622f1d0364a2d0270c40e6606793bd948a41af9287016d264d7580ade20402f911ad66eabcd8112e90130d7594cb8fc413431da9b5b004c8651d100a2ac8fb8084af5f029caa24c41b1b4938e7e066a7e59592fdc3c832e2516048e57af5d8acca9bef0ac0b80202e99b6a2e000f03f73c1966e16875945ffad20b1895efa17202c6e240191b01e421011da6ff966df43bd44f513aaafa260c54e2ad31469664fa7b0a44fed4ead9a483056cd88c350b340289694ad525f1316367ed16673dc23911e624e6bac4a48b032a623d09ce5bd85a839534de4fbbfb72da2a6779a66f775c16379e8abd122401a5a19c47bb8ecd0c7aa9610513b611602d246f3b07fa19512dc8fde7b180c704694b2d75abbaa2907cda52f888f18ded34308dd1b40f1fb33a01340a1ddc5f07ce19ab4357c52764600861d2d331cfed5972fff42dac64583d617fa4ee27a509a8c29ff77fc71bc7cd106ce54cd3020d1c18c6c794aaa93cb32cd4ff55d82b04", - "signature_hash": "1ad261b4c8f35b8861c4f3a78b240a85e44be6a8ac49acd1e50de4680adf7fac", - "tx": { - "version": 1, - "unlock_time": 0, - "vin": [ - { - "key": { - "amount": 30670000, - "key_offsets": [0], - "k_image": "e53d3d97d11974ccf49d23513b9465bc139bda14b8207288e41557707e59c2dc" - } - }, - { - "key": { - "amount": 1808040000, - "key_offsets": [1], - "k_image": "33e69f524f1989738827c9fb9087d45d7b6865645453f620189939d926735b39" - } - }, - { - "key": { - "amount": 9375390000, - "key_offsets": [0], - "k_image": "1d680e360c156c7cc952b0c4fc39a39c95a766423109766ae3dcf0c5cf8abf9e" - } - } - ], - "vout": [ - { - "amount": 900000, - "target": { - "key": "6bcac41f5468fd1bcf8994e1a4282aecb420005ec294324faec5c6f468066147" - } - }, - { - "amount": 10000000000, - "target": { - "key": "7998ef0ac319d96b224fa3c33fe12677ef06f4aed80916f28e3f7a684a1c89d7" - } - }, - { - "amount": 1000000000, - "target": { - "key": "8e0004b98f7c622f1d0364a2d0270c40e6606793bd948a41af9287016d264d75" - } - }, - { - "amount": 10000000, - "target": { - "key": "f911ad66eabcd8112e90130d7594cb8fc413431da9b5b004c8651d100a2ac8fb" - } - }, - { - "amount": 200000000, - "target": { - "key": "9caa24c41b1b4938e7e066a7e59592fdc3c832e2516048e57af5d8acca9bef0a" - } - }, - { - "amount": 40000, - "target": { - "key": 
"e99b6a2e000f03f73c1966e16875945ffad20b1895efa17202c6e240191b01e4" - } - } - ], - "extra": [ 1, 29, 166, 255, 150, 109, 244, 59, 212, 79, 81, 58, 170, 250, 38, 12, 84, 226, 173, 49, 70, 150, 100, 250, 123, 10, 68, 254, 212, 234, 217, 164, 131], - "signatures": [ "056cd88c350b340289694ad525f1316367ed16673dc23911e624e6bac4a48b032a623d09ce5bd85a839534de4fbbfb72da2a6779a66f775c16379e8abd122401", "a5a19c47bb8ecd0c7aa9610513b611602d246f3b07fa19512dc8fde7b180c704694b2d75abbaa2907cda52f888f18ded34308dd1b40f1fb33a01340a1ddc5f07", "ce19ab4357c52764600861d2d331cfed5972fff42dac64583d617fa4ee27a509a8c29ff77fc71bc7cd106ce54cd3020d1c18c6c794aaa93cb32cd4ff55d82b04"] - } - } -] diff --git a/networks/monero/src/transaction.rs b/networks/monero/src/transaction.rs deleted file mode 100644 index d70da5ff..00000000 --- a/networks/monero/src/transaction.rs +++ /dev/null @@ -1,635 +0,0 @@ -use core::cmp::Ordering; -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, Write}, -}; - -use zeroize::Zeroize; - -use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY}; - -use crate::{ - io::*, - primitives::keccak256, - ring_signatures::RingSignature, - ringct::{bulletproofs::Bulletproof, PrunedRctProofs}, -}; - -/// An input in the Monero protocol. -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum Input { - /// An input for a miner transaction, which is generating new coins. - Gen(usize), - /// An input spending an output on-chain. - ToKey { - /// The pool this input spends an output of. - amount: Option, - /// The decoys used by this input's ring, specified as their offset distance from each other. - key_offsets: Vec, - /// The key image (linking tag, nullifer) for the spent output. - key_image: EdwardsPoint, - }, -} - -impl Input { - /// Write the Input. - pub fn write(&self, w: &mut W) -> io::Result<()> { - match self { - Input::Gen(height) => { - w.write_all(&[255])?; - write_varint(height, w) - } - - Input::ToKey { amount, key_offsets, key_image } => { - w.write_all(&[2])?; - write_varint(&amount.unwrap_or(0), w)?; - write_vec(write_varint, key_offsets, w)?; - write_point(key_image, w) - } - } - } - - /// Serialize the Input to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = vec![]; - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read an Input. - pub fn read(r: &mut R) -> io::Result { - Ok(match read_byte(r)? { - 255 => Input::Gen(read_varint(r)?), - 2 => { - let amount = read_varint(r)?; - // https://github.com/monero-project/monero/ - // blob/00fd416a99686f0956361d1cd0337fe56e58d4a7/ - // src/cryptonote_basic/cryptonote_format_utils.cpp#L860-L863 - // A non-RCT 0-amount input can't exist because only RCT TXs can have a 0-amount output - // That's why collapsing to None if the amount is 0 is safe, even without knowing if RCT - let amount = if amount == 0 { None } else { Some(amount) }; - Input::ToKey { - amount, - key_offsets: read_vec(read_varint, None, r)?, - key_image: read_torsion_free_point(r)?, - } - } - _ => Err(io::Error::other("Tried to deserialize unknown/unused input type"))?, - }) - } -} - -/// An output in the Monero protocol. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Output { - /// The pool this output should be sorted into. - pub amount: Option, - /// The key which can spend this output. - pub key: CompressedEdwardsY, - /// The view tag for this output, as used to accelerate scanning. - pub view_tag: Option, -} - -impl Output { - /// Write the Output. 
- pub fn write(&self, w: &mut W) -> io::Result<()> { - write_varint(&self.amount.unwrap_or(0), w)?; - w.write_all(&[2 + u8::from(self.view_tag.is_some())])?; - w.write_all(&self.key.to_bytes())?; - if let Some(view_tag) = self.view_tag { - w.write_all(&[view_tag])?; - } - Ok(()) - } - - /// Write the Output to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(8 + 1 + 32); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read an Output. - pub fn read(rct: bool, r: &mut R) -> io::Result { - let amount = read_varint(r)?; - let amount = if rct { - if amount != 0 { - Err(io::Error::other("RCT TX output wasn't 0"))?; - } - None - } else { - Some(amount) - }; - - let view_tag = match read_byte(r)? { - 2 => false, - 3 => true, - _ => Err(io::Error::other("Tried to deserialize unknown/unused output type"))?, - }; - - Ok(Output { - amount, - key: CompressedEdwardsY(read_bytes(r)?), - view_tag: if view_tag { Some(read_byte(r)?) } else { None }, - }) - } -} - -/// An additional timelock for a Monero transaction. -/// -/// Monero outputs are locked by a default timelock. If a timelock is explicitly specified, the -/// longer of the two will be the timelock used. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub enum Timelock { - /// No additional timelock. - None, - /// Additionally locked until this block. - Block(usize), - /// Additionally locked until this many seconds since the epoch. - Time(u64), -} - -impl Timelock { - /// Write the Timelock. - pub fn write(&self, w: &mut W) -> io::Result<()> { - match self { - Timelock::None => write_varint(&0u8, w), - Timelock::Block(block) => write_varint(block, w), - Timelock::Time(time) => write_varint(time, w), - } - } - - /// Serialize the Timelock to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(1); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read a Timelock. - pub fn read(r: &mut R) -> io::Result { - const TIMELOCK_BLOCK_THRESHOLD: usize = 500_000_000; - - let raw = read_varint::<_, u64>(r)?; - Ok(if raw == 0 { - Timelock::None - } else if raw < - u64::try_from(TIMELOCK_BLOCK_THRESHOLD) - .expect("TIMELOCK_BLOCK_THRESHOLD didn't fit in a u64") - { - Timelock::Block(usize::try_from(raw).expect( - "timelock overflowed usize despite being less than a const representable with a usize", - )) - } else { - Timelock::Time(raw) - }) - } -} - -impl PartialOrd for Timelock { - fn partial_cmp(&self, other: &Self) -> Option { - match (self, other) { - (Timelock::None, Timelock::None) => Some(Ordering::Equal), - (Timelock::None, _) => Some(Ordering::Less), - (_, Timelock::None) => Some(Ordering::Greater), - (Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b), - (Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b), - _ => None, - } - } -} - -/// The transaction prefix. -/// -/// This is common to all transaction versions and contains most parts of the transaction needed to -/// handle it. It excludes any proofs. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct TransactionPrefix { - /// The timelock this transaction is additionally constrained by. - /// - /// All transactions on the blockchain are subject to a 10-block lock. This adds a further - /// constraint. - pub additional_timelock: Timelock, - /// The inputs for this transaction. - pub inputs: Vec, - /// The outputs for this transaction. - pub outputs: Vec, - /// The additional data included within the transaction. 
- /// - /// This is an arbitrary data field, yet is used by wallets for containing the data necessary to - /// scan the transaction. - pub extra: Vec, -} - -impl TransactionPrefix { - /// Write a TransactionPrefix. - /// - /// This is distinct from Monero in that it won't write any version. - fn write(&self, w: &mut W) -> io::Result<()> { - self.additional_timelock.write(w)?; - write_vec(Input::write, &self.inputs, w)?; - write_vec(Output::write, &self.outputs, w)?; - write_varint(&self.extra.len(), w)?; - w.write_all(&self.extra) - } - - /// Read a TransactionPrefix. - /// - /// This is distinct from Monero in that it won't read the version. The version must be passed - /// in. - pub fn read(r: &mut R, version: u64) -> io::Result { - let additional_timelock = Timelock::read(r)?; - - let inputs = read_vec(|r| Input::read(r), None, r)?; - if inputs.is_empty() { - Err(io::Error::other("transaction had no inputs"))?; - } - let is_miner_tx = matches!(inputs[0], Input::Gen { .. }); - - let mut prefix = TransactionPrefix { - additional_timelock, - inputs, - outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), None, r)?, - extra: vec![], - }; - prefix.extra = read_vec(read_byte, None, r)?; - Ok(prefix) - } - - fn hash(&self, version: u64) -> [u8; 32] { - let mut buf = vec![]; - write_varint(&version, &mut buf).expect("write failed but doesn't fail"); - self.write(&mut buf).expect("write failed but doesn't fail"); - keccak256(buf) - } -} - -mod sealed { - use core::fmt::Debug; - use crate::ringct::*; - use super::*; - - pub(crate) trait RingSignatures: Clone + PartialEq + Eq + Default + Debug { - fn signatures_to_write(&self) -> &[RingSignature]; - fn read_signatures(inputs: &[Input], r: &mut impl Read) -> io::Result; - } - - impl RingSignatures for Vec { - fn signatures_to_write(&self) -> &[RingSignature] { - self - } - fn read_signatures(inputs: &[Input], r: &mut impl Read) -> io::Result { - let mut signatures = Vec::with_capacity(inputs.len()); - for input in inputs { - match input { - Input::ToKey { key_offsets, .. } => { - signatures.push(RingSignature::read(key_offsets.len(), r)?) 
- } - _ => Err(io::Error::other("reading signatures for a transaction with non-ToKey inputs"))?, - } - } - Ok(signatures) - } - } - - impl RingSignatures for () { - fn signatures_to_write(&self) -> &[RingSignature] { - &[] - } - fn read_signatures(_: &[Input], _: &mut impl Read) -> io::Result { - Ok(()) - } - } - - pub(crate) trait RctProofsTrait: Clone + PartialEq + Eq + Debug { - fn write(&self, w: &mut impl Write) -> io::Result<()>; - fn read( - ring_length: usize, - inputs: usize, - outputs: usize, - r: &mut impl Read, - ) -> io::Result>; - fn rct_type(&self) -> RctType; - fn base(&self) -> &RctBase; - } - - impl RctProofsTrait for RctProofs { - fn write(&self, w: &mut impl Write) -> io::Result<()> { - self.write(w) - } - fn read( - ring_length: usize, - inputs: usize, - outputs: usize, - r: &mut impl Read, - ) -> io::Result> { - RctProofs::read(ring_length, inputs, outputs, r) - } - fn rct_type(&self) -> RctType { - self.rct_type() - } - fn base(&self) -> &RctBase { - &self.base - } - } - - impl RctProofsTrait for PrunedRctProofs { - fn write(&self, w: &mut impl Write) -> io::Result<()> { - self.base.write(w, self.rct_type) - } - fn read( - _ring_length: usize, - inputs: usize, - outputs: usize, - r: &mut impl Read, - ) -> io::Result> { - Ok(RctBase::read(inputs, outputs, r)?.map(|(rct_type, base)| Self { rct_type, base })) - } - fn rct_type(&self) -> RctType { - self.rct_type - } - fn base(&self) -> &RctBase { - &self.base - } - } - - pub(crate) trait PotentiallyPruned { - type RingSignatures: RingSignatures; - type RctProofs: RctProofsTrait; - } - /// A transaction which isn't pruned. - #[derive(Clone, PartialEq, Eq, Debug)] - pub struct NotPruned; - impl PotentiallyPruned for NotPruned { - type RingSignatures = Vec; - type RctProofs = RctProofs; - } - /// A transaction which is pruned. - #[derive(Clone, PartialEq, Eq, Debug)] - pub struct Pruned; - impl PotentiallyPruned for Pruned { - type RingSignatures = (); - type RctProofs = PrunedRctProofs; - } -} -pub use sealed::*; - -/// A Monero transaction. -#[allow(private_bounds, private_interfaces, clippy::large_enum_variant)] -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum Transaction { - /// A version 1 transaction, used by the original Cryptonote codebase. - V1 { - /// The transaction's prefix. - prefix: TransactionPrefix, - /// The transaction's ring signatures. - signatures: P::RingSignatures, - }, - /// A version 2 transaction, used by the RingCT protocol. - V2 { - /// The transaction's prefix. - prefix: TransactionPrefix, - /// The transaction's proofs. - proofs: Option, - }, -} - -enum PrunableHash<'a> { - V1(&'a [RingSignature]), - V2([u8; 32]), -} - -#[allow(private_bounds)] -impl Transaction

{ - /// Get the version of this transaction. - pub fn version(&self) -> u8 { - match self { - Transaction::V1 { .. } => 1, - Transaction::V2 { .. } => 2, - } - } - - /// Get the TransactionPrefix of this transaction. - pub fn prefix(&self) -> &TransactionPrefix { - match self { - Transaction::V1 { prefix, .. } | Transaction::V2 { prefix, .. } => prefix, - } - } - - /// Get a mutable reference to the TransactionPrefix of this transaction. - pub fn prefix_mut(&mut self) -> &mut TransactionPrefix { - match self { - Transaction::V1 { prefix, .. } | Transaction::V2 { prefix, .. } => prefix, - } - } - - /// Write the Transaction. - /// - /// Some writable transactions may not be readable if they're malformed, per Monero's consensus - /// rules. - pub fn write(&self, w: &mut W) -> io::Result<()> { - write_varint(&self.version(), w)?; - match self { - Transaction::V1 { prefix, signatures } => { - prefix.write(w)?; - for ring_sig in signatures.signatures_to_write() { - ring_sig.write(w)?; - } - } - Transaction::V2 { prefix, proofs } => { - prefix.write(w)?; - match proofs { - None => w.write_all(&[0])?, - Some(proofs) => proofs.write(w)?, - } - } - } - Ok(()) - } - - /// Write the Transaction to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(2048); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read a Transaction. - pub fn read(r: &mut R) -> io::Result { - let version = read_varint(r)?; - let prefix = TransactionPrefix::read(r, version)?; - - if version == 1 { - let signatures = if (prefix.inputs.len() == 1) && matches!(prefix.inputs[0], Input::Gen(_)) { - Default::default() - } else { - P::RingSignatures::read_signatures(&prefix.inputs, r)? - }; - - Ok(Transaction::V1 { prefix, signatures }) - } else if version == 2 { - let proofs = P::RctProofs::read( - prefix.inputs.first().map_or(0, |input| match input { - Input::Gen(_) => 0, - Input::ToKey { key_offsets, .. } => key_offsets.len(), - }), - prefix.inputs.len(), - prefix.outputs.len(), - r, - )?; - - Ok(Transaction::V2 { prefix, proofs }) - } else { - Err(io::Error::other("tried to deserialize unknown version")) - } - } - - // The hash of the transaction. - #[allow(clippy::needless_pass_by_value)] - fn hash_with_prunable_hash(&self, prunable: PrunableHash<'_>) -> [u8; 32] { - match self { - Transaction::V1 { prefix, .. 
} => { - let mut buf = Vec::with_capacity(512); - - // We don't use `self.write` as that may write the signatures (if this isn't pruned) - write_varint(&self.version(), &mut buf) - .expect("write failed but doesn't fail"); - prefix.write(&mut buf).expect("write failed but doesn't fail"); - - // We explicitly write the signatures ourselves here - let PrunableHash::V1(signatures) = prunable else { - panic!("hashing v1 TX with non-v1 prunable data") - }; - for signature in signatures { - signature.write(&mut buf).expect("write failed but doesn't fail"); - } - - keccak256(buf) - } - Transaction::V2 { prefix, proofs } => { - let mut hashes = Vec::with_capacity(96); - - hashes.extend(prefix.hash(2)); - - if let Some(proofs) = proofs { - let mut buf = Vec::with_capacity(512); - proofs - .base() - .write(&mut buf, proofs.rct_type()) - .expect("write failed but doesn't fail"); - hashes.extend(keccak256(&buf)); - } else { - // Serialization of RctBase::Null - hashes.extend(keccak256([0])); - } - let PrunableHash::V2(prunable_hash) = prunable else { - panic!("hashing v2 TX with non-v2 prunable data") - }; - hashes.extend(prunable_hash); - - keccak256(hashes) - } - } - } -} - -impl Transaction { - /// The hash of the transaction. - pub fn hash(&self) -> [u8; 32] { - match self { - Transaction::V1 { signatures, .. } => { - self.hash_with_prunable_hash(PrunableHash::V1(signatures)) - } - Transaction::V2 { proofs, .. } => { - self.hash_with_prunable_hash(PrunableHash::V2(if let Some(proofs) = proofs { - let mut buf = Vec::with_capacity(1024); - proofs - .prunable - .write(&mut buf, proofs.rct_type()) - .expect("write failed but doesn't fail"); - keccak256(buf) - } else { - [0; 32] - })) - } - } - } - - /// Calculate the hash of this transaction as needed for signing it. - /// - /// This returns None if the transaction is without signatures. - pub fn signature_hash(&self) -> Option<[u8; 32]> { - Some(match self { - Transaction::V1 { prefix, .. } => { - if (prefix.inputs.len() == 1) && matches!(prefix.inputs[0], Input::Gen(_)) { - None?; - } - self.hash_with_prunable_hash(PrunableHash::V1(&[])) - } - Transaction::V2 { proofs, .. } => self.hash_with_prunable_hash({ - let Some(proofs) = proofs else { None? }; - let mut buf = Vec::with_capacity(1024); - proofs - .prunable - .signature_write(&mut buf) - .expect("write failed but doesn't fail"); - PrunableHash::V2(keccak256(buf)) - }), - }) - } - - fn is_rct_bulletproof(&self) -> bool { - match self { - Transaction::V1 { .. } => false, - Transaction::V2 { proofs, .. } => { - let Some(proofs) = proofs else { return false }; - proofs.rct_type().bulletproof() - } - } - } - - fn is_rct_bulletproof_plus(&self) -> bool { - match self { - Transaction::V1 { .. } => false, - Transaction::V2 { proofs, .. } => { - let Some(proofs) = proofs else { return false }; - proofs.rct_type().bulletproof_plus() - } - } - } - - /// Calculate the transaction's weight. - pub fn weight(&self) -> usize { - let blob_size = self.serialize().len(); - - let bp = self.is_rct_bulletproof(); - let bp_plus = self.is_rct_bulletproof_plus(); - if !(bp || bp_plus) { - blob_size - } else { - blob_size + - Bulletproof::calculate_clawback( - bp_plus, - match self { - Transaction::V1 { .. } => panic!("v1 transaction was BP(+)"), - Transaction::V2 { prefix, .. } => prefix.outputs.len(), - }, - ) - .0 - } - } -} - -impl From> for Transaction { - fn from(tx: Transaction) -> Transaction { - match tx { - Transaction::V1 { prefix, .. 
} => Transaction::V1 { prefix, signatures: () }, - Transaction::V2 { prefix, proofs } => Transaction::V2 { - prefix, - proofs: proofs - .map(|proofs| PrunedRctProofs { rct_type: proofs.rct_type(), base: proofs.base }), - }, - } - } -} diff --git a/networks/monero/tests/tests.rs b/networks/monero/tests/tests.rs deleted file mode 100644 index 7b6656f2..00000000 --- a/networks/monero/tests/tests.rs +++ /dev/null @@ -1,3 +0,0 @@ -// TODO -#[test] -fn test() {} diff --git a/networks/monero/verify-chain/Cargo.toml b/networks/monero/verify-chain/Cargo.toml deleted file mode 100644 index e1aba16e..00000000 --- a/networks/monero/verify-chain/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "monero-serai-verify-chain" -version = "0.1.0" -description = "A binary to deserialize and verify the Monero blockchain" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/verify-chain" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" -publish = false - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -rand_core = { version = "0.6", default-features = false, features = ["std"] } - -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -hex = { version = "0.4", default-features = false, features = ["std"] } -serde = { version = "1", default-features = false, features = ["derive", "alloc", "std"] } -serde_json = { version = "1", default-features = false, features = ["alloc", "std"] } - -monero-serai = { path = "..", default-features = false, features = ["std", "compile-time-generators"] } -monero-rpc = { path = "../rpc", default-features = false, features = ["std"] } -monero-simple-request-rpc = { path = "../rpc/simple-request", default-features = false } - -tokio = { version = "1", default-features = false, features = ["rt-multi-thread", "macros"] } diff --git a/networks/monero/verify-chain/LICENSE b/networks/monero/verify-chain/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/verify-chain/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
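As an illustration (not part of any removed file): the `Input::ToKey` documentation above stores a ring's members as cumulative key offsets, where each value is the distance from the previous absolute output index, and the verify-chain binary removed below undoes that encoding with a running sum. A minimal, self-contained sketch of that conversion, checked against the offsets of the first test vector above:

```rust
/// Convert an input's cumulative key offsets into absolute output indexes.
fn key_offsets_to_indexes(key_offsets: &[u64]) -> Vec<u64> {
  let mut running_sum = 0u64;
  key_offsets
    .iter()
    .map(|offset| {
      running_sum += offset;
      running_sum
    })
    .collect()
}

#[test]
fn decode_offsets() {
  // The first three key offsets of the first removed test vector.
  assert_eq!(
    key_offsets_to_indexes(&[69_335_894, 196_970, 944_776]),
    vec![69_335_894, 69_532_864, 70_477_640]
  );
}
```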
diff --git a/networks/monero/verify-chain/README.md b/networks/monero/verify-chain/README.md deleted file mode 100644 index 4348b2c1..00000000 --- a/networks/monero/verify-chain/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# monero-serai Verify Chain - -A binary to deserialize and verify the Monero blockchain. - -This is not complete. This is not intended to be complete. This is intended to -test monero-serai against actual blockchain data. Do not use this as an -inflation checker. diff --git a/networks/monero/verify-chain/src/main.rs b/networks/monero/verify-chain/src/main.rs deleted file mode 100644 index 2cc56c55..00000000 --- a/networks/monero/verify-chain/src/main.rs +++ /dev/null @@ -1,284 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] - -use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint}; - -use serde::Deserialize; -use serde_json::json; - -use monero_serai::{ - io::decompress_point, - primitives::Commitment, - ringct::{RctPrunable, bulletproofs::BatchVerifier}, - transaction::{Input, Transaction}, - block::Block, -}; - -use monero_rpc::{RpcError, Rpc}; -use monero_simple_request_rpc::SimpleRequestRpc; - -use tokio::task::JoinHandle; - -async fn check_block(rpc: impl Rpc, block_i: usize) { - let hash = loop { - match rpc.get_block_hash(block_i).await { - Ok(hash) => break hash, - Err(RpcError::ConnectionError(e)) => { - println!("get_block_hash ConnectionError: {e}"); - continue; - } - Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"), - } - }; - - // TODO: Grab the JSON to also check it was deserialized correctly - #[derive(Deserialize, Debug)] - struct BlockResponse { - blob: String, - } - let res: BlockResponse = loop { - match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await { - Ok(res) => break res, - Err(RpcError::ConnectionError(e)) => { - println!("get_block ConnectionError: {e}"); - continue; - } - Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"), - } - }; - - let blob = hex::decode(res.blob).expect("node returned non-hex block"); - let block = Block::read(&mut blob.as_slice()) - .unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}")); - assert_eq!(block.hash(), hash, "hash differs"); - assert_eq!(block.serialize(), blob, "serialization differs"); - - let txs_len = 1 + block.transactions.len(); - - if !block.transactions.is_empty() { - // Test getting pruned transactions - loop { - match rpc.get_pruned_transactions(&block.transactions).await { - Ok(_) => break, - Err(RpcError::ConnectionError(e)) => { - println!("get_pruned_transactions ConnectionError: {e}"); - continue; - } - Err(e) => panic!("couldn't call get_pruned_transactions: {e:?}"), - } - } - - let txs = loop { - match rpc.get_transactions(&block.transactions).await { - Ok(txs) => break txs, - Err(RpcError::ConnectionError(e)) => { - println!("get_transactions ConnectionError: {e}"); - continue; - } - Err(e) => panic!("couldn't call get_transactions: {e:?}"), - } - }; - - let mut batch = BatchVerifier::new(); - for tx in txs { - match tx { - Transaction::V1 { prefix: _, signatures } => { - assert!(!signatures.is_empty()); - continue; - } - Transaction::V2 { prefix: _, proofs: None } => { - panic!("proofs were empty in non-miner v2 transaction"); - } - Transaction::V2 { ref prefix, proofs: Some(ref proofs) } => { - let sig_hash = tx.signature_hash().expect("no signature hash for TX with proofs"); - // Verify all proofs we support proving for - // This is 
due to having debug_asserts calling verify within their proving, and CLSAG - // multisig explicitly calling verify as part of its signing process - // Accordingly, making sure our signature_hash algorithm is correct is great, and further - // making sure the verification functions are valid is appreciated - match &proofs.prunable { - RctPrunable::AggregateMlsagBorromean { .. } | RctPrunable::MlsagBorromean { .. } => {} - RctPrunable::MlsagBulletproofs { bulletproof, .. } | - RctPrunable::MlsagBulletproofsCompactAmount { bulletproof, .. } => { - assert!(bulletproof.batch_verify( - &mut rand_core::OsRng, - &mut batch, - &proofs.base.commitments - )); - } - RctPrunable::Clsag { bulletproof, clsags, pseudo_outs } => { - assert!(bulletproof.batch_verify( - &mut rand_core::OsRng, - &mut batch, - &proofs.base.commitments - )); - - for (i, clsag) in clsags.iter().enumerate() { - let (amount, key_offsets, image) = match &prefix.inputs[i] { - Input::Gen(_) => panic!("Input::Gen"), - Input::ToKey { amount, key_offsets, key_image } => { - (amount, key_offsets, key_image) - } - }; - - let mut running_sum = 0; - let mut actual_indexes = vec![]; - for offset in key_offsets { - running_sum += offset; - actual_indexes.push(running_sum); - } - - async fn get_outs( - rpc: &impl Rpc, - amount: u64, - indexes: &[u64], - ) -> Vec<[EdwardsPoint; 2]> { - #[derive(Deserialize, Debug)] - struct Out { - key: String, - mask: String, - } - - #[derive(Deserialize, Debug)] - struct Outs { - outs: Vec, - } - - let outs: Outs = loop { - match rpc - .rpc_call( - "get_outs", - Some(json!({ - "get_txid": true, - "outputs": indexes.iter().map(|o| json!({ - "amount": amount, - "index": o - })).collect::>() - })), - ) - .await - { - Ok(outs) => break outs, - Err(RpcError::ConnectionError(e)) => { - println!("get_outs ConnectionError: {e}"); - continue; - } - Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"), - } - }; - - let rpc_point = |point: &str| { - decompress_point( - hex::decode(point) - .expect("invalid hex for ring member") - .try_into() - .expect("invalid point len for ring member"), - ) - .expect("invalid point for ring member") - }; - - outs - .outs - .iter() - .map(|out| { - let mask = rpc_point(&out.mask); - if amount != 0 { - assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate()); - } - [rpc_point(&out.key), mask] - }) - .collect() - } - - clsag - .verify( - &get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await, - image, - &pseudo_outs[i], - &sig_hash, - ) - .unwrap(); - } - } - } - } - } - } - assert!(batch.verify()); - } - - println!("Deserialized, hashed, and reserialized {block_i} with {txs_len} TXs"); -} - -#[tokio::main] -async fn main() { - let args = std::env::args().collect::>(); - - // Read start block as the first arg - let mut block_i = - args.get(1).expect("no start block specified").parse::().expect("invalid start block"); - - // How many blocks to work on at once - let async_parallelism: usize = - args.get(2).unwrap_or(&"8".to_string()).parse::().expect("invalid parallelism argument"); - - // Read further args as RPC URLs - let default_nodes = vec![ - "http://xmr-node-uk.cakewallet.com:18081".to_string(), - "http://xmr-node-eu.cakewallet.com:18081".to_string(), - ]; - let mut specified_nodes = vec![]; - { - let mut i = 0; - loop { - let Some(node) = args.get(3 + i) else { break }; - specified_nodes.push(node.clone()); - i += 1; - } - } - let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes }; - - let rpc = |url: String| async move { 
- SimpleRequestRpc::new(url.clone()) - .await - .unwrap_or_else(|_| panic!("couldn't create SimpleRequestRpc connected to {url}")) - }; - let main_rpc = rpc(nodes[0].clone()).await; - let mut rpcs = vec![]; - for i in 0 .. async_parallelism { - rpcs.push(rpc(nodes[i % nodes.len()].clone()).await); - } - - let mut rpc_i = 0; - let mut handles: Vec> = vec![]; - let mut height = 0; - loop { - let new_height = main_rpc.get_height().await.expect("couldn't call get_height"); - if new_height == height { - break; - } - height = new_height; - - while block_i < height { - if handles.len() >= async_parallelism { - // Guarantee one handle is complete - handles.swap_remove(0).await.unwrap(); - - // Remove all of the finished handles - let mut i = 0; - while i < handles.len() { - if handles[i].is_finished() { - handles.swap_remove(i).await.unwrap(); - continue; - } - i += 1; - } - } - - handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i))); - rpc_i = (rpc_i + 1) % rpcs.len(); - block_i += 1; - } - } -} diff --git a/networks/monero/wallet/Cargo.toml b/networks/monero/wallet/Cargo.toml deleted file mode 100644 index c0c34606..00000000 --- a/networks/monero/wallet/Cargo.toml +++ /dev/null @@ -1,81 +0,0 @@ -[package] -name = "monero-wallet" -version = "0.1.0" -description = "Wallet functionality for the Monero protocol, built around monero-serai" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.82" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] -rust-version = "1.80" - -[package.metadata.cargo-machete] -ignored = ["monero-clsag"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false } - -thiserror = { version = "1", default-features = false, optional = true } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -rand_core = { version = "0.6", default-features = false } -# Used to send transactions -rand = { version = "0.8", default-features = false } -rand_chacha = { version = "0.3", default-features = false } -# Used to select decoys -rand_distr = { version = "0.4", default-features = false } - -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "group"] } - -# Multisig dependencies -transcript = { package = "flexible-transcript", path = "../../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true } -group = { version = "0.13", default-features = false, optional = true } -dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false, optional = true } -frost = { package = "modular-frost", path = "../../../crypto/frost", default-features = false, features = ["ed25519"], optional = true } - -hex = { version = "0.4", default-features = false, features = ["alloc"] } - -monero-clsag = { path = "../ringct/clsag", default-features = false } -monero-serai = { path = "..", default-features = false } -monero-rpc = { path = "../rpc", default-features = false } -monero-address = { path = "./address", default-features = false } - -[dev-dependencies] -serde = { version = "1", default-features = false, features = ["derive", "alloc", "std"] } -serde_json = { version = "1", default-features = false, features = ["alloc", "std"] } - -frost = { package = "modular-frost", path = 
"../../../crypto/frost", default-features = false, features = ["ed25519", "tests"] } - -tokio = { version = "1", features = ["sync", "macros"] } - -monero-simple-request-rpc = { path = "../rpc/simple-request", default-features = false } - -[features] -std = [ - "std-shims/std", - - "thiserror", - - "zeroize/std", - - "rand_core/std", - "rand/std", - "rand_chacha/std", - "rand_distr/std", - - "monero-clsag/std", - "monero-serai/std", - "monero-rpc/std", - "monero-address/std", -] -compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-serai/compile-time-generators"] -multisig = ["std", "transcript", "group", "dalek-ff-group", "frost", "monero-clsag/multisig"] -default = ["std", "compile-time-generators"] diff --git a/networks/monero/wallet/LICENSE b/networks/monero/wallet/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/wallet/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/wallet/README.md b/networks/monero/wallet/README.md deleted file mode 100644 index d88a56d9..00000000 --- a/networks/monero/wallet/README.md +++ /dev/null @@ -1,58 +0,0 @@ -# Monero Wallet - -Wallet functionality for the Monero protocol, built around monero-serai. This -library prides itself on resolving common pit falls developers may face. - -monero-wallet also offers a FROST-inspired multisignature protocol orders of -magnitude more performant than Monero's own. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Features - -- Scanning Monero transactions -- Sending Monero transactions -- Sending Monero transactions with a FROST-inspired threshold multisignature - protocol, orders of magnitude more performant than Monero's own - -### Caveats - -This library DOES attempt to do the following: - -- Create on-chain transactions identical to how wallet2 would (unless told not - to) -- Not be detectable as monero-serai when scanning outputs -- Not reveal spent outputs to the connected RPC node - -This library DOES NOT attempt to do the following: - -- Have identical RPC behavior when creating transactions -- Be a wallet - -This means that monero-serai shouldn't be fingerprintable on-chain. It also -shouldn't be fingerprintable if a targeted attack occurs to detect if the -receiving wallet is monero-serai or wallet2. It also should be generally safe -for usage with remote nodes. 
- -It won't hide from remote nodes it's monero-serai however, potentially -allowing a remote node to profile you. The implications of this are left to the -user to consider. - -It also won't act as a wallet, just as a wallet functionality library. wallet2 -has several *non-transaction-level* policies, such as always attempting to use -two inputs to create transactions. These are considered out of scope to -monero-serai. - -Finally, this library only supports producing transactions with CLSAG -signatures. That means this library cannot spend non-RingCT outputs. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). -- `compile-time-generators` (on by default): Derives the generators at - compile-time so they don't need to be derived at runtime. This is recommended - if program size doesn't need to be kept minimal. -- `multisig`: Adds support for creation of transactions using a threshold - multisignature wallet. diff --git a/networks/monero/wallet/address/Cargo.toml b/networks/monero/wallet/address/Cargo.toml deleted file mode 100644 index a86ff73c..00000000 --- a/networks/monero/wallet/address/Cargo.toml +++ /dev/null @@ -1,49 +0,0 @@ -[package] -name = "monero-address" -version = "0.1.0" -description = "Rust implementation of Monero addresses" -license = "MIT" -repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/address" -authors = ["Luke Parker "] -edition = "2021" -rust-version = "1.80" - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false } - -thiserror = { version = "1", default-features = false, optional = true } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - -curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } - -monero-io = { path = "../../io", default-features = false } -monero-primitives = { path = "../../primitives", default-features = false } - -[dev-dependencies] -rand_core = { version = "0.6", default-features = false, features = ["std"] } - -hex-literal = { version = "0.4", default-features = false } -hex = { version = "0.4", default-features = false, features = ["alloc"] } - -serde = { version = "1", default-features = false, features = ["derive", "alloc"] } -serde_json = { version = "1", default-features = false, features = ["alloc"] } - -[features] -std = [ - "std-shims/std", - - "thiserror", - - "zeroize/std", - - "monero-io/std", -] -default = ["std"] diff --git a/networks/monero/wallet/address/LICENSE b/networks/monero/wallet/address/LICENSE deleted file mode 100644 index 91d893c1..00000000 --- a/networks/monero/wallet/address/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022-2024 Luke Parker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/networks/monero/wallet/address/README.md b/networks/monero/wallet/address/README.md deleted file mode 100644 index 8fe3b77d..00000000 --- a/networks/monero/wallet/address/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Monero Address - -Rust implementation of Monero addresses. - -This library is usable under no-std when the `std` feature (on by default) is -disabled. - -### Cargo Features - -- `std` (on by default): Enables `std` (and with it, more efficient internal - implementations). diff --git a/networks/monero/wallet/address/src/base58check.rs b/networks/monero/wallet/address/src/base58check.rs deleted file mode 100644 index 08113bbe..00000000 --- a/networks/monero/wallet/address/src/base58check.rs +++ /dev/null @@ -1,107 +0,0 @@ -use std_shims::{vec::Vec, string::String}; - -use monero_primitives::keccak256; - -const ALPHABET_LEN: u64 = 58; -const ALPHABET: &[u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; - -pub(crate) const BLOCK_LEN: usize = 8; -const ENCODED_BLOCK_LEN: usize = 11; - -const CHECKSUM_LEN: usize = 4; - -// The maximum possible length of an encoding of this many bytes -// -// This is used for determining padding/how many bytes an encoding actually uses -pub(crate) fn encoded_len_for_bytes(bytes: usize) -> usize { - let bits = u64::try_from(bytes).expect("length exceeded 2**64") * 8; - let mut max = if bits == 64 { u64::MAX } else { (1 << bits) - 1 }; - - let mut i = 0; - while max != 0 { - max /= ALPHABET_LEN; - i += 1; - } - i -} - -// Encode an arbitrary-length stream of data -pub(crate) fn encode(bytes: &[u8]) -> String { - let mut res = String::with_capacity(bytes.len().div_ceil(BLOCK_LEN) * ENCODED_BLOCK_LEN); - - for chunk in bytes.chunks(BLOCK_LEN) { - // Convert to a u64 - let mut fixed_len_chunk = [0; BLOCK_LEN]; - fixed_len_chunk[(BLOCK_LEN - chunk.len()) ..].copy_from_slice(chunk); - let mut val = u64::from_be_bytes(fixed_len_chunk); - - // Convert to the base58 encoding - let mut chunk_str = [char::from(ALPHABET[0]); ENCODED_BLOCK_LEN]; - let mut i = 0; - while val > 0 { - chunk_str[i] = ALPHABET[usize::try_from(val % ALPHABET_LEN) - .expect("ALPHABET_LEN exceeds usize despite being a usize")] - .into(); - i += 1; - val /= ALPHABET_LEN; - } - - // Only take used bytes, and since we put the LSBs in the first byte, reverse the byte order - for c in chunk_str.into_iter().take(encoded_len_for_bytes(chunk.len())).rev() { - res.push(c); - } - } - - res -} - -// Decode an arbitrary-length stream of data -pub(crate) fn decode(data: &str) -> Option> { - let mut res = Vec::with_capacity((data.len() / ENCODED_BLOCK_LEN) * BLOCK_LEN); - - for chunk in data.as_bytes().chunks(ENCODED_BLOCK_LEN) { - // Convert the chunk back to a u64 - let mut sum = 0u64; - for this_char in chunk { - sum = sum.checked_mul(ALPHABET_LEN)?; - sum += u64::try_from(ALPHABET.iter().position(|a| a == this_char)?) 
- .expect("alphabet len exceeded 2**64"); - } - - // From the size of the encoding, determine the size of the bytes - let mut used_bytes = None; - for i in 1 ..= BLOCK_LEN { - if encoded_len_for_bytes(i) == chunk.len() { - used_bytes = Some(i); - break; - } - } - let used_bytes = used_bytes - .expect("chunk of bounded length exhaustively searched but couldn't find matching length"); - // Only push on the used bytes - res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes) ..]); - } - - Some(res) -} - -// Encode an arbitrary-length stream of data, with a checksum -pub(crate) fn encode_check(mut data: Vec) -> String { - let checksum = keccak256(&data); - data.extend(&checksum[.. CHECKSUM_LEN]); - encode(&data) -} - -// Decode an arbitrary-length stream of data, with a checksum -pub(crate) fn decode_check(data: &str) -> Option> { - let mut res = decode(data)?; - if res.len() < CHECKSUM_LEN { - None?; - } - let checksum_pos = res.len() - CHECKSUM_LEN; - if keccak256(&res[.. checksum_pos])[.. CHECKSUM_LEN] != res[checksum_pos ..] { - None?; - } - res.truncate(checksum_pos); - Some(res) -} diff --git a/networks/monero/wallet/address/src/lib.rs b/networks/monero/wallet/address/src/lib.rs deleted file mode 100644 index cc463630..00000000 --- a/networks/monero/wallet/address/src/lib.rs +++ /dev/null @@ -1,505 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use core::fmt::{self, Write}; -use std_shims::{ - vec, - string::{String, ToString}, -}; - -use zeroize::Zeroize; - -use curve25519_dalek::EdwardsPoint; - -use monero_io::*; - -mod base58check; -use base58check::{encode_check, decode_check}; - -#[cfg(test)] -mod tests; - -/// The address type. -/// -/// The officially specified addresses are supported, along with -/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789). -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub enum AddressType { - /// A legacy address type. - Legacy, - /// A legacy address with a payment ID embedded. - LegacyIntegrated([u8; 8]), - /// A subaddress. - /// - /// This is what SHOULD be used if specific functionality isn't needed. - Subaddress, - /// A featured address. - /// - /// Featured Addresses are an unofficial address specification which is meant to be extensible - /// and support a variety of functionality. This functionality includes being a subaddresses AND - /// having a payment ID, along with being immune to the burning bug. - /// - /// At this time, support for featured addresses is limited to this crate. There should be no - /// expectation of interoperability. - Featured { - /// If this address is a subaddress. - subaddress: bool, - /// The payment ID associated with this address. - payment_id: Option<[u8; 8]>, - /// If this address is guaranteed. - /// - /// A guaranteed address is one where any outputs scanned to it are guaranteed to be spendable - /// under the hardness of various cryptographic problems (which are assumed hard). This is via - /// a modified shared-key derivation which eliminates the burning bug. - guaranteed: bool, - }, -} - -impl AddressType { - /// If this address is a subaddress. - pub fn is_subaddress(&self) -> bool { - matches!(self, AddressType::Subaddress) || - matches!(self, AddressType::Featured { subaddress: true, .. }) - } - - /// The payment ID within this address. 
- pub fn payment_id(&self) -> Option<[u8; 8]> { - if let AddressType::LegacyIntegrated(id) = self { - Some(*id) - } else if let AddressType::Featured { payment_id, .. } = self { - *payment_id - } else { - None - } - } - - /// If this address is guaranteed. - /// - /// A guaranteed address is one where any outputs scanned to it are guaranteed to be spendable - /// under the hardness of various cryptographic problems (which are assumed hard). This is via - /// a modified shared-key derivation which eliminates the burning bug. - pub fn is_guaranteed(&self) -> bool { - matches!(self, AddressType::Featured { guaranteed: true, .. }) - } -} - -/// A subaddress index. -/// -/// Subaddresses are derived from a root using a `(account, address)` tuple as an index. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub struct SubaddressIndex { - account: u32, - address: u32, -} - -impl SubaddressIndex { - /// Create a new SubaddressIndex. - pub const fn new(account: u32, address: u32) -> Option { - if (account == 0) && (address == 0) { - return None; - } - Some(SubaddressIndex { account, address }) - } - - /// Get the account this subaddress index is under. - pub const fn account(&self) -> u32 { - self.account - } - - /// Get the address this subaddress index is for, within its account. - pub const fn address(&self) -> u32 { - self.address - } -} - -/// Bytes used as prefixes when encoding addresses. -/// -/// These distinguish the address's type. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub struct AddressBytes { - legacy: u8, - legacy_integrated: u8, - subaddress: u8, - featured: u8, -} - -impl AddressBytes { - /// Create a new set of address bytes, one for each address type. - pub const fn new( - legacy: u8, - legacy_integrated: u8, - subaddress: u8, - featured: u8, - ) -> Option { - if (legacy == legacy_integrated) || (legacy == subaddress) || (legacy == featured) { - return None; - } - if (legacy_integrated == subaddress) || (legacy_integrated == featured) { - return None; - } - if subaddress == featured { - return None; - } - Some(AddressBytes { legacy, legacy_integrated, subaddress, featured }) - } - - const fn to_const_generic(self) -> u32 { - ((self.legacy as u32) << 24) + - ((self.legacy_integrated as u32) << 16) + - ((self.subaddress as u32) << 8) + - (self.featured as u32) - } - - #[allow(clippy::cast_possible_truncation)] - const fn from_const_generic(const_generic: u32) -> Self { - let legacy = (const_generic >> 24) as u8; - let legacy_integrated = ((const_generic >> 16) & (u8::MAX as u32)) as u8; - let subaddress = ((const_generic >> 8) & (u8::MAX as u32)) as u8; - let featured = (const_generic & (u8::MAX as u32)) as u8; - - AddressBytes { legacy, legacy_integrated, subaddress, featured } - } -} - -// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 -// /src/cryptonote_config.h#L216-L225 -// https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789 for featured -const MONERO_MAINNET_BYTES: AddressBytes = match AddressBytes::new(18, 19, 42, 70) { - Some(bytes) => bytes, - None => panic!("mainnet byte constants conflicted"), -}; -// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 -// /src/cryptonote_config.h#L277-L281 -const MONERO_STAGENET_BYTES: AddressBytes = match AddressBytes::new(24, 25, 36, 86) { - Some(bytes) => bytes, - None => panic!("stagenet byte constants conflicted"), -}; -// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 -// 
/src/cryptonote_config.h#L262-L266 -const MONERO_TESTNET_BYTES: AddressBytes = match AddressBytes::new(53, 54, 63, 111) { - Some(bytes) => bytes, - None => panic!("testnet byte constants conflicted"), -}; - -/// The network this address is for. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub enum Network { - /// A mainnet address. - Mainnet, - /// A stagenet address. - /// - /// Stagenet maintains parity with mainnet and is useful for testing integrations accordingly. - Stagenet, - /// A testnet address. - /// - /// Testnet is used to test new consensus rules and functionality. - Testnet, -} - -/// Errors when decoding an address. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum AddressError { - /// The address had an invalid (network, type) byte. - #[cfg_attr(feature = "std", error("invalid byte for the address's network/type ({0})"))] - InvalidTypeByte(u8), - /// The address wasn't a valid Base58Check (as defined by Monero) string. - #[cfg_attr(feature = "std", error("invalid address encoding"))] - InvalidEncoding, - /// The data encoded wasn't the proper length. - #[cfg_attr(feature = "std", error("invalid length"))] - InvalidLength, - /// The address had an invalid key. - #[cfg_attr(feature = "std", error("invalid key"))] - InvalidKey, - /// The address was featured with unrecognized features. - #[cfg_attr(feature = "std", error("unknown features"))] - UnknownFeatures(u64), - /// The network was for a different network than expected. - #[cfg_attr( - feature = "std", - error("different network ({actual:?}) than expected ({expected:?})") - )] - DifferentNetwork { - /// The Network expected. - expected: Network, - /// The Network embedded within the Address. - actual: Network, - }, -} - -/// Bytes used as prefixes when encoding addresses, variable to the network instance. -/// -/// These distinguish the address's network and type. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub struct NetworkedAddressBytes { - mainnet: AddressBytes, - stagenet: AddressBytes, - testnet: AddressBytes, -} - -impl NetworkedAddressBytes { - /// Create a new set of address bytes, one for each network. - pub const fn new( - mainnet: AddressBytes, - stagenet: AddressBytes, - testnet: AddressBytes, - ) -> Option { - let res = NetworkedAddressBytes { mainnet, stagenet, testnet }; - let all_bytes = res.to_const_generic(); - - let mut i = 0; - while i < 12 { - let this_byte = (all_bytes >> (32 + (i * 8))) & (u8::MAX as u128); - - let mut j = 0; - while j < 12 { - if i == j { - j += 1; - continue; - } - let other_byte = (all_bytes >> (32 + (j * 8))) & (u8::MAX as u128); - if this_byte == other_byte { - return None; - } - - j += 1; - } - - i += 1; - } - - Some(res) - } - - /// Convert this set of address bytes to its representation as a u128. - /// - /// We cannot use this struct directly as a const generic unfortunately. 
- pub const fn to_const_generic(self) -> u128 { - ((self.mainnet.to_const_generic() as u128) << 96) + - ((self.stagenet.to_const_generic() as u128) << 64) + - ((self.testnet.to_const_generic() as u128) << 32) - } - - #[allow(clippy::cast_possible_truncation)] - const fn from_const_generic(const_generic: u128) -> Self { - let mainnet = AddressBytes::from_const_generic((const_generic >> 96) as u32); - let stagenet = - AddressBytes::from_const_generic(((const_generic >> 64) & (u32::MAX as u128)) as u32); - let testnet = - AddressBytes::from_const_generic(((const_generic >> 32) & (u32::MAX as u128)) as u32); - - NetworkedAddressBytes { mainnet, stagenet, testnet } - } - - fn network(&self, network: Network) -> &AddressBytes { - match network { - Network::Mainnet => &self.mainnet, - Network::Stagenet => &self.stagenet, - Network::Testnet => &self.testnet, - } - } - - fn byte(&self, network: Network, kind: AddressType) -> u8 { - let address_bytes = self.network(network); - - match kind { - AddressType::Legacy => address_bytes.legacy, - AddressType::LegacyIntegrated(_) => address_bytes.legacy_integrated, - AddressType::Subaddress => address_bytes.subaddress, - AddressType::Featured { .. } => address_bytes.featured, - } - } - - // This will return an incomplete AddressType for LegacyIntegrated/Featured. - fn metadata_from_byte(&self, byte: u8) -> Result<(Network, AddressType), AddressError> { - let mut meta = None; - for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] { - let address_bytes = self.network(network); - if let Some(kind) = match byte { - _ if byte == address_bytes.legacy => Some(AddressType::Legacy), - _ if byte == address_bytes.legacy_integrated => Some(AddressType::LegacyIntegrated([0; 8])), - _ if byte == address_bytes.subaddress => Some(AddressType::Subaddress), - _ if byte == address_bytes.featured => { - Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false }) - } - _ => None, - } { - meta = Some((network, kind)); - break; - } - } - - meta.ok_or(AddressError::InvalidTypeByte(byte)) - } -} - -/// The bytes used for distinguishing Monero addresses. -pub const MONERO_BYTES: NetworkedAddressBytes = match NetworkedAddressBytes::new( - MONERO_MAINNET_BYTES, - MONERO_STAGENET_BYTES, - MONERO_TESTNET_BYTES, -) { - Some(bytes) => bytes, - None => panic!("Monero network byte constants conflicted"), -}; - -/// A Monero address. -#[derive(Clone, Copy, PartialEq, Eq, Zeroize)] -pub struct Address { - network: Network, - kind: AddressType, - spend: EdwardsPoint, - view: EdwardsPoint, -} - -impl fmt::Debug for Address { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - let hex = |bytes: &[u8]| -> Result { - let mut res = String::with_capacity(2 + (2 * bytes.len())); - res.push_str("0x"); - for b in bytes { - write!(&mut res, "{b:02x}")?; - } - Ok(res) - }; - - fmt - .debug_struct("Address") - .field("network", &self.network) - .field("kind", &self.kind) - .field("spend", &hex(&self.spend.compress().to_bytes())?) - .field("view", &hex(&self.view.compress().to_bytes())?) 
- // This is not a real field yet is the most valuable thing to know when debugging - .field("(address)", &self.to_string()) - .finish() - } -} - -impl fmt::Display for Address { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let address_bytes: NetworkedAddressBytes = - NetworkedAddressBytes::from_const_generic(ADDRESS_BYTES); - - let mut data = vec![address_bytes.byte(self.network, self.kind)]; - data.extend(self.spend.compress().to_bytes()); - data.extend(self.view.compress().to_bytes()); - if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind { - let features_uint = - (u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress); - write_varint(&features_uint, &mut data) - .expect("write failed but doesn't fail"); - } - if let Some(id) = self.kind.payment_id() { - data.extend(id); - } - write!(f, "{}", encode_check(data)) - } -} - -impl Address { - /// Create a new address. - pub fn new(network: Network, kind: AddressType, spend: EdwardsPoint, view: EdwardsPoint) -> Self { - Address { network, kind, spend, view } - } - - /// Parse an address from a String, accepting any network it is. - pub fn from_str_with_unchecked_network(s: &str) -> Result { - let raw = decode_check(s).ok_or(AddressError::InvalidEncoding)?; - let mut raw = raw.as_slice(); - - let address_bytes: NetworkedAddressBytes = - NetworkedAddressBytes::from_const_generic(ADDRESS_BYTES); - let (network, mut kind) = address_bytes - .metadata_from_byte(read_byte(&mut raw).map_err(|_| AddressError::InvalidLength)?)?; - let spend = read_point(&mut raw).map_err(|_| AddressError::InvalidKey)?; - let view = read_point(&mut raw).map_err(|_| AddressError::InvalidKey)?; - - if matches!(kind, AddressType::Featured { .. }) { - let features = read_varint::<_, u64>(&mut raw).map_err(|_| AddressError::InvalidLength)?; - if (features >> 3) != 0 { - Err(AddressError::UnknownFeatures(features))?; - } - - let subaddress = (features & 1) == 1; - let integrated = ((features >> 1) & 1) == 1; - let guaranteed = ((features >> 2) & 1) == 1; - - kind = - AddressType::Featured { subaddress, payment_id: integrated.then_some([0; 8]), guaranteed }; - } - - // Read the payment ID, if there should be one - match kind { - AddressType::LegacyIntegrated(ref mut id) | - AddressType::Featured { payment_id: Some(ref mut id), .. } => { - *id = read_bytes(&mut raw).map_err(|_| AddressError::InvalidLength)?; - } - _ => {} - }; - - if !raw.is_empty() { - Err(AddressError::InvalidLength)?; - } - - Ok(Address { network, kind, spend, view }) - } - - /// Create a new address from a `&str`. - /// - /// This takes in an argument for the expected network, erroring if a distinct network was used. - /// It also errors if the address is invalid (as expected). - pub fn from_str(network: Network, s: &str) -> Result { - Self::from_str_with_unchecked_network(s).and_then(|addr| { - if addr.network == network { - Ok(addr) - } else { - Err(AddressError::DifferentNetwork { actual: addr.network, expected: network })? - } - }) - } - - /// The network this address is intended for use on. - pub fn network(&self) -> Network { - self.network - } - - /// The type of address this is. - pub fn kind(&self) -> &AddressType { - &self.kind - } - - /// If this is a subaddress. - pub fn is_subaddress(&self) -> bool { - self.kind.is_subaddress() - } - - /// The payment ID for this address. - pub fn payment_id(&self) -> Option<[u8; 8]> { - self.kind.payment_id() - } - - /// If this address is guaranteed. 
- /// - /// A guaranteed address is one where any outputs scanned to it are guaranteed to be spendable - /// under the hardness of various cryptographic problems (which are assumed hard). This is via - /// a modified shared-key derivation which eliminates the burning bug. - pub fn is_guaranteed(&self) -> bool { - self.kind.is_guaranteed() - } - - /// The public spend key for this address. - pub fn spend(&self) -> EdwardsPoint { - self.spend - } - - /// The public view key for this address. - pub fn view(&self) -> EdwardsPoint { - self.view - } -} - -/// Instantiation of the Address type with Monero's network bytes. -pub type MoneroAddress = Address<{ MONERO_BYTES.to_const_generic() }>; diff --git a/networks/monero/wallet/address/src/tests.rs b/networks/monero/wallet/address/src/tests.rs deleted file mode 100644 index 2804832a..00000000 --- a/networks/monero/wallet/address/src/tests.rs +++ /dev/null @@ -1,205 +0,0 @@ -use hex_literal::hex; - -use rand_core::{RngCore, OsRng}; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar}; - -use monero_io::decompress_point; - -use crate::{Network, AddressType, MoneroAddress}; - -const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7"); -const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce"); - -const STANDARD: &str = - "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey"; - -const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f"); -const INTEGRATED: &str = - "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\ - pXSn88oBX35"; - -const SUB_SPEND: [u8; 32] = - hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b"); -const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470"); -const SUBADDRESS: &str = - "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB"; - -const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json"); - -#[test] -fn test_encoded_len_for_bytes() { - // For an encoding of length `l`, we prune to the amount of bytes which encodes with length `l` - // This assumes length `l` -> amount of bytes has a singular answer, which is tested here - use crate::base58check::*; - let mut set = std::collections::HashSet::new(); - for i in 0 .. 
BLOCK_LEN { - set.insert(encoded_len_for_bytes(i)); - } - assert_eq!(set.len(), BLOCK_LEN); -} - -#[test] -fn base58check() { - use crate::base58check::*; - - assert_eq!(encode(&[]), String::new()); - assert!(decode("").unwrap().is_empty()); - - let full_block = &[1, 2, 3, 4, 5, 6, 7, 8]; - assert_eq!(&decode(&encode(full_block)).unwrap(), full_block); - - let partial_block = &[1, 2, 3]; - assert_eq!(&decode(&encode(partial_block)).unwrap(), partial_block); - - let max_encoded_block = &[u8::MAX; 8]; - assert_eq!(&decode(&encode(max_encoded_block)).unwrap(), max_encoded_block); - - let max_decoded_block = "zzzzzzzzzzz"; - assert!(decode(max_decoded_block).is_none()); - - let full_and_partial_block = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]; - assert_eq!(&decode(&encode(full_and_partial_block)).unwrap(), full_and_partial_block); -} - -#[test] -fn standard_address() { - let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap(); - assert_eq!(addr.network(), Network::Mainnet); - assert_eq!(addr.kind(), &AddressType::Legacy); - assert!(!addr.is_subaddress()); - assert_eq!(addr.payment_id(), None); - assert!(!addr.is_guaranteed()); - assert_eq!(addr.spend.compress().to_bytes(), SPEND); - assert_eq!(addr.view.compress().to_bytes(), VIEW); - assert_eq!(addr.to_string(), STANDARD); -} - -#[test] -fn integrated_address() { - let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap(); - assert_eq!(addr.network(), Network::Mainnet); - assert_eq!(addr.kind(), &AddressType::LegacyIntegrated(PAYMENT_ID)); - assert!(!addr.is_subaddress()); - assert_eq!(addr.payment_id(), Some(PAYMENT_ID)); - assert!(!addr.is_guaranteed()); - assert_eq!(addr.spend.compress().to_bytes(), SPEND); - assert_eq!(addr.view.compress().to_bytes(), VIEW); - assert_eq!(addr.to_string(), INTEGRATED); -} - -#[test] -fn subaddress() { - let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap(); - assert_eq!(addr.network(), Network::Mainnet); - assert_eq!(addr.kind(), &AddressType::Subaddress); - assert!(addr.is_subaddress()); - assert_eq!(addr.payment_id(), None); - assert!(!addr.is_guaranteed()); - assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND); - assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW); - assert_eq!(addr.to_string(), SUBADDRESS); -} - -#[test] -fn featured() { - for (network, first) in - [(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')] - { - for _ in 0 .. 100 { - let spend = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE; - let view = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE; - - for features in 0 .. 
(1 << 3) { - const SUBADDRESS_FEATURE_BIT: u8 = 1; - const INTEGRATED_FEATURE_BIT: u8 = 1 << 1; - const GUARANTEED_FEATURE_BIT: u8 = 1 << 2; - - let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT; - - let mut payment_id = [0; 8]; - OsRng.fill_bytes(&mut payment_id); - let payment_id = Some(payment_id) - .filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT); - - let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT; - - let kind = AddressType::Featured { subaddress, payment_id, guaranteed }; - let addr = MoneroAddress::new(network, kind, spend, view); - - assert_eq!(addr.to_string().chars().next().unwrap(), first); - assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr); - - assert_eq!(addr.spend, spend); - assert_eq!(addr.view, view); - - assert_eq!(addr.is_subaddress(), subaddress); - assert_eq!(addr.payment_id(), payment_id); - assert_eq!(addr.is_guaranteed(), guaranteed); - } - } - } -} - -#[test] -fn featured_vectors() { - #[derive(serde::Deserialize)] - struct Vector { - address: String, - - network: String, - spend: String, - view: String, - - subaddress: bool, - integrated: bool, - payment_id: Option<[u8; 8]>, - guaranteed: bool, - } - - let vectors = serde_json::from_str::>(FEATURED_JSON).unwrap(); - for vector in vectors { - let first = vector.address.chars().next().unwrap(); - let network = match vector.network.as_str() { - "Mainnet" => { - assert_eq!(first, 'C'); - Network::Mainnet - } - "Testnet" => { - assert_eq!(first, 'K'); - Network::Testnet - } - "Stagenet" => { - assert_eq!(first, 'F'); - Network::Stagenet - } - _ => panic!("Unknown network"), - }; - let spend = decompress_point(hex::decode(vector.spend).unwrap().try_into().unwrap()).unwrap(); - let view = decompress_point(hex::decode(vector.view).unwrap().try_into().unwrap()).unwrap(); - - let addr = MoneroAddress::from_str(network, &vector.address).unwrap(); - assert_eq!(addr.spend, spend); - assert_eq!(addr.view, view); - - assert_eq!(addr.is_subaddress(), vector.subaddress); - assert_eq!(vector.integrated, vector.payment_id.is_some()); - assert_eq!(addr.payment_id(), vector.payment_id); - assert_eq!(addr.is_guaranteed(), vector.guaranteed); - - assert_eq!( - MoneroAddress::new( - network, - AddressType::Featured { - subaddress: vector.subaddress, - payment_id: vector.payment_id, - guaranteed: vector.guaranteed - }, - spend, - view - ) - .to_string(), - vector.address - ); - } -} diff --git a/networks/monero/wallet/address/src/vectors/featured_addresses.json b/networks/monero/wallet/address/src/vectors/featured_addresses.json deleted file mode 100644 index dbb83fbf..00000000 --- a/networks/monero/wallet/address/src/vectors/featured_addresses.json +++ /dev/null @@ -1,230 +0,0 @@ -[ - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3pYyUDn", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": false, - "integrated": false, - "guaranteed": false - }, - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3wfMHCy", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": true, - "integrated": false, - "guaranteed": false - }, - { - 
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJTo4p5ayvj36PStM5AX", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": false, - "integrated": true, - "payment_id": [46, 48, 134, 34, 245, 148, 243, 195], - "guaranteed": false - }, - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJWv5WqMCNE2hRs9rJfy", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": true, - "integrated": true, - "payment_id": [153, 176, 98, 204, 151, 27, 197, 168], - "guaranteed": false - }, - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4DwqwH1", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": false, - "integrated": false, - "guaranteed": true - }, - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4Pyz8bD", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": true, - "integrated": false, - "guaranteed": true - }, - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJcwt7hykou237MqZZDA", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": false, - "integrated": true, - "payment_id": [88, 37, 149, 111, 171, 108, 120, 181], - "guaranteed": true - }, - { - "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJfTrFAp69u2MYbf5YeN", - "network": "Mainnet", - "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c", - "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df", - "subaddress": true, - "integrated": true, - "payment_id": [125, 69, 155, 152, 140, 160, 157, 186], - "guaranteed": true - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712U9w7ScYA", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": false, - "integrated": false, - "guaranteed": false - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UA2gCrT1", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": true, - "integrated": false, - "guaranteed": false - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc1DbPKwJu81cxJjqBkS", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": 
false, - "integrated": true, - "payment_id": [92, 225, 118, 220, 39, 3, 72, 51], - "guaranteed": false - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc2o1rPMaXN31Fe5J6dn", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": true, - "integrated": true, - "payment_id": [20, 120, 47, 89, 72, 165, 233, 115], - "guaranteed": false - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAQHCRZ4", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": false, - "integrated": false, - "guaranteed": true - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAUzqaii", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": true, - "integrated": false, - "guaranteed": true - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcAsfQc3gJQ2gHLd5DiQ", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": false, - "integrated": true, - "payment_id": [193, 149, 123, 214, 180, 205, 195, 91], - "guaranteed": true - }, - { - "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcDBAD5jbZQ3AMHFyvQB", - "network": "Testnet", - "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a", - "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab", - "subaddress": true, - "integrated": true, - "payment_id": [205, 170, 65, 0, 51, 175, 251, 184], - "guaranteed": true - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPJnBtTP", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": false, - "integrated": false, - "guaranteed": false - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPUrwMvP", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": true, - "integrated": false, - "guaranteed": false - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY5ECEhP5Nr1aCRPXdxk", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": false, - "integrated": true, - "payment_id": [173, 149, 78, 64, 215, 211, 66, 170], - "guaranteed": false - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY882kTUS1D2LttnPvTR", - "network": "Stagenet", - "spend": 
"4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": true, - "integrated": true, - "payment_id": [254, 159, 186, 162, 1, 8, 156, 108], - "guaranteed": false - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPpBBo8F", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": false, - "integrated": false, - "guaranteed": true - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPuUJX3b", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": true, - "integrated": false, - "guaranteed": true - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYCZPxVAoDu21DryMoto", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": false, - "integrated": true, - "payment_id": [3, 115, 230, 129, 172, 108, 116, 235], - "guaranteed": true - }, - { - "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYFYCqKQAWL18KkpBQ8R", - "network": "Stagenet", - "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151", - "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39", - "subaddress": true, - "integrated": true, - "payment_id": [94, 122, 63, 167, 209, 225, 14, 180], - "guaranteed": true - } -] diff --git a/networks/monero/wallet/src/decoys.rs b/networks/monero/wallet/src/decoys.rs deleted file mode 100644 index b13179f9..00000000 --- a/networks/monero/wallet/src/decoys.rs +++ /dev/null @@ -1,331 +0,0 @@ -use std_shims::{io, vec::Vec, string::ToString, collections::HashSet}; - -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use rand_core::{RngCore, CryptoRng}; -use rand_distr::{Distribution, Gamma}; -#[cfg(not(feature = "std"))] -use rand_distr::num_traits::Float; - -use curve25519_dalek::{Scalar, EdwardsPoint}; - -use crate::{ - DEFAULT_LOCK_WINDOW, COINBASE_LOCK_WINDOW, BLOCK_TIME, - primitives::{Commitment, Decoys}, - rpc::{RpcError, DecoyRpc}, - output::OutputData, - WalletOutput, -}; - -const RECENT_WINDOW: u64 = 15; -const BLOCKS_PER_YEAR: usize = (365 * 24 * 60 * 60) / BLOCK_TIME; -#[allow(clippy::cast_precision_loss)] -const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64; - -async fn select_n( - rng: &mut (impl RngCore + CryptoRng), - rpc: &impl DecoyRpc, - height: usize, - real_output: u64, - ring_len: u8, - fingerprintable_deterministic: bool, -) -> Result, RpcError> { - if height < DEFAULT_LOCK_WINDOW { - Err(RpcError::InternalError("not enough blocks to select decoys".to_string()))?; - } - if height > rpc.get_output_distribution_end_height().await? { - Err(RpcError::InternalError( - "decoys being requested from blocks this node doesn't have".to_string(), - ))?; - } - - // Get the distribution - let distribution = rpc.get_output_distribution(.. 
height).await?; - if distribution.len() < DEFAULT_LOCK_WINDOW { - Err(RpcError::InternalError("not enough blocks to select decoys".to_string()))?; - } - let highest_output_exclusive_bound = distribution[distribution.len() - DEFAULT_LOCK_WINDOW]; - // This assumes that each miner TX had one output (as sane) and checks we have sufficient - // outputs even when excluding them (due to their own timelock requirements) - // Considering this a temporal error for very new chains, it's sufficiently sane to have - if highest_output_exclusive_bound.saturating_sub( - u64::try_from(COINBASE_LOCK_WINDOW).expect("coinbase lock window exceeds 2^{64}"), - ) < u64::from(ring_len) - { - Err(RpcError::InternalError("not enough decoy candidates".to_string()))?; - } - - // Determine the outputs per second - #[allow(clippy::cast_precision_loss)] - let per_second = { - let blocks = distribution.len().min(BLOCKS_PER_YEAR); - let initial = distribution[distribution.len().saturating_sub(blocks + 1)]; - let outputs = distribution[distribution.len() - 1].saturating_sub(initial); - (outputs as f64) / ((blocks * BLOCK_TIME) as f64) - }; - - // Don't select the real output - let mut do_not_select = HashSet::new(); - do_not_select.insert(real_output); - - let decoy_count = usize::from(ring_len - 1); - let mut res = Vec::with_capacity(decoy_count); - - let mut iters = 0; - // Iterates until we have enough decoys - // If an iteration only returns a partial set of decoys, the remainder will be obvious as decoys - // to the RPC - // The length of that remainder is expected to be minimal - while res.len() != decoy_count { - iters += 1; - #[cfg(not(test))] - const MAX_ITERS: usize = 10; - // When testing on fresh chains, increased iterations can be useful and we don't necessitate - // reasonable performance - #[cfg(test)] - const MAX_ITERS: usize = 100; - // Ensure this isn't infinitely looping - // We check both that we aren't at the maximum amount of iterations and that the not-yet - // selected candidates exceed the amount of candidates necessary to trigger the next iteration - if (iters == MAX_ITERS) || - ((highest_output_exclusive_bound - - u64::try_from(do_not_select.len()).expect("amount of ignored decoys exceeds 2^{64}")) < - u64::from(ring_len)) - { - Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?; - } - - let remaining = decoy_count - res.len(); - let mut candidates = Vec::with_capacity(remaining); - while candidates.len() != remaining { - // Use a gamma distribution, as Monero does - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45 - // /src/wallet/wallet2.cpp#L142-L143 - let mut age = Gamma::::new(19.28, 1.0 / 1.61) - .expect("constant Gamma distribution could no longer be created") - .sample(rng) - .exp(); - #[allow(clippy::cast_precision_loss)] - if age > TIP_APPLICATION { - age -= TIP_APPLICATION; - } else { - // f64 does not have try_from available, which is why these are written with `as` - age = (rng.next_u64() % - (RECENT_WINDOW * u64::try_from(BLOCK_TIME).expect("BLOCK_TIME exceeded u64::MAX"))) - as f64; - } - - #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)] - let o = (age * per_second) as u64; - if o < highest_output_exclusive_bound { - // Find which block this points to - let i = distribution.partition_point(|s| *s < (highest_output_exclusive_bound - 1 - o)); - let prev = i.saturating_sub(1); - let n = distribution[i].checked_sub(distribution[prev]).ok_or_else(|| { - RpcError::InternalError("RPC returned 
non-monotonic distribution".to_string()) - })?; - if n != 0 { - // Select an output from within this block - let o = distribution[prev] + (rng.next_u64() % n); - if !do_not_select.contains(&o) { - candidates.push(o); - // This output will either be used or is unusable - // In either case, we should not try it again - do_not_select.insert(o); - } - } - } - } - - // If this is the first time we're requesting these outputs, include the real one as well - // Prevents the node we're connected to from having a list of known decoys and then seeing a - // TX which uses all of them, with one additional output (the true spend) - let real_index = if iters == 0 { - candidates.push(real_output); - // Sort candidates so the real spends aren't the ones at the end - candidates.sort(); - Some( - candidates - .binary_search(&real_output) - .expect("selected a ring which didn't include the real spend"), - ) - } else { - None - }; - - for (i, output) in rpc - .get_unlocked_outputs(&candidates, height, fingerprintable_deterministic) - .await? - .iter_mut() - .enumerate() - { - // We could check the returned info is equivalent to our expectations, yet that'd allow the - // node to malleate the returned info to see if they can cause this error (allowing them to - // figure out the output being spent) - // - // Some degree of this attack (forcing resampling/trying to observe errors) is likely - // always possible - if real_index == Some(i) { - continue; - } - - // If this is an unlocked output, push it to the result - if let Some(output) = output.take() { - res.push((candidates[i], output)); - } - } - } - - Ok(res) -} - -async fn select_decoys( - rng: &mut R, - rpc: &impl DecoyRpc, - ring_len: u8, - height: usize, - input: &WalletOutput, - fingerprintable_deterministic: bool, -) -> Result { - if ring_len == 0 { - Err(RpcError::InternalError("requesting a ring of length 0".to_string()))?; - } - - // Select all decoys for this transaction, assuming we generate a sane transaction - // We should almost never naturally generate an insane transaction, hence why this doesn't - // bother with an overage - let decoys = select_n( - rng, - rpc, - height, - input.relative_id.index_on_blockchain, - ring_len, - fingerprintable_deterministic, - ) - .await?; - - // Form the complete ring - let mut ring = decoys; - ring.push((input.relative_id.index_on_blockchain, [input.key(), input.commitment().calculate()])); - ring.sort_by(|a, b| a.0.cmp(&b.0)); - - /* - Monero does have sanity checks which it applies to the selected ring. - - They're statistically unlikely to be hit and only occur when the transaction is published over - the RPC (so they are not a relay rule). The RPC allows disabling them, which monero-rpc does to - ensure they don't pose a problem. - - They aren't worth the complexity to implement here, especially since they're non-deterministic. - */ - - // We need to convert our positional indexes to offset indexes - let mut offsets = Vec::with_capacity(ring.len()); - { - offsets.push(ring[0].0); - for m in 1 .. 
ring.len() { - offsets.push(ring[m].0 - ring[m - 1].0); - } - } - - Ok( - Decoys::new( - offsets, - // Binary searches for the real spend since we don't know where it sorted to - // TODO: Define our own collection whose `len` function returns `u8` to ensure this bound - // with types - u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)) - .expect("ring of size <= u8::MAX had an index exceeding u8::MAX"), - ring.into_iter().map(|output| output.1).collect(), - ) - .expect("selected a syntactically-invalid set of Decoys"), - ) -} - -/// An output with decoys selected. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)] -pub struct OutputWithDecoys { - output: OutputData, - decoys: Decoys, -} - -impl OutputWithDecoys { - /// Select decoys for this output. - pub async fn new( - rng: &mut (impl Send + Sync + RngCore + CryptoRng), - rpc: &impl DecoyRpc, - ring_len: u8, - height: usize, - output: WalletOutput, - ) -> Result { - let decoys = select_decoys(rng, rpc, ring_len, height, &output, false).await?; - Ok(OutputWithDecoys { output: output.data.clone(), decoys }) - } - - /// Select a set of decoys for this output with a deterministic process. - /// - /// This function will always output the same set of decoys when called with the same arguments. - /// This makes it very useful in multisignature contexts, where instead of having one participant - /// select the decoys, everyone can locally select the decoys while coming to the same result. - /// - /// The set of decoys selected may be fingerprintable as having been produced by this - /// methodology. - pub async fn fingerprintable_deterministic_new( - rng: &mut (impl Send + Sync + RngCore + CryptoRng), - rpc: &impl DecoyRpc, - ring_len: u8, - height: usize, - output: WalletOutput, - ) -> Result { - let decoys = select_decoys(rng, rpc, ring_len, height, &output, true).await?; - Ok(OutputWithDecoys { output: output.data.clone(), decoys }) - } - - /// The key this output may be spent by. - pub fn key(&self) -> EdwardsPoint { - self.output.key() - } - - /// The scalar to add to the private spend key for it to be the discrete logarithm of this - /// output's key. - pub fn key_offset(&self) -> Scalar { - self.output.key_offset - } - - /// The commitment this output created. - pub fn commitment(&self) -> &Commitment { - &self.output.commitment - } - - /// The decoys this output selected. - pub fn decoys(&self) -> &Decoys { - &self.decoys - } - - /// Write the OutputWithDecoys. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn write(&self, w: &mut W) -> io::Result<()> { - self.output.write(w)?; - self.decoys.write(w) - } - - /// Serialize the OutputWithDecoys to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - let mut serialized = Vec::with_capacity(128); - self.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - /// Read an OutputWithDecoys. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn read(r: &mut R) -> io::Result { - Ok(Self { output: OutputData::read(r)?, decoys: Decoys::read(r)? 
}) - } -} diff --git a/networks/monero/wallet/src/extra.rs b/networks/monero/wallet/src/extra.rs deleted file mode 100644 index 5041a3cf..00000000 --- a/networks/monero/wallet/src/extra.rs +++ /dev/null @@ -1,304 +0,0 @@ -use core::ops::BitXor; -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, BufRead, Write}, -}; - -use zeroize::Zeroize; - -use curve25519_dalek::edwards::EdwardsPoint; - -use monero_serai::io::*; - -pub(crate) const MAX_TX_EXTRA_PADDING_COUNT: usize = 255; -const MAX_TX_EXTRA_NONCE_SIZE: usize = 255; - -const PAYMENT_ID_MARKER: u8 = 0; -const ENCRYPTED_PAYMENT_ID_MARKER: u8 = 1; -// Used as it's the highest value not interpretable as a continued VarInt -pub(crate) const ARBITRARY_DATA_MARKER: u8 = 127; - -/// The max amount of data which will fit within a blob of arbitrary data. -// 1 byte is used for the marker -pub const MAX_ARBITRARY_DATA_SIZE: usize = MAX_TX_EXTRA_NONCE_SIZE - 1; - -/// A Payment ID. -/// -/// This is a legacy method of identifying why Monero was sent to the receiver. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -pub enum PaymentId { - /// A deprecated form of payment ID which is no longer supported. - Unencrypted([u8; 32]), - /// An encrypted payment ID. - Encrypted([u8; 8]), -} - -impl BitXor<[u8; 8]> for PaymentId { - type Output = PaymentId; - - fn bitxor(self, bytes: [u8; 8]) -> PaymentId { - match self { - // Don't perform the xor since this isn't intended to be encrypted with xor - PaymentId::Unencrypted(_) => self, - PaymentId::Encrypted(id) => { - PaymentId::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes()) - } - } - } -} - -impl PaymentId { - /// Write the PaymentId. - pub fn write(&self, w: &mut W) -> io::Result<()> { - match self { - PaymentId::Unencrypted(id) => { - w.write_all(&[PAYMENT_ID_MARKER])?; - w.write_all(id)?; - } - PaymentId::Encrypted(id) => { - w.write_all(&[ENCRYPTED_PAYMENT_ID_MARKER])?; - w.write_all(id)?; - } - } - Ok(()) - } - - /// Serialize the PaymentId to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(1 + 8); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read a PaymentId. - pub fn read(r: &mut R) -> io::Result { - Ok(match read_byte(r)? { - 0 => PaymentId::Unencrypted(read_bytes(r)?), - 1 => PaymentId::Encrypted(read_bytes(r)?), - _ => Err(io::Error::other("unknown payment ID type"))?, - }) - } -} - -/// A field within the TX extra. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub enum ExtraField { - /// Padding. - /// - /// This is a block of zeroes within the TX extra. - Padding(usize), - /// The transaction key. - /// - /// This is a commitment to the randomness used for deriving outputs. - PublicKey(EdwardsPoint), - /// The nonce field. - /// - /// This is used for data, such as payment IDs. - Nonce(Vec), - /// The field for merge-mining. - /// - /// This is used within miner transactions who are merge-mining Monero to specify the foreign - /// block they mined. - MergeMining(u64, [u8; 32]), - /// The additional transaction keys. - /// - /// These are the per-output commitments to the randomness used for deriving outputs. - PublicKeys(Vec), - /// The 'mysterious' Minergate tag. - /// - /// This was used by a closed source entity without documentation. Support for parsing it was - /// added to reduce extra which couldn't be decoded. - MysteriousMinergate(Vec), -} - -impl ExtraField { - /// Write the ExtraField. 
- pub fn write(&self, w: &mut W) -> io::Result<()> { - match self { - ExtraField::Padding(size) => { - w.write_all(&[0])?; - for _ in 1 .. *size { - write_byte(&0u8, w)?; - } - } - ExtraField::PublicKey(key) => { - w.write_all(&[1])?; - w.write_all(&key.compress().to_bytes())?; - } - ExtraField::Nonce(data) => { - w.write_all(&[2])?; - write_vec(write_byte, data, w)?; - } - ExtraField::MergeMining(height, merkle) => { - w.write_all(&[3])?; - write_varint(height, w)?; - w.write_all(merkle)?; - } - ExtraField::PublicKeys(keys) => { - w.write_all(&[4])?; - write_vec(write_point, keys, w)?; - } - ExtraField::MysteriousMinergate(data) => { - w.write_all(&[0xDE])?; - write_vec(write_byte, data, w)?; - } - } - Ok(()) - } - - /// Serialize the ExtraField to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(1 + 8); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - - /// Read an ExtraField. - pub fn read(r: &mut R) -> io::Result { - Ok(match read_byte(r)? { - 0 => ExtraField::Padding({ - // Read until either non-zero, max padding count, or end of buffer - let mut size: usize = 1; - loop { - let buf = r.fill_buf()?; - let mut n_consume = 0; - for v in buf { - if *v != 0u8 { - Err(io::Error::other("non-zero value after padding"))? - } - n_consume += 1; - size += 1; - if size > MAX_TX_EXTRA_PADDING_COUNT { - Err(io::Error::other("padding exceeded max count"))? - } - } - if n_consume == 0 { - break; - } - r.consume(n_consume); - } - size - }), - 1 => ExtraField::PublicKey(read_point(r)?), - 2 => ExtraField::Nonce(read_vec(read_byte, Some(MAX_TX_EXTRA_NONCE_SIZE), r)?), - 3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?), - 4 => ExtraField::PublicKeys(read_vec(read_point, None, r)?), - 0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, None, r)?), - _ => Err(io::Error::other("unknown extra field"))?, - }) - } -} - -/// The result of decoding a transaction's extra field. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct Extra(pub(crate) Vec); -impl Extra { - /// The keys within this extra. - /// - /// This returns all keys specified with `PublicKey` and the first set of keys specified with - /// `PublicKeys`, so long as they're well-formed. - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45 - // /src/wallet/wallet2.cpp#L2290-L2300 - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/wallet/wallet2.cpp#L2337-L2340 - pub fn keys(&self) -> Option<(Vec, Option>)> { - let mut keys = vec![]; - let mut additional = None; - for field in &self.0 { - match field.clone() { - ExtraField::PublicKey(this_key) => keys.push(this_key), - ExtraField::PublicKeys(these_additional) => { - additional = additional.or(Some(these_additional)) - } - _ => (), - } - } - // Don't return any keys if this was non-standard and didn't include the primary key - if keys.is_empty() { - None - } else { - Some((keys, additional)) - } - } - - /// The payment ID embedded within this extra. - // Monero finds the first nonce field and reads the payment ID from it: - // https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/ - // src/wallet/wallet2.cpp#L2709-L2752 - pub fn payment_id(&self) -> Option { - for field in &self.0 { - if let ExtraField::Nonce(data) = field { - return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok(); - } - } - None - } - - /// The arbitrary data within this extra. 
- /// - /// This uses a marker custom to monero-wallet. - pub fn data(&self) -> Vec> { - let mut res = vec![]; - for field in &self.0 { - if let ExtraField::Nonce(data) = field { - if data[0] == ARBITRARY_DATA_MARKER { - res.push(data[1 ..].to_vec()); - } - } - } - res - } - - pub(crate) fn new(key: EdwardsPoint, additional: Vec) -> Extra { - let mut res = Extra(Vec::with_capacity(3)); - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/cryptonote_basic/cryptonote_format_utils.cpp#L627-L633 - // We only support pushing nonces which come after these in the sort order - res.0.push(ExtraField::PublicKey(key)); - if !additional.is_empty() { - res.0.push(ExtraField::PublicKeys(additional)); - } - res - } - - pub(crate) fn push_nonce(&mut self, nonce: Vec) { - self.0.push(ExtraField::Nonce(nonce)); - } - - /// Write the Extra. - /// - /// This is not of deterministic length nor length-prefixed. It should only be written to a - /// buffer which will be delimited. - pub fn write(&self, w: &mut W) -> io::Result<()> { - for field in &self.0 { - field.write(w)?; - } - Ok(()) - } - - /// Serialize the Extra to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut buf = vec![]; - self.write(&mut buf).expect("write failed but doesn't fail"); - buf - } - - /// Read an `Extra`. - /// - /// This is not of deterministic length nor length-prefixed. It should only be read from a buffer - /// already delimited. - #[allow(clippy::unnecessary_wraps)] - pub fn read(r: &mut R) -> io::Result { - let mut res = Extra(vec![]); - // Extra reads until EOF - // We take a BufRead so we can detect when the buffer is empty - // `fill_buf` returns the current buffer, filled if empty, only empty if the reader is - // exhausted - while !r.fill_buf()?.is_empty() { - let Ok(field) = ExtraField::read(r) else { break }; - res.0.push(field); - } - Ok(res) - } -} diff --git a/networks/monero/wallet/src/lib.rs b/networks/monero/wallet/src/lib.rs deleted file mode 100644 index 703ba69c..00000000 --- a/networks/monero/wallet/src/lib.rs +++ /dev/null @@ -1,163 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_auto_cfg))] -#![doc = include_str!("../README.md")] -#![deny(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use std_shims::vec::Vec; - -use zeroize::{Zeroize, Zeroizing}; - -use curve25519_dalek::{Scalar, EdwardsPoint}; - -use monero_serai::{ - io::write_varint, - primitives::{Commitment, keccak256, keccak256_to_scalar}, - ringct::EncryptedAmount, - transaction::Input, -}; - -pub use monero_serai::*; - -pub use monero_rpc as rpc; - -pub use monero_address as address; - -mod view_pair; -pub use view_pair::{ViewPairError, ViewPair, GuaranteedViewPair}; - -/// Structures and functionality for working with transactions' extra fields. -pub mod extra; -pub(crate) use extra::{PaymentId, Extra}; - -pub(crate) mod output; -pub use output::WalletOutput; - -mod scan; -pub use scan::{Timelocked, ScanError, Scanner, GuaranteedScanner}; - -mod decoys; -pub use decoys::OutputWithDecoys; - -/// Structs and functionality for sending transactions. 
-pub mod send; - -#[cfg(test)] -mod tests; - -#[derive(Clone, PartialEq, Eq, Zeroize)] -struct SharedKeyDerivations { - // Hs("view_tag" || 8Ra || o) - view_tag: u8, - // Hs(uniqueness || 8Ra || o) where uniqueness may be empty - shared_key: Scalar, -} - -impl SharedKeyDerivations { - // https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100 - fn uniqueness(inputs: &[Input]) -> [u8; 32] { - let mut u = b"uniqueness".to_vec(); - for input in inputs { - match input { - // If Gen, this should be the only input, making this loop somewhat pointless - // This works and even if there were somehow multiple inputs, it'd be a false negative - Input::Gen(height) => { - write_varint(height, &mut u).expect("write failed but doesn't fail"); - } - Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()), - } - } - keccak256(u) - } - - #[allow(clippy::needless_pass_by_value)] - fn output_derivations( - uniqueness: Option<[u8; 32]>, - ecdh: Zeroizing, - o: usize, - ) -> Zeroizing { - // 8Ra - let mut output_derivation = Zeroizing::new( - Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(), - ); - - // || o - { - let output_derivation: &mut Vec = output_derivation.as_mut(); - write_varint(&o, output_derivation) - .expect("write failed but doesn't fail"); - } - - let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0]; - - // uniqueness || - let output_derivation = if let Some(uniqueness) = uniqueness { - Zeroizing::new([uniqueness.as_ref(), &output_derivation].concat()) - } else { - output_derivation - }; - - Zeroizing::new(SharedKeyDerivations { - view_tag, - shared_key: keccak256_to_scalar(&output_derivation), - }) - } - - // H(8Ra || 0x8d) - #[allow(clippy::needless_pass_by_value)] - fn payment_id_xor(ecdh: Zeroizing) -> [u8; 8] { - // 8Ra - let output_derivation = Zeroizing::new( - Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(), - ); - - let mut payment_id_xor = [0; 8]; - payment_id_xor - .copy_from_slice(&keccak256([output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]); - payment_id_xor - } - - fn commitment_mask(&self) -> Scalar { - let mut mask = b"commitment_mask".to_vec(); - mask.extend(self.shared_key.as_bytes()); - let res = keccak256_to_scalar(&mask); - mask.zeroize(); - res - } - - fn compact_amount_encryption(&self, amount: u64) -> [u8; 8] { - let mut amount_mask = Zeroizing::new(b"amount".to_vec()); - amount_mask.extend(self.shared_key.to_bytes()); - let mut amount_mask = keccak256(&amount_mask); - - let mut amount_mask_8 = [0; 8]; - amount_mask_8.copy_from_slice(&amount_mask[.. 8]); - amount_mask.zeroize(); - - (amount ^ u64::from_le_bytes(amount_mask_8)).to_le_bytes() - } - - fn decrypt(&self, enc_amount: &EncryptedAmount) -> Commitment { - match enc_amount { - EncryptedAmount::Original { mask, amount } => { - let mask_shared_sec_scalar = keccak256_to_scalar(self.shared_key.as_bytes()); - let amount_shared_sec_scalar = keccak256_to_scalar(mask_shared_sec_scalar.as_bytes()); - - let mask = Scalar::from_bytes_mod_order(*mask) - mask_shared_sec_scalar; - let amount_scalar = Scalar::from_bytes_mod_order(*amount) - amount_shared_sec_scalar; - - // d2b from rctTypes.cpp - let amount = u64::from_le_bytes( - amount_scalar.to_bytes()[.. 
8] - .try_into() - .expect("32-byte array couldn't have an 8-byte slice taken"), - ); - - Commitment::new(mask, amount) - } - EncryptedAmount::Compact { amount } => Commitment::new( - self.commitment_mask(), - u64::from_le_bytes(self.compact_amount_encryption(u64::from_le_bytes(*amount))), - ), - } - } -} diff --git a/networks/monero/wallet/src/output.rs b/networks/monero/wallet/src/output.rs deleted file mode 100644 index 933d7ae7..00000000 --- a/networks/monero/wallet/src/output.rs +++ /dev/null @@ -1,376 +0,0 @@ -use std_shims::{ - vec, - vec::Vec, - io::{self, Read, Write}, -}; - -use zeroize::{Zeroize, ZeroizeOnDrop}; - -use curve25519_dalek::{Scalar, edwards::EdwardsPoint}; - -use crate::{ - io::*, primitives::Commitment, transaction::Timelock, address::SubaddressIndex, extra::PaymentId, -}; - -/// An absolute output ID, defined as its transaction hash and output index. -/// -/// This is not the output's key as multiple outputs may share an output key. -#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub(crate) struct AbsoluteId { - pub(crate) transaction: [u8; 32], - pub(crate) index_in_transaction: u64, -} - -impl core::fmt::Debug for AbsoluteId { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - fmt - .debug_struct("AbsoluteId") - .field("transaction", &hex::encode(self.transaction)) - .field("index_in_transaction", &self.index_in_transaction) - .finish() - } -} - -impl AbsoluteId { - /// Write the AbsoluteId. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - fn write(&self, w: &mut W) -> io::Result<()> { - w.write_all(&self.transaction)?; - w.write_all(&self.index_in_transaction.to_le_bytes()) - } - - /// Read an AbsoluteId. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - fn read(r: &mut R) -> io::Result { - Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u64(r)? }) - } -} - -/// An output's relative ID. -/// -/// This is defined as the output's index on the blockchain. -#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub(crate) struct RelativeId { - pub(crate) index_on_blockchain: u64, -} - -impl core::fmt::Debug for RelativeId { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - fmt.debug_struct("RelativeId").field("index_on_blockchain", &self.index_on_blockchain).finish() - } -} - -impl RelativeId { - /// Write the RelativeId. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - fn write(&self, w: &mut W) -> io::Result<()> { - w.write_all(&self.index_on_blockchain.to_le_bytes()) - } - - /// Read an RelativeId. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - fn read(r: &mut R) -> io::Result { - Ok(RelativeId { index_on_blockchain: read_u64(r)? }) - } -} - -/// The data within an output, as necessary to spend the output. 
-#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub(crate) struct OutputData { - pub(crate) key: EdwardsPoint, - pub(crate) key_offset: Scalar, - pub(crate) commitment: Commitment, -} - -impl core::fmt::Debug for OutputData { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - fmt - .debug_struct("OutputData") - .field("key", &hex::encode(self.key.compress().0)) - .field("key_offset", &hex::encode(self.key_offset.to_bytes())) - .field("commitment", &self.commitment) - .finish() - } -} - -impl OutputData { - /// The key this output may be spent by. - pub(crate) fn key(&self) -> EdwardsPoint { - self.key - } - - /// The scalar to add to the private spend key for it to be the discrete logarithm of this - /// output's key. - pub(crate) fn key_offset(&self) -> Scalar { - self.key_offset - } - - /// The commitment this output created. - pub(crate) fn commitment(&self) -> &Commitment { - &self.commitment - } - - /// Write the OutputData. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub(crate) fn write(&self, w: &mut W) -> io::Result<()> { - w.write_all(&self.key.compress().to_bytes())?; - w.write_all(&self.key_offset.to_bytes())?; - self.commitment.write(w) - } - - /* Commented as it's unused, due to self being private - /// Serialize the OutputData to a `Vec`. - pub fn serialize(&self) -> Vec { - let mut res = Vec::with_capacity(32 + 32 + 40); - self.write(&mut res).expect("write failed but doesn't fail"); - res - } - */ - - /// Read an OutputData. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub(crate) fn read(r: &mut R) -> io::Result { - Ok(OutputData { - key: read_point(r)?, - key_offset: read_scalar(r)?, - commitment: Commitment::read(r)?, - }) - } -} - -/// The metadata for an output. -#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub(crate) struct Metadata { - pub(crate) additional_timelock: Timelock, - pub(crate) subaddress: Option, - pub(crate) payment_id: Option, - pub(crate) arbitrary_data: Vec>, -} - -impl core::fmt::Debug for Metadata { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - fmt - .debug_struct("Metadata") - .field("additional_timelock", &self.additional_timelock) - .field("subaddress", &self.subaddress) - .field("payment_id", &self.payment_id) - .field("arbitrary_data", &self.arbitrary_data.iter().map(hex::encode).collect::>()) - .finish() - } -} - -impl Metadata { - /// Write the Metadata. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. 
- fn write(&self, w: &mut W) -> io::Result<()> { - self.additional_timelock.write(w)?; - - if let Some(subaddress) = self.subaddress { - w.write_all(&[1])?; - w.write_all(&subaddress.account().to_le_bytes())?; - w.write_all(&subaddress.address().to_le_bytes())?; - } else { - w.write_all(&[0])?; - } - - if let Some(payment_id) = self.payment_id { - w.write_all(&[1])?; - payment_id.write(w)?; - } else { - w.write_all(&[0])?; - } - - w.write_all( - &u64::try_from(self.arbitrary_data.len()) - .expect("amount of arbitrary data chunks exceeded u64::MAX") - .to_le_bytes(), - )?; - for part in &self.arbitrary_data { - // TODO: Define our own collection whose `len` function returns `u8` to ensure this bound - // with types - w.write_all(&[ - u8::try_from(part.len()).expect("piece of arbitrary data exceeded max length of u8::MAX") - ])?; - w.write_all(part)?; - } - Ok(()) - } - - /// Read a Metadata. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - fn read(r: &mut R) -> io::Result { - let additional_timelock = Timelock::read(r)?; - - let subaddress = match read_byte(r)? { - 0 => None, - 1 => Some( - SubaddressIndex::new(read_u32(r)?, read_u32(r)?) - .ok_or_else(|| io::Error::other("invalid subaddress in metadata"))?, - ), - _ => Err(io::Error::other("invalid subaddress is_some boolean in metadata"))?, - }; - - Ok(Metadata { - additional_timelock, - subaddress, - payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None }, - arbitrary_data: { - let mut data = vec![]; - for _ in 0 .. read_u64(r)? { - let len = read_byte(r)?; - data.push(read_raw_vec(read_byte, usize::from(len), r)?); - } - data - }, - }) - } -} - -/// A scanned output and all associated data. -/// -/// This struct contains all data necessary to spend this output, or handle it as a payment. -/// -/// This struct is bound to a specific instance of the blockchain. If the blockchain reorganizes -/// the block this struct is bound to, it MUST be discarded. If any outputs are mutual to both -/// blockchains, scanning the new blockchain will yield those outputs again. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)] -pub struct WalletOutput { - /// The absolute ID for this transaction. - pub(crate) absolute_id: AbsoluteId, - /// The ID for this transaction, relative to the blockchain. - pub(crate) relative_id: RelativeId, - /// The output's data. - pub(crate) data: OutputData, - /// Associated metadata relevant for handling it as a payment. - pub(crate) metadata: Metadata, -} - -impl WalletOutput { - /// The hash of the transaction which created this output. - pub fn transaction(&self) -> [u8; 32] { - self.absolute_id.transaction - } - - /// The index of the output within the transaction. - pub fn index_in_transaction(&self) -> u64 { - self.absolute_id.index_in_transaction - } - - /// The index of the output on the blockchain. - pub fn index_on_blockchain(&self) -> u64 { - self.relative_id.index_on_blockchain - } - - /// The key this output may be spent by. - pub fn key(&self) -> EdwardsPoint { - self.data.key() - } - - /// The scalar to add to the private spend key for it to be the discrete logarithm of this - /// output's key. - pub fn key_offset(&self) -> Scalar { - self.data.key_offset() - } - - /// The commitment this output created. - pub fn commitment(&self) -> &Commitment { - self.data.commitment() - } - - /// The additional timelock this output is subject to. 
- /// - /// All outputs are subject to the '10-block lock', a 10-block window after their inclusion - /// on-chain during which they cannot be spent. Outputs may be additionally timelocked. This - /// function only returns the additional timelock. - pub fn additional_timelock(&self) -> Timelock { - self.metadata.additional_timelock - } - - /// The index of the subaddress this output was identified as sent to. - pub fn subaddress(&self) -> Option { - self.metadata.subaddress - } - - /// The payment ID included with this output. - /// - /// This field may be `Some` even if wallet2 would not return a payment ID. wallet2 will only - /// decrypt a payment ID if either: - /// - /// A) The transaction wasn't made by the wallet (via checking if any key images are recognized) - /// B) For the highest-indexed input with a recognized key image, it spends an output with - /// subaddress account `(a, _)` which is distinct from this output's subaddress account - /// - /// Neither of these cases are handled by `monero-wallet` as scanning doesn't have the context - /// of key images. - // - // Identification of the subaddress account for the highest-indexed input with a recognized key - // image: - // https://github.com/monero-project/monero/blob/a1dc85c5373a30f14aaf7dcfdd95f5a7375d3623 - // /src/wallet/wallet2.cpp/#L2637-L2670 - // - // Removal of 'transfers' received to this account: - // https://github.com/monero-project/monero/blob/a1dc85c5373a30f14aaf7dcfdd95f5a7375d3623 - // /src/wallet/wallet2.cpp/#L2782-L2794 - // - // Payment IDs only being decrypted for the remaining transfers: - // https://github.com/monero-project/monero/blob/a1dc85c5373a30f14aaf7dcfdd95f5a7375d3623 - // /src/wallet/wallet2.cpp/#L2796-L2844 - pub fn payment_id(&self) -> Option { - self.metadata.payment_id - } - - /// The arbitrary data from the `extra` field of the transaction which created this output. - pub fn arbitrary_data(&self) -> &[Vec] { - &self.metadata.arbitrary_data - } - - /// Write the WalletOutput. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn write(&self, w: &mut W) -> io::Result<()> { - self.absolute_id.write(w)?; - self.relative_id.write(w)?; - self.data.write(w)?; - self.metadata.write(w) - } - - /// Serialize the WalletOutput to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - let mut serialized = Vec::with_capacity(128); - self.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - /// Read a WalletOutput. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. 
- pub fn read(r: &mut R) -> io::Result { - Ok(WalletOutput { - absolute_id: AbsoluteId::read(r)?, - relative_id: RelativeId::read(r)?, - data: OutputData::read(r)?, - metadata: Metadata::read(r)?, - }) - } -} diff --git a/networks/monero/wallet/src/scan.rs b/networks/monero/wallet/src/scan.rs deleted file mode 100644 index 79caf3f2..00000000 --- a/networks/monero/wallet/src/scan.rs +++ /dev/null @@ -1,381 +0,0 @@ -use core::ops::Deref; -use std_shims::{vec, vec::Vec, collections::HashMap}; - -use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY}; - -use monero_rpc::ScannableBlock; -use monero_serai::{ - io::*, - primitives::Commitment, - transaction::{Timelock, Pruned, Transaction}, -}; -use crate::{ - address::SubaddressIndex, ViewPair, GuaranteedViewPair, output::*, PaymentId, Extra, - SharedKeyDerivations, -}; - -/// A collection of potentially additionally timelocked outputs. -#[derive(Zeroize, ZeroizeOnDrop)] -pub struct Timelocked(Vec); - -impl Timelocked { - /// Return the outputs which aren't subject to an additional timelock. - #[must_use] - pub fn not_additionally_locked(self) -> Vec { - let mut res = vec![]; - for output in &self.0 { - if output.additional_timelock() == Timelock::None { - res.push(output.clone()); - } - } - res - } - - /// Return the outputs whose additional timelock unlocks by the specified block/time. - /// - /// Additional timelocks are almost never used outside of miner transactions, and are - /// increasingly planned for removal. Ignoring non-miner additionally-timelocked outputs is - /// recommended. - /// - /// `block` is the block number of the block the additional timelock must be satsified by. - /// - /// `time` is represented in seconds since the epoch and is in terms of Monero's on-chain clock. - /// That means outputs whose additional timelocks are statisfied by `Instant::now()` (the time - /// according to the local system clock) may still be locked due to variance with Monero's clock. - #[must_use] - pub fn additional_timelock_satisfied_by(self, block: usize, time: u64) -> Vec { - let mut res = vec![]; - for output in &self.0 { - if (output.additional_timelock() <= Timelock::Block(block)) || - (output.additional_timelock() <= Timelock::Time(time)) - { - res.push(output.clone()); - } - } - res - } - - /// Ignore the timelocks and return all outputs within this container. - #[must_use] - pub fn ignore_additional_timelock(mut self) -> Vec { - let mut res = vec![]; - core::mem::swap(&mut self.0, &mut res); - res - } -} - -/// Errors when scanning a block. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum ScanError { - /// The block was for an unsupported protocol version. - #[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))] - UnsupportedProtocol(u8), - /// The ScannableBlock was invalid. 
- #[cfg_attr(feature = "std", error("invalid scannable block ({0})"))] - InvalidScannableBlock(&'static str), -} - -#[derive(Clone)] -struct InternalScanner { - pair: ViewPair, - guaranteed: bool, - subaddresses: HashMap>, -} - -impl Zeroize for InternalScanner { - fn zeroize(&mut self) { - self.pair.zeroize(); - self.guaranteed.zeroize(); - - // This may not be effective, unfortunately - for (mut key, mut value) in self.subaddresses.drain() { - key.zeroize(); - value.zeroize(); - } - } -} -impl Drop for InternalScanner { - fn drop(&mut self) { - self.zeroize(); - } -} -impl ZeroizeOnDrop for InternalScanner {} - -impl InternalScanner { - fn new(pair: ViewPair, guaranteed: bool) -> Self { - let mut subaddresses = HashMap::new(); - subaddresses.insert(pair.spend().compress(), None); - Self { pair, guaranteed, subaddresses } - } - - fn register_subaddress(&mut self, subaddress: SubaddressIndex) { - let (spend, _) = self.pair.subaddress_keys(subaddress); - self.subaddresses.insert(spend.compress(), Some(subaddress)); - } - - fn scan_transaction( - &self, - output_index_for_first_ringct_output: u64, - tx_hash: [u8; 32], - tx: &Transaction, - ) -> Result { - // Only scan TXs creating RingCT outputs - // For the full details on why this check is equivalent, please see the documentation in `scan` - if tx.version() != 2 { - return Ok(Timelocked(vec![])); - } - - // Read the extra field - let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()) else { - return Ok(Timelocked(vec![])); - }; - - let Some((tx_keys, additional)) = extra.keys() else { - return Ok(Timelocked(vec![])); - }; - let payment_id = extra.payment_id(); - - let mut res = vec![]; - for (o, output) in tx.prefix().outputs.iter().enumerate() { - let Some(output_key) = decompress_point(output.key.to_bytes()) else { continue }; - - // Monero checks with each TX key and with the additional key for this output - - // This will be None if there's no additional keys, Some(None) if there's additional keys - // yet not one for this output (which is non-standard), and Some(Some(_)) if there's an - // additional key for this output - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/cryptonote_basic/cryptonote_format_utils.cpp#L1060-L1070 - let additional = additional.as_ref().map(|additional| additional.get(o)); - - #[allow(clippy::manual_let_else)] - for key in tx_keys.iter().map(|key| Some(Some(key))).chain(core::iter::once(additional)) { - // Get the key, or continue if there isn't one - let key = match key { - Some(Some(key)) => key, - Some(None) | None => continue, - }; - // Calculate the ECDH - let ecdh = Zeroizing::new(self.pair.view.deref() * key); - let output_derivations = SharedKeyDerivations::output_derivations( - if self.guaranteed { - Some(SharedKeyDerivations::uniqueness(&tx.prefix().inputs)) - } else { - None - }, - ecdh.clone(), - o, - ); - - // Check the view tag matches, if there is a view tag - if let Some(actual_view_tag) = output.view_tag { - if actual_view_tag != output_derivations.view_tag { - continue; - } - } - - // P - shared == spend - let Some(subaddress) = ({ - // The output key may be of torsion [0, 8) - // Our subtracting of a prime-order element means any torsion will be preserved - // If someone wanted to malleate output keys with distinct torsions, only one will be - // scanned accordingly (the one which has matching torsion of the spend key) - let subaddress_spend_key = - output_key - (&output_derivations.shared_key * ED25519_BASEPOINT_TABLE); - 
self.subaddresses.get(&subaddress_spend_key.compress()) - }) else { - continue; - }; - let subaddress = *subaddress; - - // The key offset is this shared key - let mut key_offset = output_derivations.shared_key; - if let Some(subaddress) = subaddress { - // And if this was to a subaddress, it's additionally the offset from subaddress spend - // key to the normal spend key - key_offset += self.pair.subaddress_derivation(subaddress); - } - // Since we've found an output to us, get its amount - let mut commitment = Commitment::zero(); - - // Miner transaction - if let Some(amount) = output.amount { - commitment.amount = amount; - // Regular transaction - } else { - let Transaction::V2 { proofs: Some(ref proofs), .. } = &tx else { - // Invalid transaction, as of consensus rules at the time of writing this code - Err(ScanError::InvalidScannableBlock("non-miner v2 transaction without RCT proofs"))? - }; - - commitment = match proofs.base.encrypted_amounts.get(o) { - Some(amount) => output_derivations.decrypt(amount), - // Invalid transaction, as of consensus rules at the time of writing this code - None => Err(ScanError::InvalidScannableBlock( - "RCT proofs without an encrypted amount per output", - ))?, - }; - - // Rebuild the commitment to verify it - if Some(&commitment.calculate()) != proofs.base.commitments.get(o) { - continue; - } - } - - // Decrypt the payment ID - let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh)); - - let o = u64::try_from(o).expect("couldn't convert output index (usize) to u64"); - - res.push(WalletOutput { - absolute_id: AbsoluteId { transaction: tx_hash, index_in_transaction: o }, - relative_id: RelativeId { - index_on_blockchain: output_index_for_first_ringct_output.checked_add(o).ok_or( - ScanError::InvalidScannableBlock( - "transaction's output's index isn't representable as a u64", - ), - )?, - }, - data: OutputData { key: output_key, key_offset, commitment }, - metadata: Metadata { - additional_timelock: tx.prefix().additional_timelock, - subaddress, - payment_id, - arbitrary_data: extra.data(), - }, - }); - - // Break to prevent public keys from being included multiple times, triggering multiple - // inclusions of the same output - break; - } - } - - Ok(Timelocked(res)) - } - - fn scan(&mut self, block: ScannableBlock) -> Result { - // This is the output index for the first RingCT output within the block - // We mutate it to be the output index for the first RingCT for each transaction - let ScannableBlock { block, transactions, output_index_for_first_ringct_output } = block; - if block.transactions.len() != transactions.len() { - Err(ScanError::InvalidScannableBlock( - "scanning a ScannableBlock with more/less transactions than it should have", - ))?; - } - let Some(mut output_index_for_first_ringct_output) = output_index_for_first_ringct_output - else { - return Ok(Timelocked(vec![])); - }; - - if block.header.hardfork_version > 16 { - Err(ScanError::UnsupportedProtocol(block.header.hardfork_version))?; - } - - // We obtain all TXs in full - let mut txs_with_hashes = vec![( - block.miner_transaction.hash(), - Transaction::::from(block.miner_transaction.clone()), - )]; - for (hash, tx) in block.transactions.iter().zip(transactions) { - txs_with_hashes.push((*hash, tx)); - } - - let mut res = Timelocked(vec![]); - for (hash, tx) in txs_with_hashes { - // Push all outputs into our result - { - let mut this_txs_outputs = vec![]; - core::mem::swap( - &mut self.scan_transaction(output_index_for_first_ringct_output, hash, &tx)?.0, 
- &mut this_txs_outputs, - ); - res.0.extend(this_txs_outputs); - } - - // Update the RingCT starting index for the next TX - if matches!(tx, Transaction::V2 { .. }) { - output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len()) - .expect("couldn't convert amount of outputs (usize) to u64") - } - } - - // If the block's version is >= 12, drop all unencrypted payment IDs - // https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/ - // src/wallet/wallet2.cpp#L2739-L2744 - if block.header.hardfork_version >= 12 { - for output in &mut res.0 { - if matches!(output.metadata.payment_id, Some(PaymentId::Unencrypted(_))) { - output.metadata.payment_id = None; - } - } - } - - Ok(res) - } -} - -/// A transaction scanner to find outputs received. -/// -/// When an output is successfully scanned, the output key MUST be checked against the local -/// database for lack of prior observation. If it was prior observed, that output is an instance -/// of the -/// [burning bug](https://web.getmonero.org/2018/09/25/a-post-mortum-of-the-burning-bug.html) and -/// MAY be unspendable. Only the prior received output(s) or the newly received output will be -/// spendable (as spending one will burn all of them). -/// -/// Once checked, the output key MUST be saved to the local database so future checks can be -/// performed. -#[derive(Clone, Zeroize, ZeroizeOnDrop)] -pub struct Scanner(InternalScanner); - -impl Scanner { - /// Create a Scanner from a ViewPair. - pub fn new(pair: ViewPair) -> Self { - Self(InternalScanner::new(pair, false)) - } - - /// Register a subaddress to scan for. - /// - /// Subaddresses must be explicitly registered ahead of time in order to be successfully scanned. - pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) { - self.0.register_subaddress(subaddress) - } - - /// Scan a block. - pub fn scan(&mut self, block: ScannableBlock) -> Result { - self.0.scan(block) - } -} - -/// A transaction scanner to find outputs received which are guaranteed to be spendable. -/// -/// 'Guaranteed' outputs, or transactions outputs to the burning bug, are not officially specified -/// by the Monero project. They should only be used if necessary. No support outside of -/// monero-wallet is promised. -/// -/// "guaranteed to be spendable" assumes satisfaction of any timelocks in effect. -#[derive(Clone, Zeroize, ZeroizeOnDrop)] -pub struct GuaranteedScanner(InternalScanner); - -impl GuaranteedScanner { - /// Create a GuaranteedScanner from a GuaranteedViewPair. - pub fn new(pair: GuaranteedViewPair) -> Self { - Self(InternalScanner::new(pair.0, true)) - } - - /// Register a subaddress to scan for. - /// - /// Subaddresses must be explicitly registered ahead of time in order to be successfully scanned. - pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) { - self.0.register_subaddress(subaddress) - } - - /// Scan a block. - pub fn scan(&mut self, block: ScannableBlock) -> Result { - self.0.scan(block) - } -} diff --git a/networks/monero/wallet/src/send/eventuality.rs b/networks/monero/wallet/src/send/eventuality.rs deleted file mode 100644 index cd2543a4..00000000 --- a/networks/monero/wallet/src/send/eventuality.rs +++ /dev/null @@ -1,137 +0,0 @@ -use std_shims::{vec::Vec, io}; - -use zeroize::Zeroize; - -use crate::{ - ringct::PrunedRctProofs, - transaction::{Input, Timelock, Pruned, Transaction}, - send::SignableTransaction, -}; - -/// The eventual output of a SignableTransaction. 
-/// -/// If a SignableTransaction is signed and published on-chain, it will create a Transaction -/// identifiable to whoever else has the same SignableTransaction (with the same outgoing view -/// key). This structure enables checking if a Transaction is in fact such an output, as it can. -/// -/// Since Monero is a privacy coin without outgoing view keys, this only performs a fuzzy match. -/// The fuzzy match executes over the outputs and associated data necessary to work with the -/// outputs (the transaction randomness, ciphertexts). This transaction does not check if the -/// inputs intended to be spent where actually the inputs spent (as infeasible). -/// -/// The transaction randomness does bind to the inputs intended to be spent, so an on-chain -/// transaction will not match for multiple `Eventuality`s unless the `SignableTransaction`s they -/// were built from were in conflict (and their intended transactions cannot simultaneously exist -/// on-chain). -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct Eventuality(SignableTransaction); - -impl From for Eventuality { - fn from(tx: SignableTransaction) -> Eventuality { - Eventuality(tx) - } -} - -impl Eventuality { - /// Return the `extra` field any transaction following this intent would use. - /// - /// This enables building a HashMap of Extra -> Eventuality for efficiently fetching the - /// `Eventuality` an on-chain transaction may complete. - /// - /// This extra is cryptographically bound to the inputs intended to be spent. If the - /// `SignableTransaction`s the `Eventuality`s are built from are not in conflict (their intended - /// transactions can simultaneously exist on-chain), then each extra will only have a single - /// Eventuality associated (barring a cryptographic problem considered hard failing). - pub fn extra(&self) -> Vec { - self.0.extra() - } - - /// Return if this TX matches the SignableTransaction this was created from. - /// - /// Matching the SignableTransaction means this transaction created the expected outputs, they're - /// scannable, they're not locked, and this transaction claims to use the intended inputs (though - /// this is not guaranteed). This 'claim' is evaluated by this transaction using the transaction - /// keys derived from the intended inputs. This ensures two SignableTransactions with the same - /// intended payments don't match for each other's `Eventuality`s (as they'll have distinct - /// inputs intended). - #[must_use] - pub fn matches(&self, tx: &Transaction) -> bool { - // Verify extra - if self.0.extra() != tx.prefix().extra { - return false; - } - - // Also ensure no timelock was set - if tx.prefix().additional_timelock != Timelock::None { - return false; - } - - // Check the amount of inputs aligns - if tx.prefix().inputs.len() != self.0.inputs.len() { - return false; - } - // Collect the key images used by this transaction - let Ok(key_images) = tx - .prefix() - .inputs - .iter() - .map(|input| match input { - Input::Gen(_) => Err(()), - Input::ToKey { key_image, .. } => Ok(*key_image), - }) - .collect::, _>>() - else { - return false; - }; - - // Check the outputs - if self.0.outputs(&key_images) != tx.prefix().outputs { - return false; - } - - // Check the encrypted amounts and commitments - let commitments_and_encrypted_amounts = self.0.commitments_and_encrypted_amounts(&key_images); - let Transaction::V2 { proofs: Some(PrunedRctProofs { ref base, .. }), .. 
} = tx else { - return false; - }; - if base.commitments != - commitments_and_encrypted_amounts - .iter() - .map(|(commitment, _)| commitment.calculate()) - .collect::>() - { - return false; - } - if base.encrypted_amounts != - commitments_and_encrypted_amounts.into_iter().map(|(_, amount)| amount).collect::>() - { - return false; - } - - true - } - - /// Write the Eventuality. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn write(&self, w: &mut W) -> io::Result<()> { - self.0.write(w) - } - - /// Serialize the Eventuality to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - self.0.serialize() - } - - /// Read a Eventuality. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn read(r: &mut R) -> io::Result { - Ok(Eventuality(SignableTransaction::read(r)?)) - } -} diff --git a/networks/monero/wallet/src/send/mod.rs b/networks/monero/wallet/src/send/mod.rs deleted file mode 100644 index e0c59e20..00000000 --- a/networks/monero/wallet/src/send/mod.rs +++ /dev/null @@ -1,610 +0,0 @@ -use core::{ops::Deref, fmt}; -use std_shims::{ - io, vec, - vec::Vec, - string::{String, ToString}, -}; - -use zeroize::{Zeroize, Zeroizing}; - -use rand_core::{RngCore, CryptoRng}; -use rand::seq::SliceRandom; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint}; -#[cfg(feature = "multisig")] -use frost::FrostError; - -use crate::{ - io::*, - generators::{MAX_COMMITMENTS, hash_to_point}, - ringct::{ - clsag::{ClsagError, ClsagContext, Clsag}, - RctType, RctPrunable, RctProofs, - }, - transaction::Transaction, - address::{Network, SubaddressIndex, MoneroAddress}, - extra::MAX_ARBITRARY_DATA_SIZE, - rpc::FeeRate, - ViewPair, GuaranteedViewPair, OutputWithDecoys, -}; - -mod tx_keys; -pub use tx_keys::TransactionKeys; -mod tx; -mod eventuality; -pub use eventuality::Eventuality; - -#[cfg(feature = "multisig")] -mod multisig; -#[cfg(feature = "multisig")] -pub use multisig::{TransactionMachine, TransactionSignMachine, TransactionSignatureMachine}; - -pub(crate) fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering { - x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse() -} - -#[derive(Clone, PartialEq, Eq, Zeroize)] -enum ChangeEnum { - AddressOnly(MoneroAddress), - Standard { view_pair: ViewPair, subaddress: Option }, - Guaranteed { view_pair: GuaranteedViewPair, subaddress: Option }, -} - -impl fmt::Debug for ChangeEnum { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ChangeEnum::AddressOnly(addr) => { - f.debug_struct("ChangeEnum::AddressOnly").field("addr", &addr).finish() - } - ChangeEnum::Standard { subaddress, .. } => f - .debug_struct("ChangeEnum::Standard") - .field("subaddress", &subaddress) - .finish_non_exhaustive(), - ChangeEnum::Guaranteed { subaddress, .. } => f - .debug_struct("ChangeEnum::Guaranteed") - .field("subaddress", &subaddress) - .finish_non_exhaustive(), - } - } -} - -/// Specification for a change output. -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct Change(Option); - -impl Change { - /// Create a change output specification. - /// - /// This take the view key as Monero assumes it has the view key for change outputs. It optimizes - /// its wallet protocol accordingly. 
- pub fn new(view_pair: ViewPair, subaddress: Option) -> Change { - Change(Some(ChangeEnum::Standard { view_pair, subaddress })) - } - - /// Create a change output specification for a guaranteed view pair. - /// - /// This take the view key as Monero assumes it has the view key for change outputs. It optimizes - /// its wallet protocol accordingly. - pub fn guaranteed(view_pair: GuaranteedViewPair, subaddress: Option) -> Change { - Change(Some(ChangeEnum::Guaranteed { view_pair, subaddress })) - } - - /// Create a fingerprintable change output specification. - /// - /// You MUST assume this will harm your privacy. Only use this if you know what you're doing. - /// - /// If the change address is Some, this will be unable to optimize the transaction as the - /// Monero wallet protocol expects it can (due to presumably having the view key for the change - /// output). If a transaction should be optimized, and isn'tm it will be fingerprintable. - /// - /// If the change address is None, there are two fingerprints: - /// - /// 1) The change in the TX is shunted to the fee (making it fingerprintable). - /// - /// 2) In two-output transactions, where the payment address doesn't have a payment ID, wallet2 - /// includes an encrypted dummy payment ID for the non-change output in order to not allow - /// differentiating if transactions send to addresses with payment IDs or not. monero-wallet - /// includes a dummy payment ID which at least one recipient will identify as not the expected - /// dummy payment ID, revealing to the recipient(s) the sender is using non-wallet2 software. - pub fn fingerprintable(address: Option) -> Change { - if let Some(address) = address { - Change(Some(ChangeEnum::AddressOnly(address))) - } else { - Change(None) - } - } -} - -#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -enum InternalPayment { - Payment(MoneroAddress, u64), - Change(ChangeEnum), -} - -impl InternalPayment { - fn address(&self) -> MoneroAddress { - match self { - InternalPayment::Payment(addr, _) => *addr, - InternalPayment::Change(change) => match change { - ChangeEnum::AddressOnly(addr) => *addr, - // Network::Mainnet as the network won't effect the derivations - ChangeEnum::Standard { view_pair, subaddress } => match subaddress { - Some(subaddress) => view_pair.subaddress(Network::Mainnet, *subaddress), - None => view_pair.legacy_address(Network::Mainnet), - }, - ChangeEnum::Guaranteed { view_pair, subaddress } => { - view_pair.address(Network::Mainnet, *subaddress, None) - } - }, - } - } -} - -/// An error while sending Monero. -#[derive(Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum SendError { - /// The RingCT type to produce proofs for this transaction with weren't supported. - #[cfg_attr(feature = "std", error("this library doesn't yet support that RctType"))] - UnsupportedRctType, - /// The transaction had no inputs specified. - #[cfg_attr(feature = "std", error("no inputs"))] - NoInputs, - /// The decoy quantity was invalid for the specified RingCT type. - #[cfg_attr(feature = "std", error("invalid number of decoys"))] - InvalidDecoyQuantity, - /// The transaction had no outputs specified. - #[cfg_attr(feature = "std", error("no outputs"))] - NoOutputs, - /// The transaction had too many outputs specified. - #[cfg_attr(feature = "std", error("too many outputs"))] - TooManyOutputs, - /// The transaction did not have a change output, and did not have two outputs. 
- /// - /// Monero requires all transactions have at least two outputs, assuming one payment and one - /// change (or at least one dummy and one change). Accordingly, specifying no change and only - /// one payment prevents creating a valid transaction - #[cfg_attr(feature = "std", error("only one output and no change address"))] - NoChange, - /// Multiple addresses had payment IDs specified. - /// - /// Only one payment ID is allowed per transaction. - #[cfg_attr(feature = "std", error("multiple addresses with payment IDs"))] - MultiplePaymentIds, - /// Too much arbitrary data was specified. - #[cfg_attr(feature = "std", error("too much data"))] - TooMuchArbitraryData, - /// The created transaction was too large. - #[cfg_attr(feature = "std", error("too large of a transaction"))] - TooLargeTransaction, - /// The transactions' amounts could not be represented within a `u64`. - #[cfg_attr( - feature = "std", - error("transaction amounts exceed u64::MAX (in {in_amount}, out {out_amount})") - )] - AmountsUnrepresentable { - /// The amount in (via inputs). - in_amount: u128, - /// The amount which would be out (between outputs and the fee). - out_amount: u128, - }, - /// This transaction could not pay for itself. - #[cfg_attr( - feature = "std", - error( - "not enough funds (inputs {inputs}, outputs {outputs}, necessary_fee {necessary_fee:?})" - ) - )] - NotEnoughFunds { - /// The amount of funds the inputs contributed. - inputs: u64, - /// The amount of funds the outputs required. - outputs: u64, - /// The fee necessary to be paid on top. - /// - /// If this is None, it is because the fee was not calculated as the outputs alone caused this - /// error. - necessary_fee: Option, - }, - /// This transaction is being signed with the wrong private key. - #[cfg_attr(feature = "std", error("wrong spend private key"))] - WrongPrivateKey, - /// This transaction was read from a bytestream which was malicious. - #[cfg_attr( - feature = "std", - error("this SignableTransaction was created by deserializing a malicious serialization") - )] - MaliciousSerialization, - /// There was an error when working with the CLSAGs. - #[cfg_attr(feature = "std", error("clsag error ({0})"))] - ClsagError(ClsagError), - /// There was an error when working with FROST. - #[cfg(feature = "multisig")] - #[cfg_attr(feature = "std", error("frost error {0}"))] - FrostError(FrostError), -} - -/// A signable transaction. 
-#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] -pub struct SignableTransaction { - rct_type: RctType, - outgoing_view_key: Zeroizing<[u8; 32]>, - inputs: Vec, - payments: Vec, - data: Vec>, - fee_rate: FeeRate, -} - -struct SignableTransactionWithKeyImages { - intent: SignableTransaction, - key_images: Vec, -} - -impl SignableTransaction { - fn validate(&self) -> Result<(), SendError> { - match self.rct_type { - RctType::ClsagBulletproof | RctType::ClsagBulletproofPlus => {} - _ => Err(SendError::UnsupportedRctType)?, - } - - if self.inputs.is_empty() { - Err(SendError::NoInputs)?; - } - for input in &self.inputs { - if input.decoys().len() != - match self.rct_type { - RctType::ClsagBulletproof => 11, - RctType::ClsagBulletproofPlus => 16, - _ => panic!("unsupported RctType"), - } - { - Err(SendError::InvalidDecoyQuantity)?; - } - } - - // Check we have at least one non-change output - if !self.payments.iter().any(|payment| matches!(payment, InternalPayment::Payment(_, _))) { - Err(SendError::NoOutputs)?; - } - // If we don't have at least two outputs, as required by Monero, error - if self.payments.len() < 2 { - Err(SendError::NoChange)?; - } - // Check we don't have multiple Change outputs due to decoding a malicious serialization - { - let mut change_count = 0; - for payment in &self.payments { - change_count += usize::from(u8::from(matches!(payment, InternalPayment::Change(_)))); - } - if change_count > 1 { - Err(SendError::MaliciousSerialization)?; - } - } - - // Make sure there's at most one payment ID - { - let mut payment_ids = 0; - for payment in &self.payments { - payment_ids += usize::from(u8::from(payment.address().payment_id().is_some())); - } - if payment_ids > 1 { - Err(SendError::MultiplePaymentIds)?; - } - } - - if self.payments.len() > MAX_COMMITMENTS { - Err(SendError::TooManyOutputs)?; - } - - // Check the length of each arbitrary data - for part in &self.data { - if part.len() > MAX_ARBITRARY_DATA_SIZE { - Err(SendError::TooMuchArbitraryData)?; - } - } - - // Check the length of TX extra - // https://github.com/monero-project/monero/pull/8733 - const MAX_EXTRA_SIZE: usize = 1060; - if self.extra().len() > MAX_EXTRA_SIZE { - Err(SendError::TooMuchArbitraryData)?; - } - - // Make sure we have enough funds - let weight; - { - let in_amount: u128 = - self.inputs.iter().map(|input| u128::from(input.commitment().amount)).sum(); - let payments_amount: u128 = self - .payments - .iter() - .filter_map(|payment| match payment { - InternalPayment::Payment(_, amount) => Some(u128::from(*amount)), - InternalPayment::Change(_) => None, - }) - .sum(); - let necessary_fee; - (weight, necessary_fee) = self.weight_and_necessary_fee(); - let out_amount = payments_amount + u128::from(necessary_fee); - let in_out_amount = u64::try_from(in_amount) - .and_then(|in_amount| u64::try_from(out_amount).map(|out_amount| (in_amount, out_amount))); - let Ok((in_amount, out_amount)) = in_out_amount else { - Err(SendError::AmountsUnrepresentable { in_amount, out_amount })? 
- }; - if in_amount < out_amount { - Err(SendError::NotEnoughFunds { - inputs: in_amount, - outputs: u64::try_from(payments_amount) - .expect("total out fit within u64 but not part of total out"), - necessary_fee: Some(necessary_fee), - })?; - } - } - - // The limit is half the no-penalty block size - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/wallet/wallet2.cpp#L11076-L11085 - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/cryptonote_config.h#L61 - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/cryptonote_config.h#L64 - const MAX_TX_SIZE: usize = (300_000 / 2) - 600; - if weight >= MAX_TX_SIZE { - Err(SendError::TooLargeTransaction)?; - } - - Ok(()) - } - - /// Create a new SignableTransaction. - /// - /// `outgoing_view_key` is used to seed the RNGs for this transaction. Anyone with knowledge of - /// the outgoing view key will be able to identify a transaction produced with this methodology, - /// and the data within it. Accordingly, it must be treated as a private key. - /// - /// `data` represents arbitrary data which will be embedded into the transaction's `extra` field. - /// The embedding occurs using an `ExtraField::Nonce` with a custom marker byte (as to not - /// conflict with a payment ID). - pub fn new( - rct_type: RctType, - outgoing_view_key: Zeroizing<[u8; 32]>, - inputs: Vec, - payments: Vec<(MoneroAddress, u64)>, - change: Change, - data: Vec>, - fee_rate: FeeRate, - ) -> Result { - // Re-format the payments and change into a consolidated payments list - let mut payments = payments - .into_iter() - .map(|(addr, amount)| InternalPayment::Payment(addr, amount)) - .collect::>(); - - if let Some(change) = change.0 { - payments.push(InternalPayment::Change(change)); - } - - let mut res = - SignableTransaction { rct_type, outgoing_view_key, inputs, payments, data, fee_rate }; - res.validate()?; - - // Shuffle the payments - { - let mut rng = res.seeded_rng(b"shuffle_payments"); - res.payments.shuffle(&mut rng); - } - - Ok(res) - } - - /// The fee rate this transaction uses. - pub fn fee_rate(&self) -> FeeRate { - self.fee_rate - } - - /// The fee this transaction requires. - /// - /// This is distinct from the fee this transaction will use. If no change output is specified, - /// all unspent coins will be shunted to the fee. - pub fn necessary_fee(&self) -> u64 { - self.weight_and_necessary_fee().1 - } - - /// Write a SignableTransaction. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. 
- pub fn write(&self, w: &mut W) -> io::Result<()> { - fn write_payment(payment: &InternalPayment, w: &mut W) -> io::Result<()> { - match payment { - InternalPayment::Payment(addr, amount) => { - w.write_all(&[0])?; - write_vec(write_byte, addr.to_string().as_bytes(), w)?; - w.write_all(&amount.to_le_bytes()) - } - InternalPayment::Change(change) => match change { - ChangeEnum::AddressOnly(addr) => { - w.write_all(&[1])?; - write_vec(write_byte, addr.to_string().as_bytes(), w) - } - ChangeEnum::Standard { view_pair, subaddress } => { - w.write_all(&[2])?; - write_point(&view_pair.spend(), w)?; - write_scalar(&view_pair.view, w)?; - if let Some(subaddress) = subaddress { - w.write_all(&subaddress.account().to_le_bytes())?; - w.write_all(&subaddress.address().to_le_bytes()) - } else { - w.write_all(&0u32.to_le_bytes())?; - w.write_all(&0u32.to_le_bytes()) - } - } - ChangeEnum::Guaranteed { view_pair, subaddress } => { - w.write_all(&[3])?; - write_point(&view_pair.spend(), w)?; - write_scalar(&view_pair.0.view, w)?; - if let Some(subaddress) = subaddress { - w.write_all(&subaddress.account().to_le_bytes())?; - w.write_all(&subaddress.address().to_le_bytes()) - } else { - w.write_all(&0u32.to_le_bytes())?; - w.write_all(&0u32.to_le_bytes()) - } - } - }, - } - } - - write_byte(&u8::from(self.rct_type), w)?; - w.write_all(self.outgoing_view_key.as_slice())?; - write_vec(OutputWithDecoys::write, &self.inputs, w)?; - write_vec(write_payment, &self.payments, w)?; - write_vec(|data, w| write_vec(write_byte, data, w), &self.data, w)?; - self.fee_rate.write(w) - } - - /// Serialize the SignableTransaction to a `Vec`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn serialize(&self) -> Vec { - let mut buf = Vec::with_capacity(256); - self.write(&mut buf).expect("write failed but doesn't fail"); - buf - } - - /// Read a `SignableTransaction`. - /// - /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol - /// defined serialization. - pub fn read(r: &mut R) -> io::Result { - fn read_address(r: &mut R) -> io::Result { - String::from_utf8(read_vec(read_byte, None, r)?) - .ok() - .and_then(|str| MoneroAddress::from_str_with_unchecked_network(&str).ok()) - .ok_or_else(|| io::Error::other("invalid address")) - } - - fn read_payment(r: &mut R) -> io::Result { - Ok(match read_byte(r)? { - 0 => InternalPayment::Payment(read_address(r)?, read_u64(r)?), - 1 => InternalPayment::Change(ChangeEnum::AddressOnly(read_address(r)?)), - 2 => InternalPayment::Change(ChangeEnum::Standard { - view_pair: ViewPair::new(read_point(r)?, Zeroizing::new(read_scalar(r)?)) - .map_err(io::Error::other)?, - subaddress: SubaddressIndex::new(read_u32(r)?, read_u32(r)?), - }), - 3 => InternalPayment::Change(ChangeEnum::Guaranteed { - view_pair: GuaranteedViewPair::new(read_point(r)?, Zeroizing::new(read_scalar(r)?)) - .map_err(io::Error::other)?, - subaddress: SubaddressIndex::new(read_u32(r)?, read_u32(r)?), - }), - _ => Err(io::Error::other("invalid payment"))?, - }) - } - - let res = SignableTransaction { - rct_type: RctType::try_from(read_byte(r)?) 
- .map_err(|()| io::Error::other("unsupported/invalid RctType"))?, - outgoing_view_key: Zeroizing::new(read_bytes(r)?), - inputs: read_vec(OutputWithDecoys::read, None, r)?, - payments: read_vec(read_payment, None, r)?, - data: read_vec(|r| read_vec(read_byte, None, r), None, r)?, - fee_rate: FeeRate::read(r)?, - }; - match res.validate() { - Ok(()) => {} - Err(e) => Err(io::Error::other(e))?, - } - Ok(res) - } - - fn with_key_images(mut self, key_images: Vec) -> SignableTransactionWithKeyImages { - debug_assert_eq!(self.inputs.len(), key_images.len()); - - // Sort the inputs by their key images - let mut sorted_inputs = self.inputs.into_iter().zip(key_images).collect::>(); - sorted_inputs - .sort_by(|(_, key_image_a), (_, key_image_b)| key_image_sort(key_image_a, key_image_b)); - - self.inputs = Vec::with_capacity(sorted_inputs.len()); - let mut key_images = Vec::with_capacity(sorted_inputs.len()); - for (input, key_image) in sorted_inputs { - self.inputs.push(input); - key_images.push(key_image); - } - - SignableTransactionWithKeyImages { intent: self, key_images } - } - - /// Sign this transaction. - pub fn sign( - self, - rng: &mut (impl RngCore + CryptoRng), - sender_spend_key: &Zeroizing, - ) -> Result { - // Calculate the key images - let mut key_images = vec![]; - for input in &self.inputs { - let input_key = Zeroizing::new(sender_spend_key.deref() + input.key_offset()); - if (input_key.deref() * ED25519_BASEPOINT_TABLE) != input.key() { - Err(SendError::WrongPrivateKey)?; - } - let key_image = input_key.deref() * hash_to_point(input.key().compress().to_bytes()); - key_images.push(key_image); - } - - // Convert to a SignableTransactionWithKeyImages - let tx = self.with_key_images(key_images); - - // Prepare the CLSAG signatures - let mut clsag_signs = Vec::with_capacity(tx.intent.inputs.len()); - for input in &tx.intent.inputs { - // Re-derive the input key as this will be in a different order - let input_key = Zeroizing::new(sender_spend_key.deref() + input.key_offset()); - clsag_signs.push(( - input_key, - ClsagContext::new(input.decoys().clone(), input.commitment().clone()) - .map_err(SendError::ClsagError)?, - )); - } - - // Get the output commitments' mask sum - let mask_sum = tx.intent.sum_output_masks(&tx.key_images); - - // Get the actual TX, just needing the CLSAGs - let mut tx = tx.transaction_without_signatures(); - - // Sign the CLSAGs - let clsags_and_pseudo_outs = Clsag::sign( - rng, - clsag_signs, - mask_sum, - tx.signature_hash().expect("signing a transaction which isn't signed?"), - ) - .map_err(SendError::ClsagError)?; - - // Fill in the CLSAGs/pseudo-outs - let inputs_len = tx.prefix().inputs.len(); - let Transaction::V2 { - proofs: - Some(RctProofs { - prunable: RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. }, - .. - }), - .. 
- } = tx - else { - panic!("not signing clsag?") - }; - *clsags = Vec::with_capacity(inputs_len); - *pseudo_outs = Vec::with_capacity(inputs_len); - for (clsag, pseudo_out) in clsags_and_pseudo_outs { - clsags.push(clsag); - pseudo_outs.push(pseudo_out); - } - - // Return the signed TX - Ok(tx) - } -} diff --git a/networks/monero/wallet/src/send/multisig.rs b/networks/monero/wallet/src/send/multisig.rs deleted file mode 100644 index f78f0fcd..00000000 --- a/networks/monero/wallet/src/send/multisig.rs +++ /dev/null @@ -1,304 +0,0 @@ -use std_shims::{ - vec::Vec, - io::{self, Read}, - collections::HashMap, -}; - -use rand_core::{RngCore, CryptoRng}; - -use group::ff::Field; -use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint}; -use dalek_ff_group as dfg; - -use transcript::{Transcript, RecommendedTranscript}; -use frost::{ - curve::Ed25519, - Participant, FrostError, ThresholdKeys, - dkg::lagrange, - sign::{ - Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine, - AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine, - }, -}; - -use monero_serai::{ - ringct::{ - clsag::{ClsagContext, ClsagMultisigMaskSender, ClsagAddendum, ClsagMultisig}, - RctPrunable, RctProofs, - }, - transaction::Transaction, -}; -use crate::send::{SendError, SignableTransaction, key_image_sort}; - -/// Initial FROST machine to produce a signed transaction. -pub struct TransactionMachine { - signable: SignableTransaction, - - i: Participant, - - // The key image generator, and the scalar offset from the spend key - key_image_generators_and_offsets: Vec<(EdwardsPoint, Scalar)>, - clsags: Vec<(ClsagMultisigMaskSender, AlgorithmMachine)>, -} - -/// Second FROST machine to produce a signed transaction. -pub struct TransactionSignMachine { - signable: SignableTransaction, - - i: Participant, - - key_image_generators_and_offsets: Vec<(EdwardsPoint, Scalar)>, - clsags: Vec<(ClsagMultisigMaskSender, AlgorithmSignMachine)>, - - our_preprocess: Vec>, -} - -/// Final FROST machine to produce a signed transaction. -pub struct TransactionSignatureMachine { - tx: Transaction, - clsags: Vec>, -} - -impl SignableTransaction { - /// Create a FROST signing machine out of this signable transaction. 
- pub fn multisig(self, keys: &ThresholdKeys) -> Result { - let mut clsags = vec![]; - - let mut key_image_generators_and_offsets = vec![]; - for input in &self.inputs { - // Check this is the right set of keys - let offset = keys.offset(dfg::Scalar(input.key_offset())); - if offset.group_key().0 != input.key() { - Err(SendError::WrongPrivateKey)?; - } - - let context = ClsagContext::new(input.decoys().clone(), input.commitment().clone()) - .map_err(SendError::ClsagError)?; - let (clsag, clsag_mask_send) = ClsagMultisig::new( - RecommendedTranscript::new(b"Monero Multisignature Transaction"), - context, - ); - key_image_generators_and_offsets.push(( - clsag.key_image_generator(), - keys.current_offset().unwrap_or(dfg::Scalar::ZERO).0 + input.key_offset(), - )); - clsags.push((clsag_mask_send, AlgorithmMachine::new(clsag, offset))); - } - - Ok(TransactionMachine { - signable: self, - i: keys.params().i(), - key_image_generators_and_offsets, - clsags, - }) - } -} - -impl PreprocessMachine for TransactionMachine { - type Preprocess = Vec>; - type Signature = Transaction; - type SignMachine = TransactionSignMachine; - - fn preprocess( - mut self, - rng: &mut R, - ) -> (TransactionSignMachine, Self::Preprocess) { - // Iterate over each CLSAG calling preprocess - let mut preprocesses = Vec::with_capacity(self.clsags.len()); - let clsags = self - .clsags - .drain(..) - .map(|(clsag_mask_send, clsag)| { - let (clsag, preprocess) = clsag.preprocess(rng); - preprocesses.push(preprocess); - (clsag_mask_send, clsag) - }) - .collect(); - let our_preprocess = preprocesses.clone(); - - ( - TransactionSignMachine { - signable: self.signable, - - i: self.i, - - key_image_generators_and_offsets: self.key_image_generators_and_offsets, - clsags, - - our_preprocess, - }, - preprocesses, - ) - } -} - -impl SignMachine for TransactionSignMachine { - type Params = (); - type Keys = ThresholdKeys; - type Preprocess = Vec>; - type SignatureShare = Vec>; - type SignatureMachine = TransactionSignatureMachine; - - fn cache(self) -> CachedPreprocess { - unimplemented!( - "Monero transactions don't support caching their preprocesses due to {}", - "being already bound to a specific transaction" - ); - } - - fn from_cache( - (): (), - _: ThresholdKeys, - _: CachedPreprocess, - ) -> (Self, Self::Preprocess) { - unimplemented!( - "Monero transactions don't support caching their preprocesses due to {}", - "being already bound to a specific transaction" - ); - } - - fn read_preprocess(&self, reader: &mut R) -> io::Result { - self.clsags.iter().map(|clsag| clsag.1.read_preprocess(reader)).collect() - } - - fn sign( - self, - mut commitments: HashMap, - msg: &[u8], - ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> { - if !msg.is_empty() { - panic!("message was passed to the TransactionMachine when it generates its own"); - } - - // We do not need to be included here, yet this set of signers has yet to be validated - // We explicitly remove ourselves to ensure we aren't included twice, if we were redundantly - // included - commitments.remove(&self.i); - - // Find out who's included - let mut included = commitments.keys().copied().collect::>(); - // This push won't duplicate due to the above removal - included.push(self.i); - // unstable sort may reorder elements of equal order - // Given our lack of duplicates, we should have no elements of equal order - included.sort_unstable(); - - // Start calculating the key images, as needed on the TX level - let mut key_images = vec![EdwardsPoint::identity(); 
self.clsags.len()]; - for (image, (generator, offset)) in - key_images.iter_mut().zip(&self.key_image_generators_and_offsets) - { - *image = generator * offset; - } - - // Convert the serialized nonces commitments to a parallelized Vec - let mut commitments = (0 .. self.clsags.len()) - .map(|c| { - included - .iter() - .map(|l| { - let preprocess = if *l == self.i { - self.our_preprocess[c].clone() - } else { - commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone() - }; - - // While here, calculate the key image as needed to call sign - // The CLSAG algorithm will independently calculate the key image/verify these shares - key_images[c] += - preprocess.addendum.key_image_share().0 * lagrange::(*l, &included).0; - - Ok((*l, preprocess)) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - // The above inserted our own preprocess into these maps (which is unnecessary) - // Remove it now - for map in &mut commitments { - map.remove(&self.i); - } - - // The actual TX will have sorted its inputs by key image - // We apply the same sort now to our CLSAG machines - let mut clsags = Vec::with_capacity(self.clsags.len()); - for ((key_image, clsag), commitments) in key_images.iter().zip(self.clsags).zip(commitments) { - clsags.push((key_image, clsag, commitments)); - } - clsags.sort_by(|x, y| key_image_sort(x.0, y.0)); - let clsags = - clsags.into_iter().map(|(_, clsag, commitments)| (clsag, commitments)).collect::>(); - - // Specify the TX's key images - let tx = self.signable.with_key_images(key_images); - - // We now need to decide the masks for each CLSAG - let clsag_len = clsags.len(); - let output_masks = tx.intent.sum_output_masks(&tx.key_images); - let mut rng = tx.intent.seeded_rng(b"multisig_pseudo_out_masks"); - let mut sum_pseudo_outs = Scalar::ZERO; - let mut to_sign = Vec::with_capacity(clsag_len); - for (i, ((clsag_mask_send, clsag), commitments)) in clsags.into_iter().enumerate() { - let mut mask = Scalar::random(&mut rng); - if i == (clsag_len - 1) { - mask = output_masks - sum_pseudo_outs; - } else { - sum_pseudo_outs += mask; - } - clsag_mask_send.send(mask); - to_sign.push((clsag, commitments)); - } - - let tx = tx.transaction_without_signatures(); - let msg = tx.signature_hash().expect("signing a transaction which isn't signed?"); - - // Iterate over each CLSAG calling sign - let mut shares = Vec::with_capacity(to_sign.len()); - let clsags = to_sign - .drain(..) - .map(|(clsag, commitments)| { - let (clsag, share) = clsag.sign(commitments, &msg)?; - shares.push(share); - Ok(clsag) - }) - .collect::>()?; - - Ok((TransactionSignatureMachine { tx, clsags }, shares)) - } -} - -impl SignatureMachine for TransactionSignatureMachine { - type SignatureShare = Vec>; - - fn read_share(&self, reader: &mut R) -> io::Result { - self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect() - } - - fn complete( - mut self, - shares: HashMap, - ) -> Result { - let mut tx = self.tx; - match tx { - Transaction::V2 { - proofs: - Some(RctProofs { - prunable: RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. }, - .. - }), - .. 
- } => { - for (c, clsag) in self.clsags.drain(..).enumerate() { - let (clsag, pseudo_out) = clsag.complete( - shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::>(), - )?; - clsags.push(clsag); - pseudo_outs.push(pseudo_out); - } - } - _ => unreachable!("attempted to sign a multisig TX which wasn't CLSAG"), - } - Ok(tx) - } -} diff --git a/networks/monero/wallet/src/send/tx.rs b/networks/monero/wallet/src/send/tx.rs deleted file mode 100644 index ae6490d7..00000000 --- a/networks/monero/wallet/src/send/tx.rs +++ /dev/null @@ -1,338 +0,0 @@ -use std_shims::{vec, vec::Vec}; - -use curve25519_dalek::{ - constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE}, - Scalar, EdwardsPoint, -}; - -use crate::{ - io::{varint_len, write_varint}, - primitives::Commitment, - ringct::{ - clsag::Clsag, bulletproofs::Bulletproof, EncryptedAmount, RctType, RctBase, RctPrunable, - RctProofs, - }, - transaction::{Input, Output, Timelock, TransactionPrefix, Transaction}, - extra::{ARBITRARY_DATA_MARKER, PaymentId, Extra}, - send::{InternalPayment, SignableTransaction, SignableTransactionWithKeyImages}, -}; - -impl SignableTransaction { - // Output the inputs for this transaction. - pub(crate) fn inputs(&self, key_images: &[EdwardsPoint]) -> Vec { - debug_assert_eq!(self.inputs.len(), key_images.len()); - - let mut res = Vec::with_capacity(self.inputs.len()); - for (input, key_image) in self.inputs.iter().zip(key_images) { - res.push(Input::ToKey { - amount: None, - key_offsets: input.decoys().offsets().to_vec(), - key_image: *key_image, - }); - } - res - } - - // Output the outputs for this transaction. - pub(crate) fn outputs(&self, key_images: &[EdwardsPoint]) -> Vec { - let shared_key_derivations = self.shared_key_derivations(key_images); - debug_assert_eq!(self.payments.len(), shared_key_derivations.len()); - - let mut res = Vec::with_capacity(self.payments.len()); - for (payment, shared_key_derivations) in self.payments.iter().zip(&shared_key_derivations) { - let key = - (&shared_key_derivations.shared_key * ED25519_BASEPOINT_TABLE) + payment.address().spend(); - res.push(Output { - key: key.compress(), - amount: None, - view_tag: (match self.rct_type { - RctType::ClsagBulletproof => false, - RctType::ClsagBulletproofPlus => true, - _ => panic!("unsupported RctType"), - }) - .then_some(shared_key_derivations.view_tag), - }); - } - res - } - - // Calculate the TX extra for this transaction. - pub(crate) fn extra(&self) -> Vec { - let (tx_key, additional_keys) = self.transaction_keys_pub(); - debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len())); - let payment_id_xors = self.payment_id_xors(); - debug_assert_eq!(self.payments.len(), payment_id_xors.len()); - - let amount_of_keys = 1 + additional_keys.len(); - let mut extra = Extra::new(tx_key, additional_keys); - - if let Some((id, id_xor)) = - self.payments.iter().zip(&payment_id_xors).find_map(|(payment, payment_id_xor)| { - payment.address().payment_id().map(|id| (id, payment_id_xor)) - }) - { - let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes(); - let mut id_vec = Vec::with_capacity(1 + 8); - PaymentId::Encrypted(id) - .write(&mut id_vec) - .expect("write failed but doesn't fail"); - extra.push_nonce(id_vec); - } else { - /* - If there's no payment ID, we push a dummy (as wallet2 does) to the first payment. - - This does cause a random payment ID for the other recipient (a documented fingerprint). 
- Functionally, random payment IDs should be fine as wallet2 will trigger this same behavior - (a random payment ID being seen by the recipient) with a batch send if one of the recipient - addresses has a payment ID. - - The alternative would be to not include any payment ID, fingerprinting to the entire - blockchain this is non-standard wallet software (instead of just a single recipient). - */ - if self.payments.len() == 2 { - let (_, payment_id_xor) = self - .payments - .iter() - .zip(&payment_id_xors) - .find(|(payment, _)| matches!(payment, InternalPayment::Payment(_, _))) - .expect("multiple change outputs?"); - let mut id_vec = Vec::with_capacity(1 + 8); - // The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask - PaymentId::Encrypted(*payment_id_xor) - .write(&mut id_vec) - .expect("write failed but doesn't fail"); - extra.push_nonce(id_vec); - } - } - - // Include data if present - for part in &self.data { - let mut arb = vec![ARBITRARY_DATA_MARKER]; - arb.extend(part); - extra.push_nonce(arb); - } - - let mut serialized = Vec::with_capacity(32 * amount_of_keys); - extra.write(&mut serialized).expect("write failed but doesn't fail"); - serialized - } - - pub(crate) fn weight_and_necessary_fee(&self) -> (usize, u64) { - /* - This transaction is variable length to: - - The decoy offsets (fixed) - - The TX extra (variable to key images, requiring an interactive protocol) - - Thankfully, the TX extra *length* is fixed. Accordingly, we can calculate the inevitable TX's - weight at this time with a shimmed transaction. - */ - let base_weight = { - let mut key_images = Vec::with_capacity(self.inputs.len()); - let mut clsags = Vec::with_capacity(self.inputs.len()); - let mut pseudo_outs = Vec::with_capacity(self.inputs.len()); - for _ in &self.inputs { - key_images.push(ED25519_BASEPOINT_POINT); - clsags.push(Clsag { - D: ED25519_BASEPOINT_POINT, - s: vec![ - Scalar::ZERO; - match self.rct_type { - RctType::ClsagBulletproof => 11, - RctType::ClsagBulletproofPlus => 16, - _ => unreachable!("unsupported RCT type"), - } - ], - c1: Scalar::ZERO, - }); - pseudo_outs.push(ED25519_BASEPOINT_POINT); - } - let mut encrypted_amounts = Vec::with_capacity(self.payments.len()); - let mut bp_commitments = Vec::with_capacity(self.payments.len()); - let mut commitments = Vec::with_capacity(self.payments.len()); - for _ in &self.payments { - encrypted_amounts.push(EncryptedAmount::Compact { amount: [0; 8] }); - bp_commitments.push(Commitment::zero()); - commitments.push(ED25519_BASEPOINT_POINT); - } - - let padded_log2 = { - let mut log2_find = 0; - while (1 << log2_find) < self.payments.len() { - log2_find += 1; - } - log2_find - }; - // This is log2 the padded amount of IPA rows - // We have 64 rows per commitment, so we need 64 * c IPA rows - // We rewrite this as 2**6 * c - // By finding the padded log2 of c, we get 2**6 * 2**p - // This declares the log2 to be 6 + p - let lr_len = 6 + padded_log2; - - let bulletproof = match self.rct_type { - RctType::ClsagBulletproof => { - let mut bp = Vec::with_capacity(((9 + (2 * lr_len)) * 32) + 2); - let push_point = |bp: &mut Vec| { - bp.push(1); - bp.extend([0; 31]); - }; - let push_scalar = |bp: &mut Vec| bp.extend([0; 32]); - for _ in 0 .. 4 { - push_point(&mut bp); - } - for _ in 0 .. 2 { - push_scalar(&mut bp); - } - for _ in 0 .. 2 { - write_varint(&lr_len, &mut bp) - .expect("write failed but doesn't fail"); - for _ in 0 .. lr_len { - push_point(&mut bp); - } - } - for _ in 0 .. 
3 { - push_scalar(&mut bp); - } - Bulletproof::read(&mut bp.as_slice()).expect("made an invalid dummy BP") - } - RctType::ClsagBulletproofPlus => { - let mut bp = Vec::with_capacity(((6 + (2 * lr_len)) * 32) + 2); - let push_point = |bp: &mut Vec| { - bp.push(1); - bp.extend([0; 31]); - }; - let push_scalar = |bp: &mut Vec| bp.extend([0; 32]); - for _ in 0 .. 3 { - push_point(&mut bp); - } - for _ in 0 .. 3 { - push_scalar(&mut bp); - } - for _ in 0 .. 2 { - write_varint(&lr_len, &mut bp) - .expect("write failed but doesn't fail"); - for _ in 0 .. lr_len { - push_point(&mut bp); - } - } - Bulletproof::read_plus(&mut bp.as_slice()).expect("made an invalid dummy BP+") - } - _ => panic!("unsupported RctType"), - }; - - // `- 1` to remove the one byte for the 0 fee - Transaction::V2 { - prefix: TransactionPrefix { - additional_timelock: Timelock::None, - inputs: self.inputs(&key_images), - outputs: self.outputs(&key_images), - extra: self.extra(), - }, - proofs: Some(RctProofs { - base: RctBase { fee: 0, encrypted_amounts, pseudo_outs: vec![], commitments }, - prunable: RctPrunable::Clsag { bulletproof, clsags, pseudo_outs }, - }), - } - .weight() - - 1 - }; - - // We now have the base weight, without the fee encoded - // The fee itself will impact the weight as its encoding is [1, 9] bytes long - let mut possible_weights = Vec::with_capacity(9); - for i in 1 ..= 9 { - possible_weights.push(base_weight + i); - } - debug_assert_eq!(possible_weights.len(), 9); - - // We now calculate the fee which would be used for each weight - let mut possible_fees = Vec::with_capacity(9); - for weight in possible_weights { - possible_fees.push(self.fee_rate.calculate_fee_from_weight(weight)); - } - - // We now look for the fee whose length matches the length used to derive it - let mut weight_and_fee = None; - for (fee_len, possible_fee) in possible_fees.into_iter().enumerate() { - let fee_len = 1 + fee_len; - debug_assert!(1 <= fee_len); - debug_assert!(fee_len <= 9); - - // We use the first fee whose encoded length is not larger than the length used within this - // weight - // This should be because the lengths are equal, yet means if somehow none are equal, this - // will still terminate successfully - if varint_len(possible_fee) <= fee_len { - weight_and_fee = Some((base_weight + fee_len, possible_fee)); - break; - } - } - weight_and_fee - .expect("length of highest possible fee was greater than highest possible fee length") - } -} - -impl SignableTransactionWithKeyImages { - pub(crate) fn transaction_without_signatures(&self) -> Transaction { - let commitments_and_encrypted_amounts = - self.intent.commitments_and_encrypted_amounts(&self.key_images); - let mut commitments = Vec::with_capacity(self.intent.payments.len()); - let mut bp_commitments = Vec::with_capacity(self.intent.payments.len()); - let mut encrypted_amounts = Vec::with_capacity(self.intent.payments.len()); - for (commitment, encrypted_amount) in commitments_and_encrypted_amounts { - commitments.push(commitment.calculate()); - bp_commitments.push(commitment); - encrypted_amounts.push(encrypted_amount); - } - let bulletproof = { - let mut bp_rng = self.intent.seeded_rng(b"bulletproof"); - (match self.intent.rct_type { - RctType::ClsagBulletproof => Bulletproof::prove(&mut bp_rng, bp_commitments), - RctType::ClsagBulletproofPlus => Bulletproof::prove_plus(&mut bp_rng, bp_commitments), - _ => panic!("unsupported RctType"), - }) - .expect("couldn't prove BP(+)s for this many payments despite checking in constructor?") - }; - - Transaction::V2 { 
- prefix: TransactionPrefix { - additional_timelock: Timelock::None, - inputs: self.intent.inputs(&self.key_images), - outputs: self.intent.outputs(&self.key_images), - extra: self.intent.extra(), - }, - proofs: Some(RctProofs { - base: RctBase { - fee: if self - .intent - .payments - .iter() - .any(|payment| matches!(payment, InternalPayment::Change(_))) - { - // The necessary fee is the fee - self.intent.weight_and_necessary_fee().1 - } else { - // If we don't have a change output, the difference is the fee - let inputs = - self.intent.inputs.iter().map(|input| input.commitment().amount).sum::(); - let payments = self - .intent - .payments - .iter() - .filter_map(|payment| match payment { - InternalPayment::Payment(_, amount) => Some(amount), - InternalPayment::Change(_) => None, - }) - .sum::(); - // Safe since the constructor checks inputs >= (payments + fee) - inputs - payments - }, - encrypted_amounts, - pseudo_outs: vec![], - commitments, - }, - prunable: RctPrunable::Clsag { bulletproof, clsags: vec![], pseudo_outs: vec![] }, - }), - } - } -} diff --git a/networks/monero/wallet/src/send/tx_keys.rs b/networks/monero/wallet/src/send/tx_keys.rs deleted file mode 100644 index 8ede4ff3..00000000 --- a/networks/monero/wallet/src/send/tx_keys.rs +++ /dev/null @@ -1,281 +0,0 @@ -use core::ops::Deref; -use std_shims::{vec, vec::Vec}; - -use zeroize::{Zeroize, Zeroizing}; - -use rand_core::SeedableRng; -use rand_chacha::ChaCha20Rng; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint}; - -use crate::{ - primitives::{keccak256, Commitment}, - ringct::EncryptedAmount, - SharedKeyDerivations, OutputWithDecoys, - send::{ChangeEnum, InternalPayment, SignableTransaction, key_image_sort}, -}; - -fn seeded_rng( - dst: &'static [u8], - outgoing_view_key: &[u8; 32], - mut input_keys: Vec, -) -> ChaCha20Rng { - // Apply the DST - let mut transcript = Zeroizing::new(vec![ - u8::try_from(dst.len()).expect("internal RNG with constant DST had a too-long DST specified") - ]); - transcript.extend(dst); - - // Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript - transcript.extend(outgoing_view_key); - - // We sort the inputs here to ensure a consistent order - // We use the key image sort as it's applicable and well-defined, not because these are key - // images - input_keys.sort_by(key_image_sort); - - // Ensure uniqueness across transactions by binding to a use-once object - // The keys for the inputs is binding to their key images, making them use-once - for key in input_keys { - transcript.extend(key.compress().to_bytes()); - } - - let res = ChaCha20Rng::from_seed(keccak256(&transcript)); - transcript.zeroize(); - res -} - -/// An iterator yielding an endless amount of ephemeral keys to use within a transaction. -/// -/// This is used when sending and can be used after sending to re-derive the keys used, as -/// necessary for payment proofs. -pub struct TransactionKeys(ChaCha20Rng); -impl TransactionKeys { - /// Construct a new `TransactionKeys`. - /// - /// `input_keys` is the list of keys from the outputs spent within this transaction. 
- pub fn new(outgoing_view_key: &Zeroizing<[u8; 32]>, input_keys: Vec) -> Self { - Self(seeded_rng(b"transaction_keys", outgoing_view_key, input_keys)) - } -} -impl Iterator for TransactionKeys { - type Item = Zeroizing; - fn next(&mut self) -> Option { - Some(Zeroizing::new(Scalar::random(&mut self.0))) - } -} - -impl SignableTransaction { - fn input_keys(&self) -> Vec { - self.inputs.iter().map(OutputWithDecoys::key).collect() - } - - pub(crate) fn seeded_rng(&self, dst: &'static [u8]) -> ChaCha20Rng { - seeded_rng(dst, &self.outgoing_view_key, self.input_keys()) - } - - fn has_payments_to_subaddresses(&self) -> bool { - self.payments.iter().any(|payment| match payment { - InternalPayment::Payment(addr, _) => addr.is_subaddress(), - InternalPayment::Change(change) => match change { - ChangeEnum::AddressOnly(addr) => addr.is_subaddress(), - // These aren't considered payments to subaddresses as we don't need to send to them as - // subaddresses - // We can calculate the shared key using the view key, as if we were receiving, instead - ChangeEnum::Standard { .. } | ChangeEnum::Guaranteed { .. } => false, - }, - }) - } - - fn should_use_additional_keys(&self) -> bool { - let has_payments_to_subaddresses = self.has_payments_to_subaddresses(); - if !has_payments_to_subaddresses { - return false; - } - - let has_change_view = self.payments.iter().any(|payment| match payment { - InternalPayment::Payment(_, _) => false, - InternalPayment::Change(change) => match change { - ChangeEnum::AddressOnly(_) => false, - ChangeEnum::Standard { .. } | ChangeEnum::Guaranteed { .. } => true, - }, - }); - - /* - If sending to a subaddress, the shared key is not `rG` yet `rB`. Because of this, a - per-subaddress shared key is necessary, causing the usage of additional keys. - - The one exception is if we're sending to a subaddress in a 2-output transaction. The second - output, the change output, will attempt scanning the singular key `rB` with `v rB`. While we - cannot calculate `r vB` with just `r` (as that'd require `vB` when we presumably only have - `vG` when sending), since we do in fact have `v` (due to it being our own view key for our - change output), we can still calculate the shared secret. - */ - has_payments_to_subaddresses && !((self.payments.len() == 2) && has_change_view) - } - - // Calculate the transaction keys used as randomness. - fn transaction_keys(&self) -> (Zeroizing, Vec>) { - let mut tx_keys = TransactionKeys::new(&self.outgoing_view_key, self.input_keys()); - - let tx_key = tx_keys.next().expect("TransactionKeys (never-ending) was exhausted"); - - let mut additional_keys = vec![]; - if self.should_use_additional_keys() { - for _ in 0 .. 
self.payments.len() { - additional_keys.push(tx_keys.next().expect("TransactionKeys (never-ending) was exhausted")); - } - } - (tx_key, additional_keys) - } - - fn ecdhs(&self) -> Vec> { - let (tx_key, additional_keys) = self.transaction_keys(); - debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len())); - let (tx_key_pub, additional_keys_pub) = self.transaction_keys_pub(); - debug_assert_eq!(additional_keys_pub.len(), additional_keys.len()); - - let mut res = Vec::with_capacity(self.payments.len()); - for (i, payment) in self.payments.iter().enumerate() { - let addr = payment.address(); - let key_to_use = - if addr.is_subaddress() { additional_keys.get(i).unwrap_or(&tx_key) } else { &tx_key }; - - let ecdh = match payment { - // If we don't have the view key, use the key dedicated for this address (r A) - InternalPayment::Payment(_, _) | - InternalPayment::Change(ChangeEnum::AddressOnly { .. }) => { - Zeroizing::new(key_to_use.deref() * addr.view()) - } - // If we do have the view key, use the commitment to the key (a R) - InternalPayment::Change(ChangeEnum::Standard { view_pair, .. }) => { - Zeroizing::new(view_pair.view.deref() * tx_key_pub) - } - InternalPayment::Change(ChangeEnum::Guaranteed { view_pair, .. }) => { - Zeroizing::new(view_pair.0.view.deref() * tx_key_pub) - } - }; - - res.push(ecdh); - } - res - } - - // Calculate the shared keys and the necessary derivations. - pub(crate) fn shared_key_derivations( - &self, - key_images: &[EdwardsPoint], - ) -> Vec> { - let ecdhs = self.ecdhs(); - - let uniqueness = SharedKeyDerivations::uniqueness(&self.inputs(key_images)); - - let mut res = Vec::with_capacity(self.payments.len()); - for (i, (payment, ecdh)) in self.payments.iter().zip(ecdhs).enumerate() { - let addr = payment.address(); - res.push(SharedKeyDerivations::output_derivations( - addr.is_guaranteed().then_some(uniqueness), - ecdh, - i, - )); - } - res - } - - // Calculate the payment ID XOR masks. - pub(crate) fn payment_id_xors(&self) -> Vec<[u8; 8]> { - let mut res = Vec::with_capacity(self.payments.len()); - for ecdh in self.ecdhs() { - res.push(SharedKeyDerivations::payment_id_xor(ecdh)); - } - res - } - - // Calculate the transaction_keys' commitments. - // - // These depend on the payments. Commitments for payments to subaddresses use the spend key for - // the generator. 
- pub(crate) fn transaction_keys_pub(&self) -> (EdwardsPoint, Vec) { - let (tx_key, additional_keys) = self.transaction_keys(); - debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len())); - - // The single transaction key uses the subaddress's spend key as its generator - let has_payments_to_subaddresses = self.has_payments_to_subaddresses(); - let should_use_additional_keys = self.should_use_additional_keys(); - if has_payments_to_subaddresses && (!should_use_additional_keys) { - debug_assert_eq!(additional_keys.len(), 0); - - let InternalPayment::Payment(addr, _) = self - .payments - .iter() - .find(|payment| matches!(payment, InternalPayment::Payment(_, _))) - .expect("payment to subaddress yet no payment") - else { - panic!("filtered payment wasn't a payment") - }; - - return (tx_key.deref() * addr.spend(), vec![]); - } - - if should_use_additional_keys { - let mut additional_keys_pub = vec![]; - for (additional_key, payment) in additional_keys.into_iter().zip(&self.payments) { - let addr = payment.address(); - // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454 - // /src/device/device_default.cpp#L308-L312 - if addr.is_subaddress() { - additional_keys_pub.push(additional_key.deref() * addr.spend()); - } else { - additional_keys_pub.push(additional_key.deref() * ED25519_BASEPOINT_TABLE) - } - } - return (tx_key.deref() * ED25519_BASEPOINT_TABLE, additional_keys_pub); - } - - debug_assert!(!has_payments_to_subaddresses); - debug_assert!(!should_use_additional_keys); - (tx_key.deref() * ED25519_BASEPOINT_TABLE, vec![]) - } - - pub(crate) fn commitments_and_encrypted_amounts( - &self, - key_images: &[EdwardsPoint], - ) -> Vec<(Commitment, EncryptedAmount)> { - let shared_key_derivations = self.shared_key_derivations(key_images); - - let mut res = Vec::with_capacity(self.payments.len()); - for (payment, shared_key_derivations) in self.payments.iter().zip(shared_key_derivations) { - let amount = match payment { - InternalPayment::Payment(_, amount) => *amount, - InternalPayment::Change(_) => { - let inputs = self.inputs.iter().map(|input| input.commitment().amount).sum::(); - let payments = self - .payments - .iter() - .filter_map(|payment| match payment { - InternalPayment::Payment(_, amount) => Some(amount), - InternalPayment::Change(_) => None, - }) - .sum::(); - let necessary_fee = self.weight_and_necessary_fee().1; - // Safe since the constructor checked this TX has enough funds for itself - inputs - (payments + necessary_fee) - } - }; - let commitment = Commitment::new(shared_key_derivations.commitment_mask(), amount); - let encrypted_amount = EncryptedAmount::Compact { - amount: shared_key_derivations.compact_amount_encryption(amount), - }; - res.push((commitment, encrypted_amount)); - } - res - } - - pub(crate) fn sum_output_masks(&self, key_images: &[EdwardsPoint]) -> Scalar { - self - .commitments_and_encrypted_amounts(key_images) - .into_iter() - .map(|(commitment, _)| commitment.mask) - .sum() - } -} diff --git a/networks/monero/wallet/src/tests/extra.rs b/networks/monero/wallet/src/tests/extra.rs deleted file mode 100644 index 497602ce..00000000 --- a/networks/monero/wallet/src/tests/extra.rs +++ /dev/null @@ -1,202 +0,0 @@ -use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY}; - -use crate::{ - io::write_varint, - extra::{MAX_TX_EXTRA_PADDING_COUNT, ExtraField, Extra}, -}; - -// Tests derived from -// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/ -// 
tests/unit_tests/test_tx_utils.cpp -// which is licensed as follows: -#[rustfmt::skip] -/* -Copyright (c) 2014-2022, The Monero Project - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, -this list of conditions and the following disclaimer in the documentation -and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors -may be used to endorse or promote products derived from this software without -specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Parts of the project are originally copyright (c) 2012-2013 The Cryptonote -developers - -Parts of the project are originally copyright (c) 2014 The Boolberry -developers, distributed under the MIT licence: - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ - -const PUB_KEY_BYTES: [u8; 33] = [ - 1, 30, 208, 98, 162, 133, 64, 85, 83, 112, 91, 188, 89, 211, 24, 131, 39, 154, 22, 228, 80, 63, - 198, 141, 173, 111, 244, 183, 4, 149, 186, 140, 230, -]; - -fn pub_key() -> EdwardsPoint { - CompressedEdwardsY(PUB_KEY_BYTES[1 .. 
PUB_KEY_BYTES.len()].try_into().expect("invalid pub key")) - .decompress() - .unwrap() -} - -fn test_write_buf(extra: &Extra, buf: &[u8]) { - let mut w: Vec = vec![]; - Extra::write(extra, &mut w).unwrap(); - assert_eq!(buf, w); -} - -#[test] -fn empty_extra() { - let buf: Vec = vec![]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert!(extra.0.is_empty()); - test_write_buf(&extra, &buf); -} - -#[test] -fn padding_only_size_1() { - let buf: Vec = vec![0]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::Padding(1)]); - test_write_buf(&extra, &buf); -} - -#[test] -fn padding_only_size_2() { - let buf: Vec = vec![0, 0]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::Padding(2)]); - test_write_buf(&extra, &buf); -} - -#[test] -fn padding_only_max_size() { - let buf: Vec = vec![0; MAX_TX_EXTRA_PADDING_COUNT]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::Padding(MAX_TX_EXTRA_PADDING_COUNT)]); - test_write_buf(&extra, &buf); -} - -#[test] -fn padding_only_exceed_max_size() { - let buf: Vec = vec![0; MAX_TX_EXTRA_PADDING_COUNT + 1]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert!(extra.0.is_empty()); -} - -#[test] -fn invalid_padding_only() { - let buf: Vec = vec![0, 42]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert!(extra.0.is_empty()); -} - -#[test] -fn pub_key_only() { - let buf: Vec = PUB_KEY_BYTES.to_vec(); - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]); - test_write_buf(&extra, &buf); -} - -#[test] -fn extra_nonce_only() { - let buf: Vec = vec![2, 1, 42]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::Nonce(vec![42])]); - test_write_buf(&extra, &buf); -} - -#[test] -fn extra_nonce_only_wrong_size() { - let mut buf: Vec = vec![0; 20]; - buf[0] = 2; - buf[1] = 255; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert!(extra.0.is_empty()); -} - -#[test] -fn pub_key_and_padding() { - let mut buf: Vec = PUB_KEY_BYTES.to_vec(); - buf.extend([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - ]); - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key()), ExtraField::Padding(76)]); - test_write_buf(&extra, &buf); -} - -#[test] -fn pub_key_and_invalid_padding() { - let mut buf: Vec = PUB_KEY_BYTES.to_vec(); - buf.extend([0, 1]); - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]); -} - -#[test] -fn extra_mysterious_minergate_only() { - let buf: Vec = vec![222, 1, 42]; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![42])]); - test_write_buf(&extra, &buf); -} - -#[test] -fn extra_mysterious_minergate_only_large() { - let mut buf: Vec = vec![222]; - write_varint(&512u64, &mut buf).unwrap(); - buf.extend_from_slice(&vec![0; 512]); - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![0; 512])]); - test_write_buf(&extra, &buf); -} - -#[test] 
-fn extra_mysterious_minergate_only_wrong_size() { - let mut buf: Vec = vec![0; 20]; - buf[0] = 222; - buf[1] = 255; - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert!(extra.0.is_empty()); -} - -#[test] -fn extra_mysterious_minergate_and_pub_key() { - let mut buf: Vec = vec![222, 1, 42]; - buf.extend(PUB_KEY_BYTES.to_vec()); - let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap(); - assert_eq!( - extra.0, - vec![ExtraField::MysteriousMinergate(vec![42]), ExtraField::PublicKey(pub_key())] - ); - test_write_buf(&extra, &buf); -} diff --git a/networks/monero/wallet/src/tests/mod.rs b/networks/monero/wallet/src/tests/mod.rs deleted file mode 100644 index 9f151598..00000000 --- a/networks/monero/wallet/src/tests/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -mod extra; -mod scan; diff --git a/networks/monero/wallet/src/tests/scan.rs b/networks/monero/wallet/src/tests/scan.rs deleted file mode 100644 index 4f4da239..00000000 --- a/networks/monero/wallet/src/tests/scan.rs +++ /dev/null @@ -1,168 +0,0 @@ -use monero_rpc::ScannableBlock; -use crate::{ - transaction::{Pruned, Transaction}, - block::Block, - ViewPair, Scanner, WalletOutput, - output::{AbsoluteId, RelativeId, OutputData, Metadata}, - Commitment, - PaymentId::Encrypted, - transaction::Timelock, - ringct::EncryptedAmount, -}; -use zeroize::Zeroizing; -use curve25519_dalek::{Scalar, constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY}; - -const SPEND_KEY: &str = "ccf0ea10e1ea64354f42fa710c2b318e581969cf49046d809d1f0aadb3fc7a02"; -const VIEW_KEY: &str = "a28b4b2085592881df94ee95da332c16b5bb773eb8bb74730208cbb236c73806"; - -#[rustfmt::skip] -const PRUNED_TX_WITH_LONG_ENCRYPTED_AMOUNT: &str = "020001020003060101cf60390bb71aa15eb24037772012d59dc68cb4b6211e1c93206db09a6c346261020002ee8ca293511571c0005e1c144e49d09b8ff03046dbafb3e064a34cb9fc1994b600029e2e5cd08c8681dbcf2ce66071467e835f7e86613fbfed3c4fb170127b94e1072c01d3ce2a622c6e06ed465f81017dd6188c3a6e3d8e65a846f9c98416da0e150a82020901c553d35e54111bd001e0bbcbf289d701ce90e309ead2b487ec1d4d8af5d649543eb99a7620f6b54e532898527be29704f050e6f06de61e5967b2ddd506b4d6d36546065d6aae156ac7bec18c99580c07867fb98cb29853edbafec91af2df605c12f9aaa81a9165625afb6649f5a652012c5ba6612351140e1fb4a8463cc765d0a9bb7d999ba35750f365c5285d77230b76c7a612784f4845812a2899f2ca6a304fee61362db59b263115c27d2ce78af6b1d9e939c1f4036c7707851f41abe6458cf1c748353e593469ebf43536a939f7"; - -#[rustfmt::skip] -const BLOCK: &str = "0202e8e28efe04db09e2fc4d57854786220bd33e0169ff692440d27ae3932b9219df9ab1d7260b00000000014101ff050580d0acf30e02704972eb1878e94686b62fa4c0202f3e7e3a263073bd6edd751990ea769494ee80c0fc82aa0202edac72ab7c5745d4acaa95f76a3b76e238a55743cd51efb586f968e09821788d80d0dbc3f40202f9b4cf3141aac4203a1aaed01f09326615544997d1b68964928d9aafd07e38e580a0e5b9c29101023405e3aa75b1b7adf04e8c7faa3c3d45616ae740a8b11fb7cc1555dd8b9e4c9180c0dfda8ee90602d2b78accfe1c2ae57bed4fe3385f7735a988f160ef3bbc1f9d7a0c911c26ffd92101d2d55b5066d247a97696be4a84bf70873e4f149687f57e606eb6682f11650e1701b74773bbea995079805398052da9b69244bda034b089b50e4d9151dedb59a12f"; - -const OUTPUT_INDEX_FOR_FIRST_RINGCT_OUTPUT: u64 = 0; // note the miner tx is a v1 tx - -fn wallet_output0() -> WalletOutput { - WalletOutput { - absolute_id: AbsoluteId { - transaction: hex::decode("b74773bbea995079805398052da9b69244bda034b089b50e4d9151dedb59a12f") - .unwrap() - .try_into() - .unwrap(), - index_in_transaction: 0, - }, - relative_id: RelativeId { index_on_blockchain: OUTPUT_INDEX_FOR_FIRST_RINGCT_OUTPUT }, - data: OutputData { - key: 
CompressedEdwardsY( - hex::decode("ee8ca293511571c0005e1c144e49d09b8ff03046dbafb3e064a34cb9fc1994b6") - .unwrap() - .try_into() - .unwrap(), - ) - .decompress() - .unwrap(), - key_offset: Scalar::from_canonical_bytes( - hex::decode("f1d21a76ea0bb228fbc5f0dece0597a8ffb59de7a04b29f70b7c0310446ea905") - .unwrap() - .try_into() - .unwrap(), - ) - .unwrap(), - commitment: Commitment { - amount: 10000, - mask: Scalar::from_canonical_bytes( - hex::decode("05c2f142aaf3054cbff0a022f6c7cb75403fd92af0f9441c072ade3f273f7706") - .unwrap() - .try_into() - .unwrap(), - ) - .unwrap(), - }, - }, - metadata: Metadata { - additional_timelock: Timelock::None, - subaddress: None, - payment_id: Some(Encrypted([0, 0, 0, 0, 0, 0, 0, 0])), - arbitrary_data: [].to_vec(), - }, - } -} - -fn wallet_output1() -> WalletOutput { - WalletOutput { - absolute_id: AbsoluteId { - transaction: hex::decode("b74773bbea995079805398052da9b69244bda034b089b50e4d9151dedb59a12f") - .unwrap() - .try_into() - .unwrap(), - index_in_transaction: 1, - }, - relative_id: RelativeId { index_on_blockchain: OUTPUT_INDEX_FOR_FIRST_RINGCT_OUTPUT + 1 }, - data: OutputData { - key: CompressedEdwardsY( - hex::decode("9e2e5cd08c8681dbcf2ce66071467e835f7e86613fbfed3c4fb170127b94e107") - .unwrap() - .try_into() - .unwrap(), - ) - .decompress() - .unwrap(), - key_offset: Scalar::from_canonical_bytes( - hex::decode("c5189738c1cb40e68d464f1a1848a85f6ab2c09652a31849213dc0fefd212806") - .unwrap() - .try_into() - .unwrap(), - ) - .unwrap(), - commitment: Commitment { - amount: 10000, - mask: Scalar::from_canonical_bytes( - hex::decode("c8922ce32cb2bf454a6b77bc91423ba7a18412b71fa39a97a2a743c1fe0bad04") - .unwrap() - .try_into() - .unwrap(), - ) - .unwrap(), - }, - }, - metadata: Metadata { - additional_timelock: Timelock::None, - subaddress: None, - payment_id: Some(Encrypted([0, 0, 0, 0, 0, 0, 0, 0])), - arbitrary_data: [].to_vec(), - }, - } -} - -#[test] -fn scan_long_encrypted_amount() { - // Parse strings - let spend_key_buf = hex::decode(SPEND_KEY).unwrap(); - let spend_key = - Zeroizing::new(Scalar::from_canonical_bytes(spend_key_buf.try_into().unwrap()).unwrap()); - - let view_key_buf = hex::decode(VIEW_KEY).unwrap(); - let view_key = - Zeroizing::new(Scalar::from_canonical_bytes(view_key_buf.try_into().unwrap()).unwrap()); - - let tx_buf = hex::decode(PRUNED_TX_WITH_LONG_ENCRYPTED_AMOUNT).unwrap(); - let tx = Transaction::::read::<&[u8]>(&mut tx_buf.as_ref()).unwrap(); - - let block_buf = hex::decode(BLOCK).unwrap(); - let block = Block::read::<&[u8]>(&mut block_buf.as_ref()).unwrap(); - - // Confirm tx has long form encrypted amounts - match &tx { - Transaction::V2 { prefix: _, proofs } => { - let proofs = proofs.clone().unwrap(); - assert_eq!(proofs.base.encrypted_amounts.len(), 2); - assert!(proofs - .base - .encrypted_amounts - .iter() - .all(|o| matches!(o, EncryptedAmount::Original { .. 
}))); - } - _ => panic!("Unexpected tx version"), - }; - - // Prepare scanner - let spend_pub = &*spend_key * ED25519_BASEPOINT_TABLE; - let view: ViewPair = ViewPair::new(spend_pub, view_key).unwrap(); - let mut scanner = Scanner::new(view); - - // Prepare scannable block - let txs: Vec> = vec![tx]; - let scannable_block = ScannableBlock { - block, - transactions: txs, - output_index_for_first_ringct_output: Some(OUTPUT_INDEX_FOR_FIRST_RINGCT_OUTPUT), - }; - - // Scan the block - let outputs = scanner.scan(scannable_block).unwrap().not_additionally_locked(); - - assert_eq!(outputs.len(), 2); - assert_eq!(outputs[0], wallet_output0()); - assert_eq!(outputs[1], wallet_output1()); -} diff --git a/networks/monero/wallet/src/view_pair.rs b/networks/monero/wallet/src/view_pair.rs deleted file mode 100644 index 3b09f088..00000000 --- a/networks/monero/wallet/src/view_pair.rs +++ /dev/null @@ -1,144 +0,0 @@ -use core::ops::Deref; - -use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint}; - -use crate::{ - primitives::keccak256_to_scalar, - address::{Network, AddressType, SubaddressIndex, MoneroAddress}, -}; - -/// An error while working with a ViewPair. -#[derive(Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(thiserror::Error))] -pub enum ViewPairError { - /// The spend key was torsioned. - /// - /// Torsioned spend keys are of questionable spendability. This library avoids that question by - /// rejecting such ViewPairs. - // CLSAG seems to support it if the challenge does a torsion clear, FCMP++ should ship with a - // torsion clear, yet it's not worth it to modify CLSAG sign to generate challenges until the - // torsion clears and ensure spendability (nor can we reasonably guarantee that in the future) - #[cfg_attr(feature = "std", error("torsioned spend key"))] - TorsionedSpendKey, -} - -/// The pair of keys necessary to scan transactions. -/// -/// This is composed of the public spend key and the private view key. -#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)] -pub struct ViewPair { - spend: EdwardsPoint, - pub(crate) view: Zeroizing, -} - -impl ViewPair { - /// Create a new ViewPair. - pub fn new(spend: EdwardsPoint, view: Zeroizing) -> Result { - if !spend.is_torsion_free() { - Err(ViewPairError::TorsionedSpendKey)?; - } - Ok(ViewPair { spend, view }) - } - - /// The public spend key for this ViewPair. - pub fn spend(&self) -> EdwardsPoint { - self.spend - } - - /// The public view key for this ViewPair. - pub fn view(&self) -> EdwardsPoint { - self.view.deref() * ED25519_BASEPOINT_TABLE - } - - pub(crate) fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar { - keccak256_to_scalar(Zeroizing::new( - [ - b"SubAddr\0".as_ref(), - Zeroizing::new(self.view.to_bytes()).as_ref(), - &index.account().to_le_bytes(), - &index.address().to_le_bytes(), - ] - .concat(), - )) - } - - pub(crate) fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) { - let scalar = self.subaddress_derivation(index); - let spend = self.spend + (&scalar * ED25519_BASEPOINT_TABLE); - let view = self.view.deref() * spend; - (spend, view) - } - - /// Derive a legacy address from this ViewPair. - /// - /// Subaddresses SHOULD be used instead. - pub fn legacy_address(&self, network: Network) -> MoneroAddress { - MoneroAddress::new(network, AddressType::Legacy, self.spend, self.view()) - } - - /// Derive a legacy integrated address from this ViewPair. 
- /// - /// Subaddresses SHOULD be used instead. - pub fn legacy_integrated_address(&self, network: Network, payment_id: [u8; 8]) -> MoneroAddress { - MoneroAddress::new(network, AddressType::LegacyIntegrated(payment_id), self.spend, self.view()) - } - - /// Derive a subaddress from this ViewPair. - pub fn subaddress(&self, network: Network, subaddress: SubaddressIndex) -> MoneroAddress { - let (spend, view) = self.subaddress_keys(subaddress); - MoneroAddress::new(network, AddressType::Subaddress, spend, view) - } -} - -/// The pair of keys necessary to scan outputs immune to the burning bug. -/// -/// This is composed of the public spend key and a non-zero private view key. -/// -/// 'Guaranteed' outputs, or transactions outputs to the burning bug, are not officially specified -/// by the Monero project. They should only be used if necessary. No support outside of -/// monero-wallet is promised. -#[derive(Clone, PartialEq, Eq, Zeroize)] -pub struct GuaranteedViewPair(pub(crate) ViewPair); - -impl GuaranteedViewPair { - /// Create a new GuaranteedViewPair. - pub fn new(spend: EdwardsPoint, view: Zeroizing) -> Result { - ViewPair::new(spend, view).map(GuaranteedViewPair) - } - - /// The public spend key for this GuaranteedViewPair. - pub fn spend(&self) -> EdwardsPoint { - self.0.spend() - } - - /// The public view key for this GuaranteedViewPair. - pub fn view(&self) -> EdwardsPoint { - self.0.view() - } - - /// Returns an address with the provided specification. - /// - /// The returned address will be a featured address with the guaranteed flag set. These should - /// not be presumed to be interoperable with any other software. - pub fn address( - &self, - network: Network, - subaddress: Option, - payment_id: Option<[u8; 8]>, - ) -> MoneroAddress { - let (spend, view) = if let Some(index) = subaddress { - self.0.subaddress_keys(index) - } else { - (self.spend(), self.view()) - }; - - MoneroAddress::new( - network, - AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed: true }, - spend, - view, - ) - } -} diff --git a/networks/monero/wallet/tests/add_data.rs b/networks/monero/wallet/tests/add_data.rs deleted file mode 100644 index bd600d53..00000000 --- a/networks/monero/wallet/tests/add_data.rs +++ /dev/null @@ -1,82 +0,0 @@ -use monero_serai::transaction::Transaction; -use monero_simple_request_rpc::SimpleRequestRpc; -use monero_wallet::{rpc::Rpc, extra::MAX_ARBITRARY_DATA_SIZE, send::SendError}; - -mod runner; - -#[allow(clippy::upper_case_acronyms)] -type SRR = SimpleRequestRpc; - -test!( - add_single_data_less_than_max, - ( - |_, mut builder: Builder, addr| async move { - let arbitrary_data = vec![b'\0'; MAX_ARBITRARY_DATA_SIZE - 1]; - - // make sure we can add to tx - builder.add_data(arbitrary_data.clone()).unwrap(); - - builder.add_payment(addr, 5); - (builder.build().unwrap(), (arbitrary_data,)) - }, - |_rpc: SRR, block, tx: Transaction, mut scanner: Scanner, data: (Vec,)| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.arbitrary_data()[0], data.0); - }, - ), -); - -test!( - add_multiple_data_less_than_max, - ( - |_, mut builder: Builder, addr| async move { - let mut data = vec![]; - for b in 1 ..= 3 { - data.push(vec![b; MAX_ARBITRARY_DATA_SIZE - 1]); - } - - // Add data multiple times - for data in &data { - builder.add_data(data.clone()).unwrap(); - } - - builder.add_payment(addr, 5); - 
(builder.build().unwrap(), data) - }, - |_rpc: SRR, block, tx: Transaction, mut scanner: Scanner, data: Vec>| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.arbitrary_data(), data); - }, - ), -); - -test!( - add_single_data_more_than_max, - ( - |_, mut builder: Builder, addr| async move { - // Make a data that is bigger than the maximum - let mut data = vec![b'a'; MAX_ARBITRARY_DATA_SIZE + 1]; - - // Make sure we get an error if we try to add it to the TX - assert_eq!(builder.add_data(data.clone()), Err(SendError::TooMuchArbitraryData)); - - // Reduce data size and retry. The data will now be 255 bytes long (including the added - // marker), exactly - data.pop(); - builder.add_data(data.clone()).unwrap(); - - builder.add_payment(addr, 5); - (builder.build().unwrap(), data) - }, - |_rpc: SRR, block, tx: Transaction, mut scanner: Scanner, data: Vec| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.arbitrary_data(), vec![data]); - }, - ), -); diff --git a/networks/monero/wallet/tests/decoys.rs b/networks/monero/wallet/tests/decoys.rs deleted file mode 100644 index 9200f7d6..00000000 --- a/networks/monero/wallet/tests/decoys.rs +++ /dev/null @@ -1,163 +0,0 @@ -use monero_simple_request_rpc::SimpleRequestRpc; -use monero_wallet::{ - DEFAULT_LOCK_WINDOW, - transaction::Transaction, - rpc::{Rpc, DecoyRpc}, - WalletOutput, -}; - -mod runner; - -test!( - select_latest_output_as_decoy_canonical, - ( - // First make an initial tx0 - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 2000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 2000000000000); - output - }, - ), - ( - // Then make a second tx1 - |rct_type: RctType, rpc: SimpleRequestRpc, mut builder: Builder, addr, state: _| async move { - let output_tx0: WalletOutput = state; - - let input = OutputWithDecoys::fingerprintable_deterministic_new( - &mut OsRng, - &rpc, - ring_len(rct_type), - rpc.get_height().await.unwrap(), - output_tx0.clone(), - ) - .await - .unwrap(); - builder.add_input(input); - builder.add_payment(addr, 1000000000000); - - (builder.build().unwrap(), (rct_type, output_tx0)) - }, - // Then make sure DSA selects freshly unlocked output from tx1 as a decoy - |rpc, _, tx: Transaction, _: Scanner, state: (_, _)| async move { - use rand_core::OsRng; - - let rpc: SimpleRequestRpc = rpc; - - let height = rpc.get_height().await.unwrap(); - - let most_recent_o_index = rpc.get_o_indexes(tx.hash()).await.unwrap().pop().unwrap(); - - // Make sure output from tx1 is in the block in which it unlocks - let out_tx1 = rpc.get_outs(&[most_recent_o_index]).await.unwrap().swap_remove(0); - assert_eq!(out_tx1.height, height - DEFAULT_LOCK_WINDOW); - assert!(out_tx1.unlocked); - - // Select decoys using spendable output from tx0 as the real, and make sure DSA selects - // the freshly unlocked output from tx1 as a decoy - let (rct_type, output_tx0): (RctType, WalletOutput) = state; - let mut selected_fresh_decoy = false; - let mut attempts = 
1000; - while !selected_fresh_decoy && attempts > 0 { - let decoys = OutputWithDecoys::fingerprintable_deterministic_new( - &mut OsRng, // TODO: use a seeded RNG to consistently select the latest output - &rpc, - ring_len(rct_type), - height, - output_tx0.clone(), - ) - .await - .unwrap() - .decoys() - .clone(); - - selected_fresh_decoy = decoys.positions().contains(&most_recent_o_index); - attempts -= 1; - } - - assert!(selected_fresh_decoy); - assert_eq!(height, rpc.get_height().await.unwrap()); - }, - ), -); - -test!( - select_latest_output_as_decoy, - ( - // First make an initial tx0 - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 2000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 2000000000000); - output - }, - ), - ( - // Then make a second tx1 - |rct_type: RctType, rpc, mut builder: Builder, addr, output_tx0: WalletOutput| async move { - let rpc: SimpleRequestRpc = rpc; - - let input = OutputWithDecoys::new( - &mut OsRng, - &rpc, - ring_len(rct_type), - rpc.get_height().await.unwrap(), - output_tx0.clone(), - ) - .await - .unwrap(); - builder.add_input(input); - builder.add_payment(addr, 1000000000000); - - (builder.build().unwrap(), (rct_type, output_tx0)) - }, - // Then make sure DSA selects freshly unlocked output from tx1 as a decoy - |rpc, _, tx: Transaction, _: Scanner, state: (_, _)| async move { - use rand_core::OsRng; - - let rpc: SimpleRequestRpc = rpc; - - let height = rpc.get_height().await.unwrap(); - - let most_recent_o_index = rpc.get_o_indexes(tx.hash()).await.unwrap().pop().unwrap(); - - // Make sure output from tx1 is in the block in which it unlocks - let out_tx1 = rpc.get_outs(&[most_recent_o_index]).await.unwrap().swap_remove(0); - assert_eq!(out_tx1.height, height - DEFAULT_LOCK_WINDOW); - assert!(out_tx1.unlocked); - - // Select decoys using spendable output from tx0 as the real, and make sure DSA selects - // the freshly unlocked output from tx1 as a decoy - let (rct_type, output_tx0): (RctType, WalletOutput) = state; - let mut selected_fresh_decoy = false; - let mut attempts = 1000; - while !selected_fresh_decoy && attempts > 0 { - let decoys = OutputWithDecoys::new( - &mut OsRng, // TODO: use a seeded RNG to consistently select the latest output - &rpc, - ring_len(rct_type), - height, - output_tx0.clone(), - ) - .await - .unwrap() - .decoys() - .clone(); - - selected_fresh_decoy = decoys.positions().contains(&most_recent_o_index); - attempts -= 1; - } - - assert!(selected_fresh_decoy); - assert_eq!(height, rpc.get_height().await.unwrap()); - }, - ), -); diff --git a/networks/monero/wallet/tests/eventuality.rs b/networks/monero/wallet/tests/eventuality.rs deleted file mode 100644 index c9e1d9eb..00000000 --- a/networks/monero/wallet/tests/eventuality.rs +++ /dev/null @@ -1,80 +0,0 @@ -use curve25519_dalek::constants::ED25519_BASEPOINT_POINT; - -use monero_serai::transaction::Transaction; -use monero_wallet::{ - rpc::Rpc, - address::{AddressType, MoneroAddress}, -}; - -mod runner; - -test!( - eventuality, - ( - |_, mut builder: Builder, _| async move { - // Add a standard address, a payment ID address, a subaddress, and a guaranteed address - // Each have their own slight implications to eventualities - builder.add_payment( - MoneroAddress::new( - Network::Mainnet, - 
AddressType::Legacy, - ED25519_BASEPOINT_POINT, - ED25519_BASEPOINT_POINT, - ), - 1, - ); - builder.add_payment( - MoneroAddress::new( - Network::Mainnet, - AddressType::LegacyIntegrated([0xaa; 8]), - ED25519_BASEPOINT_POINT, - ED25519_BASEPOINT_POINT, - ), - 2, - ); - builder.add_payment( - MoneroAddress::new( - Network::Mainnet, - AddressType::Subaddress, - ED25519_BASEPOINT_POINT, - ED25519_BASEPOINT_POINT, - ), - 3, - ); - builder.add_payment( - MoneroAddress::new( - Network::Mainnet, - AddressType::Featured { subaddress: false, payment_id: None, guaranteed: true }, - ED25519_BASEPOINT_POINT, - ED25519_BASEPOINT_POINT, - ), - 4, - ); - let tx = builder.build().unwrap(); - let eventuality = Eventuality::from(tx.clone()); - assert_eq!( - eventuality, - Eventuality::read::<&[u8]>(&mut eventuality.serialize().as_ref()).unwrap() - ); - (tx, eventuality) - }, - |_, _, mut tx: Transaction, _, eventuality: Eventuality| async move { - // 4 explicitly outputs added and one change output - assert_eq!(tx.prefix().outputs.len(), 5); - - // The eventuality's available extra should be the actual TX's - assert_eq!(tx.prefix().extra, eventuality.extra()); - - // The TX should match - assert!(eventuality.matches(&tx.clone().into())); - - // Mutate the TX - let Transaction::V2 { proofs: Some(ref mut proofs), .. } = tx else { - panic!("TX wasn't RingCT") - }; - proofs.base.commitments[0] += ED25519_BASEPOINT_POINT; - // Verify it no longer matches - assert!(!eventuality.matches(&tx.clone().into())); - }, - ), -); diff --git a/networks/monero/wallet/tests/runner/builder.rs b/networks/monero/wallet/tests/runner/builder.rs deleted file mode 100644 index 7e2abe1e..00000000 --- a/networks/monero/wallet/tests/runner/builder.rs +++ /dev/null @@ -1,82 +0,0 @@ -use zeroize::{Zeroize, Zeroizing}; - -use monero_wallet::{ - ringct::RctType, - rpc::FeeRate, - address::MoneroAddress, - OutputWithDecoys, - send::{Change, SendError, SignableTransaction}, - extra::MAX_ARBITRARY_DATA_SIZE, -}; - -/// A builder for Monero transactions. 
-#[derive(Clone, PartialEq, Eq, Zeroize, Debug)] -pub struct SignableTransactionBuilder { - rct_type: RctType, - outgoing_view_key: Zeroizing<[u8; 32]>, - inputs: Vec, - payments: Vec<(MoneroAddress, u64)>, - change: Change, - data: Vec>, - fee_rate: FeeRate, -} - -impl SignableTransactionBuilder { - pub fn new( - rct_type: RctType, - outgoing_view_key: Zeroizing<[u8; 32]>, - change: Change, - fee_rate: FeeRate, - ) -> Self { - Self { - rct_type, - outgoing_view_key, - inputs: vec![], - payments: vec![], - change, - data: vec![], - fee_rate, - } - } - - pub fn add_input(&mut self, input: OutputWithDecoys) -> &mut Self { - self.inputs.push(input); - self - } - #[allow(unused)] - pub fn add_inputs(&mut self, inputs: &[OutputWithDecoys]) -> &mut Self { - self.inputs.extend(inputs.iter().cloned()); - self - } - - pub fn add_payment(&mut self, dest: MoneroAddress, amount: u64) -> &mut Self { - self.payments.push((dest, amount)); - self - } - #[allow(unused)] - pub fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) -> &mut Self { - self.payments.extend(payments); - self - } - - #[allow(unused)] - pub fn add_data(&mut self, data: Vec) -> Result<&mut Self, SendError> { - if data.len() > MAX_ARBITRARY_DATA_SIZE { - Err(SendError::TooMuchArbitraryData)?; - } - self.data.push(data); - Ok(self) - } - - pub fn build(self) -> Result { - SignableTransaction::new( - self.rct_type, - self.outgoing_view_key, - self.inputs, - self.payments, - self.change, - self.data, - self.fee_rate, - ) - } -} diff --git a/networks/monero/wallet/tests/runner/mod.rs b/networks/monero/wallet/tests/runner/mod.rs deleted file mode 100644 index 361e2f8c..00000000 --- a/networks/monero/wallet/tests/runner/mod.rs +++ /dev/null @@ -1,361 +0,0 @@ -use core::ops::Deref; -use std_shims::sync::LazyLock; - -use zeroize::Zeroizing; -use rand_core::OsRng; - -use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar}; - -use tokio::sync::Mutex; - -use monero_simple_request_rpc::SimpleRequestRpc; -use monero_wallet::{ - ringct::RctType, - transaction::Transaction, - block::Block, - rpc::{Rpc, FeeRate}, - address::{Network, AddressType, MoneroAddress}, - DEFAULT_LOCK_WINDOW, ViewPair, GuaranteedViewPair, WalletOutput, Scanner, -}; - -mod builder; -pub use builder::SignableTransactionBuilder; - -pub fn ring_len(rct_type: RctType) -> u8 { - match rct_type { - RctType::ClsagBulletproof => 11, - RctType::ClsagBulletproofPlus => 16, - _ => panic!("ring size unknown for RctType"), - } -} - -pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) { - let spend = Scalar::random(&mut OsRng); - let spend_pub = &spend * ED25519_BASEPOINT_TABLE; - let view = Zeroizing::new(Scalar::random(&mut OsRng)); - ( - spend, - ViewPair::new(spend_pub, view.clone()).unwrap(), - MoneroAddress::new( - Network::Mainnet, - AddressType::Legacy, - spend_pub, - view.deref() * ED25519_BASEPOINT_TABLE, - ), - ) -} - -#[allow(unused)] -pub fn random_guaranteed_address() -> (Scalar, GuaranteedViewPair, MoneroAddress) { - let spend = Scalar::random(&mut OsRng); - let spend_pub = &spend * ED25519_BASEPOINT_TABLE; - let view = Zeroizing::new(Scalar::random(&mut OsRng)); - ( - spend, - GuaranteedViewPair::new(spend_pub, view.clone()).unwrap(), - MoneroAddress::new( - Network::Mainnet, - AddressType::Legacy, - spend_pub, - view.deref() * ED25519_BASEPOINT_TABLE, - ), - ) -} - -// TODO: Support transactions already on-chain -// TODO: Don't have a side effect of mining blocks more blocks than needed under race conditions -pub async fn 
mine_until_unlocked( - rpc: &SimpleRequestRpc, - addr: &MoneroAddress, - tx_hash: [u8; 32], -) -> Block { - // mine until tx is in a block - let mut height = rpc.get_height().await.unwrap(); - let mut found = false; - let mut block = None; - while !found { - let inner_block = rpc.get_block_by_number(height - 1).await.unwrap(); - found = match inner_block.transactions.iter().find(|&&x| x == tx_hash) { - Some(_) => { - block = Some(inner_block); - true - } - None => { - height = rpc.generate_blocks(addr, 1).await.unwrap().1 + 1; - false - } - } - } - - // Mine until tx's outputs are unlocked - for _ in 0 .. (DEFAULT_LOCK_WINDOW - 1) { - rpc.generate_blocks(addr, 1).await.unwrap(); - } - - block.unwrap() -} - -// Mines 60 blocks and returns an unlocked miner TX output. -#[allow(dead_code)] -pub async fn get_miner_tx_output(rpc: &SimpleRequestRpc, view: &ViewPair) -> WalletOutput { - let mut scanner = Scanner::new(view.clone()); - - // Mine 60 blocks to unlock a miner TX - let start = rpc.get_height().await.unwrap(); - rpc.generate_blocks(&view.legacy_address(Network::Mainnet), 60).await.unwrap(); - - let block = rpc.get_block_by_number(start).await.unwrap(); - scanner - .scan(rpc.get_scannable_block(block).await.unwrap()) - .unwrap() - .ignore_additional_timelock() - .swap_remove(0) -} - -/// Make sure the weight and fee match the expected calculation. -pub fn check_weight_and_fee(tx: &Transaction, fee_rate: FeeRate) { - let Transaction::V2 { proofs: Some(ref proofs), .. } = tx else { panic!("TX wasn't RingCT") }; - let fee = proofs.base.fee; - - let weight = tx.weight(); - let expected_weight = fee_rate.calculate_weight_from_fee(fee).unwrap(); - assert_eq!(weight, expected_weight); - - let expected_fee = fee_rate.calculate_fee_from_weight(weight); - assert_eq!(fee, expected_fee); -} - -pub async fn rpc() -> SimpleRequestRpc { - let rpc = - SimpleRequestRpc::new("http://serai:seraidex@127.0.0.1:18081".to_string()).await.unwrap(); - - const BLOCKS_TO_MINE: usize = 110; - - // Only run once - if rpc.get_height().await.unwrap() > BLOCKS_TO_MINE { - return rpc; - } - - let addr = MoneroAddress::new( - Network::Mainnet, - AddressType::Legacy, - &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, - &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, - ); - - // Mine enough blocks to ensure decoy availability - rpc.generate_blocks(&addr, BLOCKS_TO_MINE).await.unwrap(); - - rpc -} - -pub(crate) static SEQUENTIAL: LazyLock> = LazyLock::new(|| Mutex::new(())); - -#[macro_export] -macro_rules! async_sequential { - ($(async fn $name: ident() $body: block)*) => { - $( - #[tokio::test] - async fn $name() { - let guard = runner::SEQUENTIAL.lock().await; - let local = tokio::task::LocalSet::new(); - local.run_until(async move { - if let Err(err) = tokio::task::spawn_local(async move { $body }).await { - drop(guard); - Err(err).unwrap() - } - }).await; - } - )* - } -} - -#[macro_export] -macro_rules! test { - ( - $name: ident, - ( - $first_tx: expr, - $first_checks: expr, - ), - $(( - $tx: expr, - $checks: expr, - )$(,)?),* - ) => { - async_sequential! 
{ - async fn $name() { - use core::{ops::Deref, any::Any}; - #[cfg(feature = "multisig")] - use std::collections::HashMap; - - use zeroize::Zeroizing; - use rand_core::{RngCore, OsRng}; - - use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar}; - - #[cfg(feature = "multisig")] - use frost::{ - curve::Ed25519, - Participant, - tests::{THRESHOLD, key_gen}, - }; - - use monero_wallet::{ - ringct::RctType, - rpc::FeePriority, - address::Network, - ViewPair, Scanner, OutputWithDecoys, - send::{Change, SignableTransaction, Eventuality}, - }; - - use runner::{ - SignableTransactionBuilder, ring_len, random_address, rpc, mine_until_unlocked, - get_miner_tx_output, check_weight_and_fee, - }; - - type Builder = SignableTransactionBuilder; - - // Run each function as both a single signer and as a multisig - #[allow(clippy::redundant_closure_call)] - for multisig in [false, true] { - // Only run the multisig variant if multisig is enabled - if multisig { - #[cfg(not(feature = "multisig"))] - continue; - } - - let spend = Zeroizing::new(Scalar::random(&mut OsRng)); - #[cfg(feature = "multisig")] - let keys = key_gen::<_, Ed25519>(&mut OsRng); - - let spend_pub = if !multisig { - spend.deref() * ED25519_BASEPOINT_TABLE - } else { - #[cfg(not(feature = "multisig"))] - panic!("Multisig branch called without the multisig feature"); - #[cfg(feature = "multisig")] - keys[&Participant::new(1).unwrap()].group_key().0 - }; - - let rpc = rpc().await; - - let view_priv = Zeroizing::new(Scalar::random(&mut OsRng)); - let mut outgoing_view = Zeroizing::new([0; 32]); - OsRng.fill_bytes(outgoing_view.as_mut()); - let view = ViewPair::new(spend_pub, view_priv.clone()).unwrap(); - let addr = view.legacy_address(Network::Mainnet); - - let miner_tx = get_miner_tx_output(&rpc, &view).await; - - let rct_type = match rpc.get_hardfork_version().await.unwrap() { - 14 => RctType::ClsagBulletproof, - 15 | 16 => RctType::ClsagBulletproofPlus, - _ => panic!("unrecognized hardfork version"), - }; - - let builder = SignableTransactionBuilder::new( - rct_type, - outgoing_view, - Change::new( - ViewPair::new( - &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, - Zeroizing::new(Scalar::random(&mut OsRng)) - ).unwrap(), - None, - ), - rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(), - ); - - let sign = |tx: SignableTransaction| { - let spend = spend.clone(); - #[cfg(feature = "multisig")] - let keys = keys.clone(); - - assert_eq!(&SignableTransaction::read(&mut tx.serialize().as_slice()).unwrap(), &tx); - - let eventuality = Eventuality::from(tx.clone()); - - let tx = if !multisig { - tx.sign(&mut OsRng, &spend).unwrap() - } else { - #[cfg(not(feature = "multisig"))] - panic!("multisig branch called without the multisig feature"); - #[cfg(feature = "multisig")] - { - let mut machines = HashMap::new(); - for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) { - machines.insert(i, tx.clone().multisig(&keys[&i]).unwrap()); - } - - frost::tests::sign_without_caching(&mut OsRng, machines, &[]) - } - }; - - assert_eq!(&eventuality.extra(), &tx.prefix().extra, "eventuality extra was distinct"); - assert!(eventuality.matches(&tx.clone().into()), "eventuality didn't match"); - - tx - }; - - // TODO: Generate a distinct wallet for each transaction to prevent overlap - let next_addr = addr; - - let temp = Box::new({ - let mut builder = builder.clone(); - - let input = OutputWithDecoys::fingerprintable_deterministic_new( - &mut OsRng, - &rpc, - ring_len(rct_type), - rpc.get_height().await.unwrap(), - 
miner_tx, - ).await.unwrap(); - builder.add_input(input); - - let (tx, state) = ($first_tx)(rpc.clone(), builder, next_addr).await; - let fee_rate = tx.fee_rate().clone(); - let signed = sign(tx); - rpc.publish_transaction(&signed).await.unwrap(); - let block = - mine_until_unlocked(&rpc, &random_address().2, signed.hash()).await; - let block = rpc.get_scannable_block(block).await.unwrap(); - let tx = rpc.get_transaction(signed.hash()).await.unwrap(); - check_weight_and_fee(&tx, fee_rate); - let scanner = Scanner::new(view.clone()); - ($first_checks)(rpc.clone(), block, tx, scanner, state).await - }); - #[allow(unused_variables, unused_mut, unused_assignments)] - let mut carried_state: Box = temp; - - $( - let (tx, state) = ($tx)( - rct_type, - rpc.clone(), - builder.clone(), - next_addr, - *carried_state.downcast().unwrap() - ).await; - let fee_rate = tx.fee_rate().clone(); - let signed = sign(tx); - rpc.publish_transaction(&signed).await.unwrap(); - let block = - mine_until_unlocked(&rpc, &random_address().2, signed.hash()).await; - let block = rpc.get_scannable_block(block).await.unwrap(); - let tx = rpc.get_transaction(signed.hash()).await.unwrap(); - if stringify!($name) != "spend_one_input_to_two_outputs_no_change" { - // Skip weight and fee check for the above test because when there is no change, - // the change is added to the fee - check_weight_and_fee(&tx, fee_rate); - } - #[allow(unused_assignments)] - { - let scanner = Scanner::new(view.clone()); - carried_state = Box::new(($checks)(rpc.clone(), block, tx, scanner, state).await); - } - )* - } - } - } - } -} diff --git a/networks/monero/wallet/tests/scan.rs b/networks/monero/wallet/tests/scan.rs deleted file mode 100644 index 1eb7583b..00000000 --- a/networks/monero/wallet/tests/scan.rs +++ /dev/null @@ -1,160 +0,0 @@ -use monero_simple_request_rpc::SimpleRequestRpc; -use monero_wallet::{ - transaction::Transaction, rpc::Rpc, address::SubaddressIndex, extra::PaymentId, GuaranteedScanner, -}; - -mod runner; - -#[allow(clippy::upper_case_acronyms)] -type SRR = SimpleRequestRpc; -type Tx = Transaction; - -test!( - scan_standard_address, - ( - |_, mut builder: Builder, _| async move { - let view = runner::random_address().1; - let scanner = Scanner::new(view.clone()); - builder.add_payment(view.legacy_address(Network::Mainnet), 5); - (builder.build().unwrap(), scanner) - }, - |_rpc: SRR, block, tx: Transaction, _, mut state: Scanner| async move { - let output = state.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - let dummy_payment_id = PaymentId::Encrypted([0u8; 8]); - assert_eq!(output.payment_id(), Some(dummy_payment_id)); - }, - ), -); - -test!( - scan_subaddress, - ( - |_, mut builder: Builder, _| async move { - let subaddress = SubaddressIndex::new(0, 1).unwrap(); - - let view = runner::random_address().1; - let mut scanner = Scanner::new(view.clone()); - scanner.register_subaddress(subaddress); - - builder.add_payment(view.subaddress(Network::Mainnet, subaddress), 5); - (builder.build().unwrap(), (scanner, subaddress)) - }, - |_rpc: SRR, block, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move { - let output = state.0.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.subaddress(), Some(state.1)); - }, - ), -); - -test!( - scan_integrated_address, - ( - |_, mut builder: 
Builder, _| async move { - let view = runner::random_address().1; - let scanner = Scanner::new(view.clone()); - - let mut payment_id = [0u8; 8]; - OsRng.fill_bytes(&mut payment_id); - - builder.add_payment(view.legacy_integrated_address(Network::Mainnet, payment_id), 5); - (builder.build().unwrap(), (scanner, payment_id)) - }, - |_rpc: SRR, block, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move { - let output = state.0.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(state.1))); - }, - ), -); - -test!( - scan_guaranteed, - ( - |_, mut builder: Builder, _| async move { - let view = runner::random_guaranteed_address().1; - let scanner = GuaranteedScanner::new(view.clone()); - builder.add_payment(view.address(Network::Mainnet, None, None), 5); - (builder.build().unwrap(), scanner) - }, - |_rpc: SRR, block, tx: Transaction, _, mut scanner: GuaranteedScanner| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.subaddress(), None); - }, - ), -); - -test!( - scan_guaranteed_subaddress, - ( - |_, mut builder: Builder, _| async move { - let subaddress = SubaddressIndex::new(0, 2).unwrap(); - - let view = runner::random_guaranteed_address().1; - let mut scanner = GuaranteedScanner::new(view.clone()); - scanner.register_subaddress(subaddress); - - builder.add_payment(view.address(Network::Mainnet, Some(subaddress), None), 5); - (builder.build().unwrap(), (scanner, subaddress)) - }, - |_rpc: SRR, block, tx: Tx, _, mut state: (GuaranteedScanner, SubaddressIndex)| async move { - let output = state.0.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.subaddress(), Some(state.1)); - }, - ), -); - -test!( - scan_guaranteed_integrated, - ( - |_, mut builder: Builder, _| async move { - let view = runner::random_guaranteed_address().1; - let scanner = GuaranteedScanner::new(view.clone()); - let mut payment_id = [0u8; 8]; - OsRng.fill_bytes(&mut payment_id); - - builder.add_payment(view.address(Network::Mainnet, None, Some(payment_id)), 5); - (builder.build().unwrap(), (scanner, payment_id)) - }, - |_rpc: SRR, block, tx: Transaction, _, mut state: (GuaranteedScanner, [u8; 8])| async move { - let output = state.0.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(state.1))); - }, - ), -); - -test!( - scan_guaranteed_integrated_subaddress, - ( - |_, mut builder: Builder, _| async move { - let subaddress = SubaddressIndex::new(0, 3).unwrap(); - - let view = runner::random_guaranteed_address().1; - let mut scanner = GuaranteedScanner::new(view.clone()); - scanner.register_subaddress(subaddress); - - let mut payment_id = [0u8; 8]; - OsRng.fill_bytes(&mut payment_id); - - builder.add_payment(view.address(Network::Mainnet, Some(subaddress), Some(payment_id)), 5); - (builder.build().unwrap(), (scanner, payment_id, subaddress)) - }, - |_rpc, block, tx: Tx, _, mut state: (GuaranteedScanner, [u8; 8], SubaddressIndex)| async move { - let output = 
state.0.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(state.1))); - assert_eq!(output.subaddress(), Some(state.2)); - }, - ), -); diff --git a/networks/monero/wallet/tests/send.rs b/networks/monero/wallet/tests/send.rs deleted file mode 100644 index de225fe1..00000000 --- a/networks/monero/wallet/tests/send.rs +++ /dev/null @@ -1,401 +0,0 @@ -use std::collections::HashSet; - -use rand_core::OsRng; - -use monero_simple_request_rpc::SimpleRequestRpc; -use monero_wallet::{ - ringct::RctType, - transaction::Transaction, - rpc::{ScannableBlock, Rpc}, - address::SubaddressIndex, - extra::Extra, - WalletOutput, OutputWithDecoys, -}; - -mod runner; -use runner::{SignableTransactionBuilder, ring_len}; - -#[allow(clippy::upper_case_acronyms)] -type SRR = SimpleRequestRpc; -type SB = ScannableBlock; - -// Set up inputs, select decoys, then add them to the TX builder -async fn add_inputs( - rct_type: RctType, - rpc: &SimpleRequestRpc, - outputs: Vec, - builder: &mut SignableTransactionBuilder, -) { - for output in outputs { - builder.add_input( - OutputWithDecoys::fingerprintable_deterministic_new( - &mut OsRng, - rpc, - ring_len(rct_type), - rpc.get_height().await.unwrap(), - output, - ) - .await - .unwrap(), - ); - } -} - -test!( - spend_miner_output, - ( - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 5); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 5); - }, - ), -); - -test!( - spend_multiple_outputs, - ( - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 1000000000000); - builder.add_payment(addr, 2000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let mut outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 2); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].transaction(), tx.hash()); - outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount)); - assert_eq!(outputs[0].commitment().amount, 1000000000000); - assert_eq!(outputs[1].commitment().amount, 2000000000000); - outputs - }, - ), - ( - |rct_type: RctType, rpc, mut builder: Builder, addr, outputs: Vec| async move { - add_inputs(rct_type, &rpc, outputs, &mut builder).await; - builder.add_payment(addr, 6); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 6); - }, - ), -); - -test!( - // Ideally, this would be single_R, yet it isn't feasible to apply allow(non_snake_case) here - single_r_subaddress_send, - ( - // Consume this builder for an output we can use in the future - // This is needed because we can't get the input from the passed in builder - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 1000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| 
async move { - let outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 1); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].commitment().amount, 1000000000000); - outputs - }, - ), - ( - |rct_type, rpc: SimpleRequestRpc, _, _, outputs: Vec| async move { - use monero_wallet::rpc::FeePriority; - - let view_priv = Zeroizing::new(Scalar::random(&mut OsRng)); - let mut outgoing_view = Zeroizing::new([0; 32]); - OsRng.fill_bytes(outgoing_view.as_mut()); - let change_view = - ViewPair::new(&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, view_priv.clone()) - .unwrap(); - - let mut builder = SignableTransactionBuilder::new( - rct_type, - outgoing_view, - Change::new(change_view.clone(), None), - rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(), - ); - add_inputs(rct_type, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await; - - // Send to a subaddress - let sub_view = ViewPair::new( - &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, - Zeroizing::new(Scalar::random(&mut OsRng)), - ) - .unwrap(); - builder - .add_payment(sub_view.subaddress(Network::Mainnet, SubaddressIndex::new(0, 1).unwrap()), 1); - (builder.build().unwrap(), (change_view, sub_view)) - }, - |_rpc: SRR, block: SB, tx: Transaction, _, views: (ViewPair, ViewPair)| async move { - // Make sure the change can pick up its output - let mut change_scanner = Scanner::new(views.0); - assert!(change_scanner.scan(block.clone()).unwrap().not_additionally_locked().len() == 1); - - // Make sure the subaddress can pick up its output - let mut sub_scanner = Scanner::new(views.1); - sub_scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap()); - let sub_outputs = sub_scanner.scan(block).unwrap().not_additionally_locked(); - assert!(sub_outputs.len() == 1); - assert_eq!(sub_outputs[0].transaction(), tx.hash()); - assert_eq!(sub_outputs[0].commitment().amount, 1); - assert!(sub_outputs[0].subaddress().unwrap().account() == 0); - assert!(sub_outputs[0].subaddress().unwrap().address() == 1); - - // Make sure only one R was included in TX extra - assert!(Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()) - .unwrap() - .keys() - .unwrap() - .1 - .is_none()); - }, - ), -); - -test!( - spend_one_input_to_one_output_plus_change, - ( - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 2000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 1); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].commitment().amount, 2000000000000); - outputs - }, - ), - ( - |rct_type: RctType, rpc, mut builder: Builder, addr, outputs: Vec| async move { - add_inputs(rct_type, &rpc, outputs, &mut builder).await; - builder.add_payment(addr, 2); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - assert_eq!(output.commitment().amount, 2); - }, - ), -); - -test!( - spend_max_outputs, - ( - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 1000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let outputs = 
scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 1); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].commitment().amount, 1000000000000); - outputs - }, - ), - ( - |rct_type: RctType, rpc, mut builder: Builder, addr, outputs: Vec| async move { - add_inputs(rct_type, &rpc, outputs, &mut builder).await; - - for i in 0 .. 15 { - builder.add_payment(addr, i + 1); - } - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let mut scanned_tx = scanner.scan(block).unwrap().not_additionally_locked(); - - let mut output_amounts = HashSet::new(); - for i in 0 .. 15 { - output_amounts.insert(i + 1); - } - for _ in 0 .. 15 { - let output = scanned_tx.swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - let amount = output.commitment().amount; - assert!(output_amounts.remove(&amount)); - } - assert_eq!(output_amounts.len(), 0); - }, - ), -); - -test!( - spend_max_outputs_to_subaddresses, - ( - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 1000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 1); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].commitment().amount, 1000000000000); - outputs - }, - ), - ( - |rct_type: RctType, rpc, mut builder: Builder, _, outputs: Vec| async move { - add_inputs(rct_type, &rpc, outputs, &mut builder).await; - - let view = runner::random_address().1; - let mut scanner = Scanner::new(view.clone()); - - let mut subaddresses = vec![]; - for i in 0 .. 15 { - let subaddress = SubaddressIndex::new(0, i + 1).unwrap(); - scanner.register_subaddress(subaddress); - - builder.add_payment(view.subaddress(Network::Mainnet, subaddress), u64::from(i + 1)); - subaddresses.push(subaddress); - } - - (builder.build().unwrap(), (scanner, subaddresses)) - }, - |_rpc: SimpleRequestRpc, - block, - tx: Transaction, - _, - mut state: (Scanner, Vec)| async move { - use std::collections::HashMap; - - let mut scanned_tx = state.0.scan(block).unwrap().not_additionally_locked(); - - let mut output_amounts_by_subaddress = HashMap::new(); - for i in 0 .. 15 { - output_amounts_by_subaddress.insert(u64::try_from(i + 1).unwrap(), state.1[i]); - } - for _ in 0 .. 
15 { - let output = scanned_tx.swap_remove(0); - assert_eq!(output.transaction(), tx.hash()); - let amount = output.commitment().amount; - - assert_eq!( - output.subaddress().unwrap(), - output_amounts_by_subaddress.remove(&amount).unwrap() - ); - } - assert_eq!(output_amounts_by_subaddress.len(), 0); - }, - ), -); - -test!( - spend_one_input_to_two_outputs_no_change, - ( - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 1000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 1); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].commitment().amount, 1000000000000); - outputs - }, - ), - ( - |rct_type, rpc: SimpleRequestRpc, _, addr, outputs: Vec| async move { - use monero_wallet::rpc::FeePriority; - - let mut outgoing_view = Zeroizing::new([0; 32]); - OsRng.fill_bytes(outgoing_view.as_mut()); - let mut builder = SignableTransactionBuilder::new( - rct_type, - outgoing_view, - Change::fingerprintable(None), - rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(), - ); - add_inputs(rct_type, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await; - builder.add_payment(addr, 10000); - builder.add_payment(addr, 50000); - - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let mut outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 2); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[1].transaction(), tx.hash()); - outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount)); - assert_eq!(outputs[0].commitment().amount, 10000); - assert_eq!(outputs[1].commitment().amount, 50000); - - // The remainder should get shunted to fee, which is fingerprintable - let Transaction::V2 { proofs: Some(ref proofs), .. 
} = tx else { panic!("TX wasn't RingCT") }; - assert_eq!(proofs.base.fee, 1000000000000 - 10000 - 50000); - }, - ), -); - -test!( - subaddress_change, - ( - // Consume this builder for an output we can use in the future - // This is needed because we can't get the input from the passed in builder - |_, mut builder: Builder, addr| async move { - builder.add_payment(addr, 1000000000000); - (builder.build().unwrap(), ()) - }, - |_rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move { - let outputs = scanner.scan(block).unwrap().not_additionally_locked(); - assert_eq!(outputs.len(), 1); - assert_eq!(outputs[0].transaction(), tx.hash()); - assert_eq!(outputs[0].commitment().amount, 1000000000000); - outputs - }, - ), - ( - |rct_type, rpc: SimpleRequestRpc, _, _, outputs: Vec| async move { - use monero_wallet::rpc::FeePriority; - - let view_priv = Zeroizing::new(Scalar::random(&mut OsRng)); - let mut outgoing_view = Zeroizing::new([0; 32]); - OsRng.fill_bytes(outgoing_view.as_mut()); - let change_view = - ViewPair::new(&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, view_priv.clone()) - .unwrap(); - - let mut builder = SignableTransactionBuilder::new( - rct_type, - outgoing_view, - Change::new(change_view.clone(), Some(SubaddressIndex::new(0, 1).unwrap())), - rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(), - ); - add_inputs(rct_type, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await; - - // Send to a random address - let view = ViewPair::new( - &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, - Zeroizing::new(Scalar::random(&mut OsRng)), - ) - .unwrap(); - builder.add_payment(view.legacy_address(Network::Mainnet), 1); - (builder.build().unwrap(), change_view) - }, - |_rpc: SimpleRequestRpc, block, _, _, change_view: ViewPair| async move { - // Make sure the change can pick up its output - let mut change_scanner = Scanner::new(change_view); - change_scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap()); - let outputs = change_scanner.scan(block).unwrap().not_additionally_locked(); - assert!(outputs.len() == 1); - assert!(outputs[0].subaddress().unwrap().account() == 0); - assert!(outputs[0].subaddress().unwrap().address() == 1); - }, - ), -); diff --git a/networks/monero/wallet/tests/wallet2_compatibility.rs b/networks/monero/wallet/tests/wallet2_compatibility.rs deleted file mode 100644 index f9e2a5cd..00000000 --- a/networks/monero/wallet/tests/wallet2_compatibility.rs +++ /dev/null @@ -1,361 +0,0 @@ -use rand_core::{OsRng, RngCore}; - -use serde::Deserialize; -use serde_json::json; - -use monero_simple_request_rpc::SimpleRequestRpc; -use monero_wallet::{ - transaction::Transaction, - rpc::{FeePriority, Rpc}, - address::{Network, SubaddressIndex, MoneroAddress}, - extra::{MAX_ARBITRARY_DATA_SIZE, Extra, PaymentId}, - Scanner, -}; - -mod runner; - -#[derive(Clone, Copy, PartialEq, Eq)] -enum AddressSpec { - Legacy, - LegacyIntegrated([u8; 8]), - Subaddress(SubaddressIndex), -} - -#[derive(Deserialize, Debug)] -struct EmptyResponse {} - -async fn make_integrated_address(rpc: &SimpleRequestRpc, payment_id: [u8; 8]) -> String { - #[derive(Debug, Deserialize)] - struct IntegratedAddressResponse { - integrated_address: String, - } - - let res = rpc - .json_rpc_call::( - "make_integrated_address", - Some(json!({ "payment_id": hex::encode(payment_id) })), - ) - .await - .unwrap(); - - res.integrated_address -} - -async fn initialize_rpcs() -> (SimpleRequestRpc, SimpleRequestRpc, MoneroAddress) { - let wallet_rpc = 
SimpleRequestRpc::new("http://127.0.0.1:18082".to_string()).await.unwrap(); - let daemon_rpc = runner::rpc().await; - - #[derive(Debug, Deserialize)] - struct AddressResponse { - address: String, - } - - let mut wallet_id = [0; 8]; - OsRng.fill_bytes(&mut wallet_id); - let _: EmptyResponse = wallet_rpc - .json_rpc_call( - "create_wallet", - Some(json!({ "filename": hex::encode(wallet_id), "language": "English" })), - ) - .await - .unwrap(); - - let address: AddressResponse = - wallet_rpc.json_rpc_call("get_address", Some(json!({ "account_index": 0 }))).await.unwrap(); - - // Fund the new wallet - let address = MoneroAddress::from_str(Network::Mainnet, &address.address).unwrap(); - daemon_rpc.generate_blocks(&address, 70).await.unwrap(); - - (wallet_rpc, daemon_rpc, address) -} - -async fn from_wallet_rpc_to_self(spec: AddressSpec) { - // initialize rpc - let (wallet_rpc, daemon_rpc, wallet_rpc_addr) = initialize_rpcs().await; - - // make an addr - let (_, view_pair, _) = runner::random_address(); - let addr = match spec { - AddressSpec::Legacy => view_pair.legacy_address(Network::Mainnet), - AddressSpec::LegacyIntegrated(payment_id) => { - view_pair.legacy_integrated_address(Network::Mainnet, payment_id) - } - AddressSpec::Subaddress(index) => view_pair.subaddress(Network::Mainnet, index), - }; - - // refresh & make a tx - let _: EmptyResponse = wallet_rpc.json_rpc_call("refresh", None).await.unwrap(); - - #[derive(Debug, Deserialize)] - struct TransferResponse { - tx_hash: String, - } - let tx: TransferResponse = wallet_rpc - .json_rpc_call( - "transfer", - Some(json!({ - "destinations": [{"address": addr.to_string(), "amount": 1_000_000_000_000u64 }], - })), - ) - .await - .unwrap(); - let tx_hash = hex::decode(tx.tx_hash).unwrap().try_into().unwrap(); - - let fee_rate = daemon_rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(); - - // unlock it - let block = runner::mine_until_unlocked(&daemon_rpc, &wallet_rpc_addr, tx_hash).await; - let block = daemon_rpc.get_scannable_block(block).await.unwrap(); - - // Create the scanner - let mut scanner = Scanner::new(view_pair); - if let AddressSpec::Subaddress(index) = spec { - scanner.register_subaddress(index); - } - - // Retrieve it and scan it - let output = scanner.scan(block).unwrap().not_additionally_locked().swap_remove(0); - assert_eq!(output.transaction(), tx_hash); - - runner::check_weight_and_fee(&daemon_rpc.get_transaction(tx_hash).await.unwrap(), fee_rate); - - match spec { - AddressSpec::Subaddress(index) => { - assert_eq!(output.subaddress(), Some(index)); - assert_eq!(output.payment_id(), Some(PaymentId::Encrypted([0u8; 8]))); - } - AddressSpec::LegacyIntegrated(payment_id) => { - assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(payment_id))); - assert_eq!(output.subaddress(), None); - } - AddressSpec::Legacy => { - assert_eq!(output.subaddress(), None); - assert_eq!(output.payment_id(), Some(PaymentId::Encrypted([0u8; 8]))); - } - } - assert_eq!(output.commitment().amount, 1000000000000); -} - -async_sequential!( - async fn receipt_of_wallet_rpc_tx_standard() { - from_wallet_rpc_to_self(AddressSpec::Legacy).await; - } - - async fn receipt_of_wallet_rpc_tx_subaddress() { - from_wallet_rpc_to_self(AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap())).await; - } - - async fn receipt_of_wallet_rpc_tx_integrated() { - let mut payment_id = [0u8; 8]; - OsRng.fill_bytes(&mut payment_id); - from_wallet_rpc_to_self(AddressSpec::LegacyIntegrated(payment_id)).await; - } -); - -#[derive(PartialEq, Eq, Debug, 
Deserialize)] -struct Index { - major: u32, - minor: u32, -} - -#[derive(Debug, Deserialize)] -struct Transfer { - payment_id: String, - subaddr_index: Index, - amount: u64, -} - -#[derive(Debug, Deserialize)] -struct TransfersResponse { - transfer: Transfer, - transfers: Vec, -} - -test!( - send_to_wallet_rpc_standard, - ( - |_, mut builder: Builder, _| async move { - // initialize rpc - let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await; - - // add destination - builder.add_payment(wallet_rpc_addr, 1000000); - (builder.build().unwrap(), wallet_rpc) - }, - |_, _, tx: Transaction, _, data: SimpleRequestRpc| async move { - // confirm receipt - let _: EmptyResponse = data.json_rpc_call("refresh", None).await.unwrap(); - let transfer: TransfersResponse = data - .json_rpc_call("get_transfer_by_txid", Some(json!({ "txid": hex::encode(tx.hash()) }))) - .await - .unwrap(); - assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 }); - assert_eq!(transfer.transfer.amount, 1000000); - assert_eq!(transfer.transfer.payment_id, hex::encode([0u8; 8])); - }, - ), -); - -test!( - send_to_wallet_rpc_subaddress, - ( - |_, mut builder: Builder, _| async move { - // initialize rpc - let (wallet_rpc, _, _) = initialize_rpcs().await; - - // make the subaddress - #[derive(Debug, Deserialize)] - struct AccountResponse { - address: String, - account_index: u32, - } - let addr: AccountResponse = wallet_rpc.json_rpc_call("create_account", None).await.unwrap(); - assert!(addr.account_index != 0); - - builder - .add_payment(MoneroAddress::from_str(Network::Mainnet, &addr.address).unwrap(), 1000000); - (builder.build().unwrap(), (wallet_rpc, addr.account_index)) - }, - |_, _, tx: Transaction, _, data: (SimpleRequestRpc, u32)| async move { - // confirm receipt - let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap(); - let transfer: TransfersResponse = data - .0 - .json_rpc_call( - "get_transfer_by_txid", - Some(json!({ "txid": hex::encode(tx.hash()), "account_index": data.1 })), - ) - .await - .unwrap(); - assert_eq!(transfer.transfer.subaddr_index, Index { major: data.1, minor: 0 }); - assert_eq!(transfer.transfer.amount, 1000000); - assert_eq!(transfer.transfer.payment_id, hex::encode([0u8; 8])); - - // Make sure only one R was included in TX extra - assert!(Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()) - .unwrap() - .keys() - .unwrap() - .1 - .is_none()); - }, - ), -); - -test!( - send_to_wallet_rpc_subaddresses, - ( - |_, mut builder: Builder, _| async move { - // initialize rpc - let (wallet_rpc, daemon_rpc, _) = initialize_rpcs().await; - - // make the subaddress - #[derive(Debug, Deserialize)] - struct AddressesResponse { - addresses: Vec, - address_index: u32, - } - let addrs: AddressesResponse = wallet_rpc - .json_rpc_call("create_address", Some(json!({ "account_index": 0, "count": 2 }))) - .await - .unwrap(); - assert!(addrs.address_index != 0); - assert!(addrs.addresses.len() == 2); - - builder.add_payments(&[ - (MoneroAddress::from_str(Network::Mainnet, &addrs.addresses[0]).unwrap(), 1000000), - (MoneroAddress::from_str(Network::Mainnet, &addrs.addresses[1]).unwrap(), 2000000), - ]); - (builder.build().unwrap(), (wallet_rpc, daemon_rpc, addrs.address_index)) - }, - |_, _, tx: Transaction, _, data: (SimpleRequestRpc, SimpleRequestRpc, u32)| async move { - // confirm receipt - let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap(); - let transfer: TransfersResponse = data - .0 - .json_rpc_call( - "get_transfer_by_txid", - 
Some(json!({ "txid": hex::encode(tx.hash()), "account_index": 0 })), - ) - .await - .unwrap(); - - assert_eq!(transfer.transfers.len(), 2); - for t in transfer.transfers { - match t.amount { - 1000000 => assert_eq!(t.subaddr_index, Index { major: 0, minor: data.2 }), - 2000000 => assert_eq!(t.subaddr_index, Index { major: 0, minor: data.2 + 1 }), - _ => unreachable!(), - } - } - - // Make sure 3 additional pub keys are included in TX extra - let keys = - Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()).unwrap().keys().unwrap().1.unwrap(); - - assert_eq!(keys.len(), 3); - }, - ), -); - -test!( - send_to_wallet_rpc_integrated, - ( - |_, mut builder: Builder, _| async move { - // initialize rpc - let (wallet_rpc, _, _) = initialize_rpcs().await; - - // make the addr - let mut payment_id = [0u8; 8]; - OsRng.fill_bytes(&mut payment_id); - let addr = make_integrated_address(&wallet_rpc, payment_id).await; - - builder.add_payment(MoneroAddress::from_str(Network::Mainnet, &addr).unwrap(), 1000000); - (builder.build().unwrap(), (wallet_rpc, payment_id)) - }, - |_, _, tx: Transaction, _, data: (SimpleRequestRpc, [u8; 8])| async move { - // confirm receipt - let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap(); - let transfer: TransfersResponse = data - .0 - .json_rpc_call("get_transfer_by_txid", Some(json!({ "txid": hex::encode(tx.hash()) }))) - .await - .unwrap(); - assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 }); - assert_eq!(transfer.transfer.payment_id, hex::encode(data.1)); - assert_eq!(transfer.transfer.amount, 1000000); - }, - ), -); - -test!( - send_to_wallet_rpc_with_arb_data, - ( - |_, mut builder: Builder, _| async move { - // initialize rpc - let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await; - - // add destination - builder.add_payment(wallet_rpc_addr, 1000000); - - // Make 2 data that is the full 255 bytes - for _ in 0 .. 
2 { - let data = vec![b'a'; MAX_ARBITRARY_DATA_SIZE]; - builder.add_data(data).unwrap(); - } - - (builder.build().unwrap(), wallet_rpc) - }, - |_, _, tx: Transaction, _, data: SimpleRequestRpc| async move { - // confirm receipt - let _: EmptyResponse = data.json_rpc_call("refresh", None).await.unwrap(); - let transfer: TransfersResponse = data - .json_rpc_call("get_transfer_by_txid", Some(json!({ "txid": hex::encode(tx.hash()) }))) - .await - .unwrap(); - assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 }); - assert_eq!(transfer.transfer.amount, 1000000); - }, - ), -); diff --git a/processor/Cargo.toml b/processor/Cargo.toml index 9d29bc7c..e881a85e 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -52,8 +52,8 @@ ethereum-serai = { path = "../networks/ethereum", default-features = false, opti # Monero dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"], optional = true } -monero-simple-request-rpc = { path = "../networks/monero/rpc/simple-request", default-features = false, optional = true } -monero-wallet = { path = "../networks/monero/wallet", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true } +monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4", default-features = false, optional = true } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true } # Application log = { version = "0.4", default-features = false, features = ["std"] } diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml index 2186b26c..80ca60eb 100644 --- a/substrate/client/Cargo.toml +++ b/substrate/client/Cargo.toml @@ -39,7 +39,7 @@ simple-request = { path = "../../common/request", version = "0.1", optional = tr bitcoin = { version = "0.32", optional = true } ciphersuite = { path = "../../crypto/ciphersuite", version = "0.4", optional = true } -monero-wallet = { path = "../../networks/monero/wallet", version = "0.1.0", default-features = false, features = ["std"], optional = true } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4", version = "0.1.0", default-features = false, features = ["std"], optional = true } [dev-dependencies] rand_core = "0.6" diff --git a/tests/full-stack/Cargo.toml b/tests/full-stack/Cargo.toml index 12af01bd..ddcbbbdc 100644 --- a/tests/full-stack/Cargo.toml +++ b/tests/full-stack/Cargo.toml @@ -27,8 +27,8 @@ rand_core = { version = "0.6", default-features = false } curve25519-dalek = { version = "4", features = ["rand_core"] } bitcoin-serai = { path = "../../networks/bitcoin" } -monero-simple-request-rpc = { path = "../../networks/monero/rpc/simple-request" } -monero-wallet = { path = "../../networks/monero/wallet" } +monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } scale = { package = "parity-scale-codec", version = "3" } serde = "1" diff --git a/tests/no-std/Cargo.toml b/tests/no-std/Cargo.toml index 16ca5d24..36ba85f4 100644 --- a/tests/no-std/Cargo.toml +++ b/tests/no-std/Cargo.toml @@ -34,5 +34,3 @@ dkg = { path = 
"../../crypto/dkg", default-features = false } # frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false } bitcoin-serai = { path = "../../networks/bitcoin", default-features = false, features = ["hazmat"] } - -monero-wallet = { path = "../../networks/monero/wallet", default-features = false, features = ["compile-time-generators"] } diff --git a/tests/no-std/src/lib.rs b/tests/no-std/src/lib.rs index f1824050..8339da2e 100644 --- a/tests/no-std/src/lib.rs +++ b/tests/no-std/src/lib.rs @@ -19,5 +19,3 @@ pub use frost_schnorrkel; */ pub use bitcoin_serai; - -pub use monero_wallet; diff --git a/tests/processor/Cargo.toml b/tests/processor/Cargo.toml index 8817b0c9..395bcad8 100644 --- a/tests/processor/Cargo.toml +++ b/tests/processor/Cargo.toml @@ -31,8 +31,8 @@ bitcoin-serai = { path = "../../networks/bitcoin" } k256 = "0.13" ethereum-serai = { path = "../../networks/ethereum" } -monero-simple-request-rpc = { path = "../../networks/monero/rpc/simple-request" } -monero-wallet = { path = "../../networks/monero/wallet" } +monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } messages = { package = "serai-processor-messages", path = "../../processor/messages" } From b743c9a43e4b5271ff856fbad87266f249e8801c Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 15:26:16 -0400 Subject: [PATCH 065/116] Update Rust version This causes the Serai node to compile and run again. --- .github/nightly-version | 2 +- .github/workflows/lint.yml | 2 +- Cargo.toml | 22 ++++++++++++++++++++-- deny.toml | 1 + rust-toolchain.toml | 2 +- 5 files changed, 24 insertions(+), 5 deletions(-) diff --git a/.github/nightly-version b/.github/nightly-version index e67d5713..45e51c4c 100644 --- a/.github/nightly-version +++ b/.github/nightly-version @@ -1 +1 @@ -nightly-2024-09-01 +nightly-2025-08-01 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index eae3c59a..fd63493c 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -26,7 +26,7 @@ jobs: uses: ./.github/actions/build-dependencies - name: Install nightly rust - run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c clippy + run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c rust-src -c clippy - name: Run Clippy run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module diff --git a/Cargo.toml b/Cargo.toml index e0911b41..6361422d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -154,6 +154,7 @@ directories-next = { path = "patches/directories-next" } [workspace.lints.clippy] unwrap_or_default = "allow" +manual_is_multiple_of = "allow" borrow_as_ptr = "deny" cast_lossless = "deny" cast_possible_truncation = "deny" @@ -178,14 +179,14 @@ large_stack_arrays = "deny" linkedlist = "deny" macro_use_imports = "deny" manual_instant_elapsed = "deny" -manual_let_else = "deny" +# TODO manual_let_else = "deny" manual_ok_or = "deny" manual_string_new = "deny" map_unwrap_or = "deny" match_bool = "deny" match_same_arms = "deny" missing_fields_in_debug = "deny" -needless_continue = "deny" +# TODO needless_continue = "deny" needless_pass_by_value = "deny" ptr_cast_constness = "deny" range_minus_one = "deny" @@ -202,3 +203,20 @@ 
unnested_or_patterns = "deny" unused_async = "deny" unused_self = "deny" zero_sized_map_values = "deny" + +# TODO: These were incurred when updating Rust as necessary for compilation, yet aren't being fixed +# at this time due to the impacts it'd have throughout the repository (when this isn't actively the +# primary branch, `next` is) +needless_continue = "allow" +needless_lifetimes = "allow" +useless_conversion = "allow" +empty_line_after_doc_comments = "allow" +manual_div_ceil = "allow" +manual_let_else = "allow" +unnecessary_map_or = "allow" +result_large_err = "allow" +unneeded_struct_pattern = "allow" +[workspace.lints.rust] +mismatched_lifetime_syntaxes = "allow" +unused_attributes = "allow" +unused-parens = "allow" diff --git a/deny.toml b/deny.toml index 881761c8..cc2f2c3d 100644 --- a/deny.toml +++ b/deny.toml @@ -106,4 +106,5 @@ allow-git = [ "https://github.com/monero-oxide/monero-oxide", "https://github.com/serai-dex/substrate-bip39", "https://github.com/serai-dex/substrate", + "https://github.com/serai-dex/polkadot-sdk", ] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 7ed4c04e..cdd2b730 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.82" +channel = "1.89" targets = ["wasm32v1-none"] profile = "minimal" components = ["rust-src", "rustfmt", "clippy"] From 6c33e187451f0361e19ffa0fc2d66a9ebc8bfae2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 16:14:10 -0400 Subject: [PATCH 066/116] Explicitly install python3 to fix build-dependencies --- .github/actions/build-dependencies/action.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index 1c6c7f56..b34bb933 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -10,8 +10,10 @@ runs: sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" + + # The following removal command requires explicitly installing python/removing shim-signed + sudo apt install --fix-missing --fix-broken -y python3 python3.12 libpython3-stdlib sudo apt remove -y --allow-remove-essential -f shim-signed - # This command would fail, due to shim-signed having unmet dependencies, hence its removal sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" if: runner.os == 'Linux' From 078d6e51e5fd857ac2d598ef608371be0d9bd700 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 16:17:31 -0400 Subject: [PATCH 067/116] Re-install python3 after removal to solve unmet dependencies --- .github/actions/build-dependencies/action.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/actions/build-dependencies/action.yml b/.github/actions/build-dependencies/action.yml index b34bb933..c50ce6cf 100644 --- a/.github/actions/build-dependencies/action.yml +++ b/.github/actions/build-dependencies/action.yml @@ -11,10 +11,11 @@ runs: sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*" sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*" - # The following removal command requires explicitly installing python/removing 
shim-signed - sudo apt install --fix-missing --fix-broken -y python3 python3.12 libpython3-stdlib - sudo apt remove -y --allow-remove-essential -f shim-signed + sudo apt remove -y --allow-remove-essential -f shim-signed *python3* + # This removal command requires the prior removals due to unmet dependencies otherwise sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*" + # Reinstall python3 as a general dependency of a functional operating system + sudo apt install python3 if: runner.os == 'Linux' - name: Remove unused packages From 15a9cbef40282eb2d8f434207d5bde24b3745dcf Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 17:33:22 -0400 Subject: [PATCH 068/116] git checkout -f next ./crypto Proceeds to remove the eVRF DKG after, only keeping what's relevant to this branch alone. --- crypto/ciphersuite/Cargo.toml | 2 +- crypto/ciphersuite/src/dalek.rs | 6 + crypto/ciphersuite/src/ed448.rs | 6 + crypto/ciphersuite/src/kp256.rs | 18 ++- crypto/ciphersuite/src/lib.rs | 9 ++ crypto/dalek-ff-group/Cargo.toml | 2 +- crypto/dalek-ff-group/src/field.rs | 4 +- crypto/dkg/Cargo.toml | 6 +- crypto/dkg/src/encryption.rs | 214 ++++++++++++++++------------- crypto/dkg/src/lib.rs | 102 ++++++++++---- crypto/dkg/src/musig.rs | 43 ++---- crypto/dkg/src/pedpop.rs | 35 +++-- crypto/dkg/src/promote.rs | 1 + crypto/dkg/src/tests/mod.rs | 5 +- crypto/dkg/src/tests/pedpop.rs | 18 ++- crypto/dkg/src/tests/promote.rs | 4 + crypto/dleq/Cargo.toml | 6 +- crypto/dleq/src/cross_group/mod.rs | 2 +- crypto/dleq/src/lib.rs | 4 +- crypto/ed448/Cargo.toml | 2 +- crypto/ff-group-tests/src/group.rs | 8 +- crypto/frost/Cargo.toml | 2 +- crypto/frost/src/sign.rs | 18 ++- crypto/frost/src/tests/vectors.rs | 1 + crypto/multiexp/Cargo.toml | 2 +- crypto/multiexp/src/lib.rs | 2 +- crypto/schnorr/Cargo.toml | 2 +- crypto/schnorr/src/aggregate.rs | 3 +- crypto/schnorrkel/Cargo.toml | 2 +- crypto/transcript/Cargo.toml | 2 +- 30 files changed, 318 insertions(+), 213 deletions(-) diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 9fcf60a6..b666dbaa 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite authors = ["Luke Parker "] keywords = ["ciphersuite", "ff", "group"] edition = "2021" -rust-version = "1.74" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/ciphersuite/src/dalek.rs b/crypto/ciphersuite/src/dalek.rs index bd9c70c1..a04195b2 100644 --- a/crypto/ciphersuite/src/dalek.rs +++ b/crypto/ciphersuite/src/dalek.rs @@ -28,6 +28,12 @@ macro_rules! 
dalek_curve { $Point::generator() } + fn reduce_512(mut scalar: [u8; 64]) -> Self::F { + let res = Scalar::from_bytes_mod_order_wide(&scalar); + scalar.zeroize(); + res + } + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { Scalar::from_hash(Sha512::new_with_prefix(&[dst, data].concat())) } diff --git a/crypto/ciphersuite/src/ed448.rs b/crypto/ciphersuite/src/ed448.rs index 8a927251..0b19ffa5 100644 --- a/crypto/ciphersuite/src/ed448.rs +++ b/crypto/ciphersuite/src/ed448.rs @@ -66,6 +66,12 @@ impl Ciphersuite for Ed448 { Point::generator() } + fn reduce_512(mut scalar: [u8; 64]) -> Self::F { + let res = Self::hash_to_F(b"Ciphersuite-reduce_512", &scalar); + scalar.zeroize(); + res + } + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_ref().try_into().unwrap()) } diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index 37fdb2e4..a1f64ae4 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -6,7 +6,7 @@ use group::ff::PrimeField; use elliptic_curve::{ generic_array::GenericArray, - bigint::{NonZero, CheckedAdd, Encoding, U384}, + bigint::{NonZero, CheckedAdd, Encoding, U384, U512}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}, }; @@ -31,6 +31,22 @@ macro_rules! kp_curve { $lib::ProjectivePoint::GENERATOR } + fn reduce_512(scalar: [u8; 64]) -> Self::F { + let mut modulus = [0; 64]; + modulus[32 ..].copy_from_slice(&(Self::F::ZERO - Self::F::ONE).to_bytes()); + let modulus = U512::from_be_slice(&modulus).checked_add(&U512::ONE).unwrap(); + + let mut wide = + U512::from_be_bytes(scalar).rem(&NonZero::new(modulus).unwrap()).to_be_bytes(); + + let mut array = *GenericArray::from_slice(&wide[32 ..]); + let res = $lib::Scalar::from_repr(array).unwrap(); + + wide.zeroize(); + array.zeroize(); + res + } + fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { // While one of these two libraries does support directly hashing to the Scalar field, the // other doesn't. While that's probably an oversight, this is a universally working method diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index e5ea6645..6519a413 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -62,6 +62,12 @@ pub trait Ciphersuite: // While group does provide this in its API, privacy coins may want to use a custom basepoint fn generator() -> Self::G; + /// Reduce 512 bits into a uniform scalar. + /// + /// If 512 bits is insufficient to perform a reduction into a uniform scalar, the ciphersuite + /// will perform a hash to sample the necessary bits. + fn reduce_512(scalar: [u8; 64]) -> Self::F; + /// Hash the provided domain-separation tag and message to a scalar. Ciphersuites MAY naively /// prefix the tag to the message, enabling transpotion between the two. Accordingly, this /// function should NOT be used in any scheme where one tag is a valid substring of another @@ -99,6 +105,9 @@ pub trait Ciphersuite: } /// Read a canonical point from something implementing std::io::Read. + /// + /// The provided implementation is safe so long as `GroupEncoding::to_bytes` always returns a + /// canonical serialization. 
#[cfg(any(feature = "alloc", feature = "std"))] #[allow(non_snake_case)] fn read_G(reader: &mut R) -> io::Result { diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index 29b8806c..b41e1f4e 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-gr authors = ["Luke Parker "] keywords = ["curve25519", "ed25519", "ristretto", "dalek", "group"] edition = "2021" -rust-version = "1.66" +rust-version = "1.71" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index 60c6c9ea..10ca67d9 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -35,7 +35,7 @@ impl_modulus!( type ResidueType = Residue; /// A constant-time implementation of the Ed25519 field. -#[derive(Clone, Copy, PartialEq, Eq, Default, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Default, Debug, Zeroize)] pub struct FieldElement(ResidueType); // Square root of -1. @@ -92,7 +92,7 @@ impl Neg for FieldElement { } } -impl<'a> Neg for &'a FieldElement { +impl Neg for &FieldElement { type Output = FieldElement; fn neg(self) -> Self::Output { (*self).neg() diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 7ed301f5..db54f218 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.79" +rust-version = "1.81" [package.metadata.docs.rs] all-features = true @@ -17,7 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] -thiserror = { version = "1", default-features = false, optional = true } +thiserror = { version = "2", default-features = false } rand_core = { version = "0.6", default-features = false } @@ -42,7 +42,7 @@ ciphersuite = { path = "../ciphersuite", default-features = false, features = [" [features] std = [ - "thiserror", + "thiserror/std", "rand_core/std", diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index 51cf6b06..1ad721f6 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -98,11 +98,11 @@ fn ecdh(private: &Zeroizing, public: C::G) -> Zeroizing(context: &str, ecdh: &Zeroizing) -> ChaCha20 { +fn cipher(context: [u8; 32], ecdh: &Zeroizing) -> ChaCha20 { // Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly // TODO: https://github.com/serai-dex/serai/issues/151 let mut transcript = RecommendedTranscript::new(b"DKG Encryption v0.2"); - transcript.append_message(b"context", context.as_bytes()); + transcript.append_message(b"context", context); transcript.domain_separate(b"encryption_key"); @@ -134,7 +134,7 @@ fn cipher(context: &str, ecdh: &Zeroizing) -> ChaCha20 { fn encrypt( rng: &mut R, - context: &str, + context: [u8; 32], from: Participant, to: C::G, mut msg: Zeroizing, @@ -197,7 +197,7 @@ impl EncryptedMessage { pub(crate) fn invalidate_msg( &mut self, rng: &mut R, - context: &str, + context: [u8; 32], from: Participant, ) { // Invalidate the message by specifying a new key/Schnorr PoP @@ -219,7 +219,7 @@ impl EncryptedMessage { pub(crate) fn invalidate_share_serialization( &mut self, rng: &mut R, - context: &str, + context: [u8; 32], from: Participant, to: C::G, ) { @@ -243,7 +243,7 @@ impl EncryptedMessage { pub(crate) fn 
invalidate_share_value( &mut self, rng: &mut R, - context: &str, + context: [u8; 32], from: Participant, to: C::G, ) { @@ -300,14 +300,14 @@ impl EncryptionKeyProof { // This still doesn't mean the DKG offers an authenticated channel. The per-message keys have no // root of trust other than their existence in the assumed-to-exist external authenticated channel. fn pop_challenge( - context: &str, + context: [u8; 32], nonce: C::G, key: C::G, sender: Participant, msg: &[u8], ) -> C::F { let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Proof of Possession v0.2"); - transcript.append_message(b"context", context.as_bytes()); + transcript.append_message(b"context", context); transcript.domain_separate(b"proof_of_possession"); @@ -323,9 +323,9 @@ fn pop_challenge( C::hash_to_F(b"DKG-encryption-proof_of_possession", &transcript.challenge(b"schnorr")) } -fn encryption_key_transcript(context: &str) -> RecommendedTranscript { +fn encryption_key_transcript(context: [u8; 32]) -> RecommendedTranscript { let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Correctness Proof v0.2"); - transcript.append_message(b"context", context.as_bytes()); + transcript.append_message(b"context", context); transcript } @@ -337,58 +337,17 @@ pub(crate) enum DecryptionError { InvalidProof, } -// A simple box for managing encryption. -#[derive(Clone)] -pub(crate) struct Encryption { - context: String, - i: Option, - enc_key: Zeroizing, - enc_pub_key: C::G, +// A simple box for managing decryption. +#[derive(Clone, Debug)] +pub(crate) struct Decryption { + context: [u8; 32], enc_keys: HashMap, } -impl fmt::Debug for Encryption { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt - .debug_struct("Encryption") - .field("context", &self.context) - .field("i", &self.i) - .field("enc_pub_key", &self.enc_pub_key) - .field("enc_keys", &self.enc_keys) - .finish_non_exhaustive() +impl Decryption { + pub(crate) fn new(context: [u8; 32]) -> Self { + Self { context, enc_keys: HashMap::new() } } -} - -impl Zeroize for Encryption { - fn zeroize(&mut self) { - self.enc_key.zeroize(); - self.enc_pub_key.zeroize(); - for (_, mut value) in self.enc_keys.drain() { - value.zeroize(); - } - } -} - -impl Encryption { - pub(crate) fn new( - context: String, - i: Option, - rng: &mut R, - ) -> Self { - let enc_key = Zeroizing::new(C::random_nonzero_F(rng)); - Self { - context, - i, - enc_pub_key: C::generator() * enc_key.deref(), - enc_key, - enc_keys: HashMap::new(), - } - } - - pub(crate) fn registration(&self, msg: M) -> EncryptionKeyMessage { - EncryptionKeyMessage { msg, enc_key: self.enc_pub_key } - } - pub(crate) fn register( &mut self, participant: Participant, @@ -402,13 +361,109 @@ impl Encryption { msg.msg } + // Given a message, and the intended decryptor, and a proof for its key, decrypt the message. + // Returns None if the key was wrong. 
+ pub(crate) fn decrypt_with_proof( + &self, + from: Participant, + decryptor: Participant, + mut msg: EncryptedMessage, + // There's no encryption key proof if the accusation is of an invalid signature + proof: Option>, + ) -> Result, DecryptionError> { + if !msg.pop.verify( + msg.key, + pop_challenge::(self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), + ) { + Err(DecryptionError::InvalidSignature)?; + } + + if let Some(proof) = proof { + // Verify this is the decryption key for this message + proof + .dleq + .verify( + &mut encryption_key_transcript(self.context), + &[C::generator(), msg.key], + &[self.enc_keys[&decryptor], *proof.key], + ) + .map_err(|_| DecryptionError::InvalidProof)?; + + cipher::(self.context, &proof.key).apply_keystream(msg.msg.as_mut().as_mut()); + Ok(msg.msg) + } else { + Err(DecryptionError::InvalidProof) + } + } +} + +// A simple box for managing encryption. +#[derive(Clone)] +pub(crate) struct Encryption { + context: [u8; 32], + i: Participant, + enc_key: Zeroizing, + enc_pub_key: C::G, + decryption: Decryption, +} + +impl fmt::Debug for Encryption { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("Encryption") + .field("context", &self.context) + .field("i", &self.i) + .field("enc_pub_key", &self.enc_pub_key) + .field("decryption", &self.decryption) + .finish_non_exhaustive() + } +} + +impl Zeroize for Encryption { + fn zeroize(&mut self) { + self.enc_key.zeroize(); + self.enc_pub_key.zeroize(); + for (_, mut value) in self.decryption.enc_keys.drain() { + value.zeroize(); + } + } +} + +impl Encryption { + pub(crate) fn new( + context: [u8; 32], + i: Participant, + rng: &mut R, + ) -> Self { + let enc_key = Zeroizing::new(C::random_nonzero_F(rng)); + Self { + context, + i, + enc_pub_key: C::generator() * enc_key.deref(), + enc_key, + decryption: Decryption::new(context), + } + } + + pub(crate) fn registration(&self, msg: M) -> EncryptionKeyMessage { + EncryptionKeyMessage { msg, enc_key: self.enc_pub_key } + } + + pub(crate) fn register( + &mut self, + participant: Participant, + msg: EncryptionKeyMessage, + ) -> M { + self.decryption.register(participant, msg) + } + pub(crate) fn encrypt( &self, rng: &mut R, participant: Participant, msg: Zeroizing, ) -> EncryptedMessage { - encrypt(rng, &self.context, self.i.unwrap(), self.enc_keys[&participant], msg) + encrypt(rng, self.context, self.i, self.decryption.enc_keys[&participant], msg) } pub(crate) fn decrypt( @@ -426,18 +481,18 @@ impl Encryption { batch, batch_id, msg.key, - pop_challenge::(&self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), + pop_challenge::(self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), ); let key = ecdh::(&self.enc_key, msg.key); - cipher::(&self.context, &key).apply_keystream(msg.msg.as_mut().as_mut()); + cipher::(self.context, &key).apply_keystream(msg.msg.as_mut().as_mut()); ( msg.msg, EncryptionKeyProof { key, dleq: DLEqProof::prove( rng, - &mut encryption_key_transcript(&self.context), + &mut encryption_key_transcript(self.context), &[C::generator(), msg.key], &self.enc_key, ), @@ -445,38 +500,7 @@ impl Encryption { ) } - // Given a message, and the intended decryptor, and a proof for its key, decrypt the message. - // Returns None if the key was wrong. 
- pub(crate) fn decrypt_with_proof( - &self, - from: Participant, - decryptor: Participant, - mut msg: EncryptedMessage, - // There's no encryption key proof if the accusation is of an invalid signature - proof: Option>, - ) -> Result, DecryptionError> { - if !msg.pop.verify( - msg.key, - pop_challenge::(&self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), - ) { - Err(DecryptionError::InvalidSignature)?; - } - - if let Some(proof) = proof { - // Verify this is the decryption key for this message - proof - .dleq - .verify( - &mut encryption_key_transcript(&self.context), - &[C::generator(), msg.key], - &[self.enc_keys[&decryptor], *proof.key], - ) - .map_err(|_| DecryptionError::InvalidProof)?; - - cipher::(&self.context, &proof.key).apply_keystream(msg.msg.as_mut().as_mut()); - Ok(msg.msg) - } else { - Err(DecryptionError::InvalidProof) - } + pub(crate) fn into_decryption(self) -> Decryption { + self.decryption } } diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 478f400f..5bc6f101 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -4,7 +4,6 @@ use core::fmt::{self, Debug}; -#[cfg(feature = "std")] use thiserror::Error; use zeroize::Zeroize; @@ -63,8 +62,7 @@ impl fmt::Display for Participant { } /// Various errors possible during key generation. -#[derive(Clone, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "std", derive(Error))] +#[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum DkgError { /// A parameter was zero. #[cfg_attr(feature = "std", error("a parameter was 0 (threshold {0}, participants {1})"))] @@ -205,25 +203,37 @@ mod lib { } } - /// Calculate the lagrange coefficient for a signing set. - pub fn lagrange(i: Participant, included: &[Participant]) -> F { - let i_f = F::from(u64::from(u16::from(i))); + #[derive(Clone, PartialEq, Eq, Debug, Zeroize)] + pub(crate) enum Interpolation { + Constant(Vec), + Lagrange, + } - let mut num = F::ONE; - let mut denom = F::ONE; - for l in included { - if i == *l { - continue; + impl Interpolation { + pub(crate) fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F { + match self { + Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)], + Interpolation::Lagrange => { + let i_f = F::from(u64::from(u16::from(i))); + + let mut num = F::ONE; + let mut denom = F::ONE; + for l in included { + if i == *l { + continue; + } + + let share = F::from(u64::from(u16::from(*l))); + num *= share; + denom *= share - i_f; + } + + // Safe as this will only be 0 if we're part of the above loop + // (which we have an if case to avoid) + num * denom.invert().unwrap() + } } - - let share = F::from(u64::from(u16::from(*l))); - num *= share; - denom *= share - i_f; } - - // Safe as this will only be 0 if we're part of the above loop - // (which we have an if case to avoid) - num * denom.invert().unwrap() } /// Keys and verification shares generated by a DKG. @@ -232,6 +242,8 @@ mod lib { pub struct ThresholdCore { /// Threshold Parameters. pub(crate) params: ThresholdParams, + /// The interpolation method used. + pub(crate) interpolation: Interpolation, /// Secret share key. 
pub(crate) secret_share: Zeroizing, @@ -246,6 +258,7 @@ mod lib { fmt .debug_struct("ThresholdCore") .field("params", &self.params) + .field("interpolation", &self.interpolation) .field("group_key", &self.group_key) .field("verification_shares", &self.verification_shares) .finish_non_exhaustive() @@ -255,6 +268,7 @@ mod lib { impl Zeroize for ThresholdCore { fn zeroize(&mut self) { self.params.zeroize(); + self.interpolation.zeroize(); self.secret_share.zeroize(); self.group_key.zeroize(); for share in self.verification_shares.values_mut() { @@ -266,16 +280,14 @@ mod lib { impl ThresholdCore { pub(crate) fn new( params: ThresholdParams, + interpolation: Interpolation, secret_share: Zeroizing, verification_shares: HashMap, ) -> ThresholdCore { let t = (1 ..= params.t()).map(Participant).collect::>(); - ThresholdCore { - params, - secret_share, - group_key: t.iter().map(|i| verification_shares[i] * lagrange::(*i, &t)).sum(), - verification_shares, - } + let group_key = + t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum(); + ThresholdCore { params, interpolation, secret_share, group_key, verification_shares } } /// Parameters for these keys. @@ -304,6 +316,15 @@ mod lib { writer.write_all(&self.params.t.to_le_bytes())?; writer.write_all(&self.params.n.to_le_bytes())?; writer.write_all(&self.params.i.to_bytes())?; + match &self.interpolation { + Interpolation::Constant(c) => { + writer.write_all(&[0])?; + for c in c { + writer.write_all(c.to_repr().as_ref())?; + } + } + Interpolation::Lagrange => writer.write_all(&[1])?, + }; let mut share_bytes = self.secret_share.to_repr(); writer.write_all(share_bytes.as_ref())?; share_bytes.as_mut().zeroize(); @@ -352,6 +373,20 @@ mod lib { ) }; + let mut interpolation = [0]; + reader.read_exact(&mut interpolation)?; + let interpolation = match interpolation[0] { + 0 => Interpolation::Constant({ + let mut res = Vec::with_capacity(usize::from(n)); + for _ in 0 .. n { + res.push(C::read_F(reader)?); + } + res + }), + 1 => Interpolation::Lagrange, + _ => Err(io::Error::other("invalid interpolation method"))?, + }; + let secret_share = Zeroizing::new(C::read_F(reader)?); let mut verification_shares = HashMap::new(); @@ -361,6 +396,7 @@ mod lib { Ok(ThresholdCore::new( ThresholdParams::new(t, n, i).map_err(|_| io::Error::other("invalid parameters"))?, + interpolation, secret_share, verification_shares, )) @@ -383,6 +419,7 @@ mod lib { /// View of keys, interpolated and offset for usage. 
#[derive(Clone)] pub struct ThresholdView { + interpolation: Interpolation, offset: C::F, group_key: C::G, included: Vec, @@ -395,6 +432,7 @@ mod lib { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt .debug_struct("ThresholdView") + .field("interpolation", &self.interpolation) .field("offset", &self.offset) .field("group_key", &self.group_key) .field("included", &self.included) @@ -480,12 +518,13 @@ mod lib { included.sort(); let mut secret_share = Zeroizing::new( - lagrange::(self.params().i(), &included) * self.secret_share().deref(), + self.core.interpolation.interpolation_factor(self.params().i(), &included) * + self.secret_share().deref(), ); let mut verification_shares = self.verification_shares(); for (i, share) in &mut verification_shares { - *share *= lagrange::(*i, &included); + *share *= self.core.interpolation.interpolation_factor(*i, &included); } // The offset is included by adding it to the participant with the lowest ID @@ -496,6 +535,7 @@ mod lib { *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * offset; Ok(ThresholdView { + interpolation: self.core.interpolation.clone(), offset, group_key: self.group_key(), secret_share, @@ -528,6 +568,14 @@ mod lib { &self.included } + /// Return the interpolation factor for a signer. + pub fn interpolation_factor(&self, participant: Participant) -> Option { + if !self.included.contains(&participant) { + None? + } + Some(self.interpolation.interpolation_factor(participant, &self.included)) + } + /// Return the interpolated, offset secret share. pub fn secret_share(&self) -> &Zeroizing { &self.secret_share diff --git a/crypto/dkg/src/musig.rs b/crypto/dkg/src/musig.rs index 4d6b54c8..82843272 100644 --- a/crypto/dkg/src/musig.rs +++ b/crypto/dkg/src/musig.rs @@ -7,8 +7,6 @@ use std_shims::collections::HashMap; #[cfg(feature = "std")] use zeroize::Zeroizing; -#[cfg(feature = "std")] -use ciphersuite::group::ff::Field; use ciphersuite::{ group::{Group, GroupEncoding}, Ciphersuite, @@ -16,7 +14,7 @@ use ciphersuite::{ use crate::DkgError; #[cfg(feature = "std")] -use crate::{Participant, ThresholdParams, ThresholdCore, lagrange}; +use crate::{Participant, ThresholdParams, Interpolation, ThresholdCore}; fn check_keys(keys: &[C::G]) -> Result> { if keys.is_empty() { @@ -67,6 +65,7 @@ pub fn musig_key(context: &[u8], keys: &[C::G]) -> Result(context, keys)?; let mut res = C::G::identity(); for i in 1 ..= keys_len { + // TODO: Calculate this with a multiexp res += keys[usize::from(i - 1)] * binding_factor::(transcript.clone(), i); } Ok(res) @@ -104,38 +103,26 @@ pub fn musig( binding.push(binding_factor::(transcript.clone(), i)); } - // Multiply our private key by our binding factor - let mut secret_share = private_key.clone(); - *secret_share *= binding[pos]; + // Our secret share is our private key + let secret_share = private_key.clone(); // Calculate verification shares let mut verification_shares = HashMap::new(); - // When this library offers a ThresholdView for a specific signing set, it applies the lagrange - // factor - // Since this is a n-of-n scheme, there's only one possible signing set, and one possible - // lagrange factor - // In the name of simplicity, we define the group key as the sum of all bound keys - // Accordingly, the secret share must be multiplied by the inverse of the lagrange factor, along - // with all verification shares - // This is less performant than simply defining the group key as the sum of all post-lagrange - // bound keys, yet the simplicity is preferred - let 
included = (1 ..= keys_len) - // This error also shouldn't be possible, for the same reasons as documented above - .map(|l| Participant::new(l).ok_or(DkgError::InvalidSigningSet)) - .collect::, _>>()?; let mut group_key = C::G::identity(); - for (l, p) in included.iter().enumerate() { - let bound = keys[l] * binding[l]; - group_key += bound; + for l in 1 ..= keys_len { + let key = keys[usize::from(l) - 1]; + group_key += key * binding[usize::from(l - 1)]; - let lagrange_inv = lagrange::(*p, &included).invert().unwrap(); - if params.i() == *p { - *secret_share *= lagrange_inv; - } - verification_shares.insert(*p, bound * lagrange_inv); + // These errors also shouldn't be possible, for the same reasons as documented above + verification_shares.insert(Participant::new(l).ok_or(DkgError::InvalidSigningSet)?, key); } debug_assert_eq!(C::generator() * secret_share.deref(), verification_shares[¶ms.i()]); debug_assert_eq!(musig_key::(context, keys).unwrap(), group_key); - Ok(ThresholdCore { params, secret_share, group_key, verification_shares }) + Ok(ThresholdCore::new( + params, + Interpolation::Constant(binding), + secret_share, + verification_shares, + )) } diff --git a/crypto/dkg/src/pedpop.rs b/crypto/dkg/src/pedpop.rs index 1faeebe5..adfc6958 100644 --- a/crypto/dkg/src/pedpop.rs +++ b/crypto/dkg/src/pedpop.rs @@ -22,9 +22,9 @@ use multiexp::{multiexp_vartime, BatchVerifier}; use schnorr::SchnorrSignature; use crate::{ - Participant, DkgError, ThresholdParams, ThresholdCore, validate_map, + Participant, DkgError, ThresholdParams, Interpolation, ThresholdCore, validate_map, encryption::{ - ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, EncryptionKeyProof, + ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, Decryption, EncryptionKeyProof, DecryptionError, }, }; @@ -32,10 +32,10 @@ use crate::{ type FrostError = DkgError>; #[allow(non_snake_case)] -fn challenge(context: &str, l: Participant, R: &[u8], Am: &[u8]) -> C::F { +fn challenge(context: [u8; 32], l: Participant, R: &[u8], Am: &[u8]) -> C::F { let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2"); transcript.domain_separate(b"schnorr_proof_of_knowledge"); - transcript.append_message(b"context", context.as_bytes()); + transcript.append_message(b"context", context); transcript.append_message(b"participant", l.to_bytes()); transcript.append_message(b"nonce", R); transcript.append_message(b"commitments", Am); @@ -86,15 +86,15 @@ impl ReadWrite for Commitments { #[derive(Debug, Zeroize)] pub struct KeyGenMachine { params: ThresholdParams, - context: String, + context: [u8; 32], _curve: PhantomData, } impl KeyGenMachine { /// Create a new machine to generate a key. /// - /// The context string should be unique among multisigs. - pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine { + /// The context should be unique among multisigs. 
+ pub fn new(params: ThresholdParams, context: [u8; 32]) -> KeyGenMachine { KeyGenMachine { params, context, _curve: PhantomData } } @@ -129,11 +129,11 @@ impl KeyGenMachine { // There's no reason to spend the time and effort to make this deterministic besides a // general obsession with canonicity and determinism though r, - challenge::(&self.context, self.params.i(), nonce.to_bytes().as_ref(), &cached_msg), + challenge::(self.context, self.params.i(), nonce.to_bytes().as_ref(), &cached_msg), ); // Additionally create an encryption mechanism to protect the secret shares - let encryption = Encryption::new(self.context.clone(), Some(self.params.i), rng); + let encryption = Encryption::new(self.context, self.params.i, rng); // Step 4: Broadcast let msg = @@ -225,7 +225,7 @@ impl ReadWrite for SecretShare { #[derive(Zeroize)] pub struct SecretShareMachine { params: ThresholdParams, - context: String, + context: [u8; 32], coefficients: Vec>, our_commitments: Vec, encryption: Encryption, @@ -274,7 +274,7 @@ impl SecretShareMachine { &mut batch, l, msg.commitments[0], - challenge::(&self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg), + challenge::(self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg), ); commitments.insert(l, msg.commitments.drain(..).collect::>()); @@ -472,9 +472,10 @@ impl KeyMachine { let KeyMachine { commitments, encryption, params, secret } = self; Ok(BlameMachine { commitments, - encryption, + encryption: encryption.into_decryption(), result: Some(ThresholdCore { params, + interpolation: Interpolation::Lagrange, secret_share: secret, group_key: stripes[0], verification_shares, @@ -486,7 +487,7 @@ impl KeyMachine { /// A machine capable of handling blame proofs. pub struct BlameMachine { commitments: HashMap>, - encryption: Encryption, + encryption: Decryption, result: Option>, } @@ -505,7 +506,6 @@ impl Zeroize for BlameMachine { for commitments in self.commitments.values_mut() { commitments.zeroize(); } - self.encryption.zeroize(); self.result.zeroize(); } } @@ -598,14 +598,13 @@ impl AdditionalBlameMachine { /// authenticated as having come from the supposed party and verified as valid. Usage of invalid /// commitments is considered undefined behavior, and may cause everything from inaccurate blame /// to panics. - pub fn new( - rng: &mut R, - context: String, + pub fn new( + context: [u8; 32], n: u16, mut commitment_msgs: HashMap>>, ) -> Result> { let mut commitments = HashMap::new(); - let mut encryption = Encryption::new(context, None, rng); + let mut encryption = Decryption::new(context); for i in 1 ..= n { let i = Participant::new(i).unwrap(); let Some(msg) = commitment_msgs.remove(&i) else { Err(DkgError::MissingParticipant(i))? 
}; diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/src/promote.rs index 7cad4f23..c8dcaed0 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/src/promote.rs @@ -113,6 +113,7 @@ impl> GeneratorPromotion< Ok(ThresholdKeys { core: Arc::new(ThresholdCore::new( params, + self.base.core.interpolation.clone(), self.base.secret_share().clone(), verification_shares, )), diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs index f21d7254..0078020a 100644 --- a/crypto/dkg/src/tests/mod.rs +++ b/crypto/dkg/src/tests/mod.rs @@ -6,7 +6,7 @@ use rand_core::{RngCore, CryptoRng}; use ciphersuite::{group::ff::Field, Ciphersuite}; -use crate::{Participant, ThresholdCore, ThresholdKeys, lagrange, musig::musig as musig_fn}; +use crate::{Participant, ThresholdCore, ThresholdKeys, musig::musig as musig_fn}; mod musig; pub use musig::test_musig; @@ -43,7 +43,8 @@ pub fn recover_key(keys: &HashMap> let included = keys.keys().copied().collect::>(); let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| { - accum + (lagrange::(*i, &included) * keys.secret_share().deref()) + accum + + (first.core.interpolation.interpolation_factor(*i, &included) * keys.secret_share().deref()) }); assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys"); group_private diff --git a/crypto/dkg/src/tests/pedpop.rs b/crypto/dkg/src/tests/pedpop.rs index 3ae383e3..42d7af67 100644 --- a/crypto/dkg/src/tests/pedpop.rs +++ b/crypto/dkg/src/tests/pedpop.rs @@ -14,7 +14,7 @@ use crate::{ type PedPoPEncryptedMessage = EncryptedMessage::F>>; type PedPoPSecretShares = HashMap>; -const CONTEXT: &str = "DKG Test Key Generation"; +const CONTEXT: [u8; 32] = *b"DKG Test Key Generation "; // Commit, then return commitment messages, enc keys, and shares #[allow(clippy::type_complexity)] @@ -31,7 +31,7 @@ fn commit_enc_keys_and_shares( let mut enc_keys = HashMap::new(); for i in (1 ..= PARTICIPANTS).map(Participant) { let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); - let machine = KeyGenMachine::::new(params, CONTEXT.to_string()); + let machine = KeyGenMachine::::new(params, CONTEXT); let (machine, these_commitments) = machine.generate_coefficients(rng); machines.insert(i, machine); @@ -147,14 +147,12 @@ mod literal { // Verify machines constructed with AdditionalBlameMachine::new work assert_eq!( - AdditionalBlameMachine::new( - &mut OsRng, - CONTEXT.to_string(), - PARTICIPANTS, - commitment_msgs.clone() - ) - .unwrap() - .blame(ONE, TWO, msg.clone(), blame.clone()), + AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame( + ONE, + TWO, + msg.clone(), + blame.clone() + ), ONE, ); } diff --git a/crypto/dkg/src/tests/promote.rs b/crypto/dkg/src/tests/promote.rs index 99c00433..242f085b 100644 --- a/crypto/dkg/src/tests/promote.rs +++ b/crypto/dkg/src/tests/promote.rs @@ -28,6 +28,10 @@ impl Ciphersuite for AltGenerator { C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") } + fn reduce_512(scalar: [u8; 64]) -> Self::F { + ::reduce_512(scalar) + } + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { ::hash_to_F(dst, data) } diff --git a/crypto/dleq/Cargo.toml b/crypto/dleq/Cargo.toml index fc25899f..a2b8ad9e 100644 --- a/crypto/dleq/Cargo.toml +++ b/crypto/dleq/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq" authors = ["Luke Parker "] edition = "2021" -rust-version = "1.79" +rust-version = "1.81" [package.metadata.docs.rs] all-features = true 
@@ -18,7 +18,7 @@ workspace = true [dependencies] rustversion = "1" -thiserror = { version = "1", optional = true } +thiserror = { version = "2", default-features = false, optional = true } rand_core = { version = "0.6", default-features = false } zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } @@ -44,7 +44,7 @@ dalek-ff-group = { path = "../dalek-ff-group" } transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"] } [features] -std = ["rand_core/std", "zeroize/std", "digest/std", "transcript/std", "ff/std", "multiexp?/std"] +std = ["thiserror?/std", "rand_core/std", "zeroize/std", "digest/std", "transcript/std", "ff/std", "multiexp?/std"] serialize = ["std"] # Needed for cross-group DLEqs diff --git a/crypto/dleq/src/cross_group/mod.rs b/crypto/dleq/src/cross_group/mod.rs index 8014ea9f..c530f60a 100644 --- a/crypto/dleq/src/cross_group/mod.rs +++ b/crypto/dleq/src/cross_group/mod.rs @@ -92,7 +92,7 @@ impl Generators { } /// Error for cross-group DLEq proofs. -#[derive(Error, PartialEq, Eq, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)] pub enum DLEqError { /// Invalid proof length. #[error("invalid proof length")] diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index a8958a2e..f6aed25a 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -37,11 +37,11 @@ pub(crate) fn challenge(transcript: &mut T) -> F { // Get a wide amount of bytes to safely reduce without bias // In most cases, <=1.5x bytes is enough. 2x is still standard and there's some theoretical // groups which may technically require more than 1.5x bytes for this to work as intended - let target_bytes = ((usize::try_from(F::NUM_BITS).unwrap() + 7) / 8) * 2; + let target_bytes = usize::try_from(F::NUM_BITS).unwrap().div_ceil(8) * 2; let mut challenge_bytes = transcript.challenge(b"challenge"); let challenge_bytes_len = challenge_bytes.as_ref().len(); // If the challenge is 32 bytes, and we need 64, we need two challenges - let needed_challenges = (target_bytes + (challenge_bytes_len - 1)) / challenge_bytes_len; + let needed_challenges = target_bytes.div_ceil(challenge_bytes_len); // The following algorithm should be equivalent to a wide reduction of the challenges, // interpreted as concatenated, big-endian byte string diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml index b0d0026e..64c1b243 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ed448" authors = ["Luke Parker "] keywords = ["ed448", "ff", "group"] edition = "2021" -rust-version = "1.66" +rust-version = "1.71" [package.metadata.docs.rs] all-features = true diff --git a/crypto/ff-group-tests/src/group.rs b/crypto/ff-group-tests/src/group.rs index 0f0aab4e..f2b69acc 100644 --- a/crypto/ff-group-tests/src/group.rs +++ b/crypto/ff-group-tests/src/group.rs @@ -154,18 +154,20 @@ pub fn test_group(rng: &mut R) { /// Test encoding and decoding of group elements. 
pub fn test_encoding() { - let test = |point: G, msg| { + let test = |point: G, msg| -> G { let bytes = point.to_bytes(); let mut repr = G::Repr::default(); repr.as_mut().copy_from_slice(bytes.as_ref()); - assert_eq!(point, G::from_bytes(&repr).unwrap(), "{msg} couldn't be encoded and decoded"); + let decoded = G::from_bytes(&repr).unwrap(); + assert_eq!(point, decoded, "{msg} couldn't be encoded and decoded"); assert_eq!( point, G::from_bytes_unchecked(&repr).unwrap(), "{msg} couldn't be encoded and decoded", ); + decoded }; - test(G::identity(), "identity"); + assert!(bool::from(test(G::identity(), "identity").is_identity())); test(G::generator(), "generator"); test(G::generator() + G::generator(), "(generator * 2)"); } diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 29a974f2..1d030621 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -17,7 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] -thiserror = "1" +thiserror = { version = "2", default-features = false, features = ["std"] } rand_core = { version = "0.6", default-features = false, features = ["std"] } rand_chacha = { version = "0.3", default-features = false, features = ["std"] } diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 693960d5..ae567c87 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -203,14 +203,15 @@ pub trait SignMachine: Send + Sync + Sized { /// SignatureMachine this SignMachine turns into. type SignatureMachine: SignatureMachine; - /// Cache this preprocess for usage later. This cached preprocess MUST only be used once. Reuse - /// of it enables recovery of your private key share. Third-party recovery of a cached preprocess - /// also enables recovery of your private key share, so this MUST be treated with the same - /// security as your private key share. + /// Cache this preprocess for usage later. + /// + /// This cached preprocess MUST only be used once. Reuse of it enables recovery of your private + /// key share. Third-party recovery of a cached preprocess also enables recovery of your private + /// key share, so this MUST be treated with the same security as your private key share. fn cache(self) -> CachedPreprocess; /// Create a sign machine from a cached preprocess. - + /// /// After this, the preprocess must be deleted so it's never reused. Any reuse will presumably /// cause the signer to leak their secret share. fn from_cache( @@ -219,11 +220,14 @@ pub trait SignMachine: Send + Sync + Sized { cache: CachedPreprocess, ) -> (Self, Self::Preprocess); - /// Read a Preprocess message. Despite taking self, this does not save the preprocess. - /// It must be externally cached and passed into sign. + /// Read a Preprocess message. + /// + /// Despite taking self, this does not save the preprocess. It must be externally cached and + /// passed into sign. fn read_preprocess(&self, reader: &mut R) -> io::Result; /// Sign a message. + /// /// Takes in the participants' preprocess messages. Returns the signature share to be broadcast /// to all participants, over an authenticated channel. The parties who participate here will /// become the signing set for this session. 
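(The reuse warning tightened above is easiest to see with the algebra written out. The sketch below is illustrative only and not part of the patch; it uses a single-nonce Schnorr share for simplicity, whereas a FROST preprocess carries two nonces, but the same relation holds per nonce.)

```rust
// Illustrative only: why a cached preprocess must never be signed with twice.
// With a (simplified, single-nonce) Schnorr share s = k + c * x, two signatures made
// with the same nonce k but distinct challenges c1 != c2 let anyone recover x:
//   s1 - s2 = (c1 - c2) * x   =>   x = (s1 - s2) * (c1 - c2)^-1
fn share_from_nonce_reuse<C: ciphersuite::Ciphersuite>(
  s1: C::F,
  s2: C::F,
  c1: C::F,
  c2: C::F,
) -> C::F {
  (s1 - s2) * (c1 - c2).invert().unwrap()
}
```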
diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 7be6478a..dc0453a1 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -122,6 +122,7 @@ fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap"] keywords = ["multiexp", "ff", "group"] edition = "2021" -rust-version = "1.79" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/multiexp/src/lib.rs b/crypto/multiexp/src/lib.rs index dfd8e033..604d0fd6 100644 --- a/crypto/multiexp/src/lib.rs +++ b/crypto/multiexp/src/lib.rs @@ -59,7 +59,7 @@ pub(crate) fn prep_bits>( for pair in pairs { let p = groupings.len(); let mut bits = pair.0.to_le_bits(); - groupings.push(vec![0; (bits.len() + (w_usize - 1)) / w_usize]); + groupings.push(vec![0; bits.len().div_ceil(w_usize)]); for (i, mut bit) in bits.iter_mut().enumerate() { let mut bit = u8_from_bool(&mut bit); diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 2ea04f5b..06a9710e 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr" authors = ["Luke Parker "] keywords = ["schnorr", "ff", "group"] edition = "2021" -rust-version = "1.79" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index d393c98e..cc0ac620 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -31,9 +31,8 @@ fn weight(digest: &mut DigestTran // Derive a scalar from enough bits of entropy that bias is < 2^128 // This can't be const due to its usage of a generic // Also due to the usize::try_from, yet that could be replaced with an `as` - // The + 7 forces it to round up #[allow(non_snake_case)] - let BYTES: usize = usize::try_from(((F::NUM_BITS + 128) + 7) / 8).unwrap(); + let BYTES: usize = usize::try_from((F::NUM_BITS + 128).div_ceil(8)).unwrap(); let mut remaining = BYTES; diff --git a/crypto/schnorrkel/Cargo.toml b/crypto/schnorrkel/Cargo.toml index 47717af5..70b96612 100644 --- a/crypto/schnorrkel/Cargo.toml +++ b/crypto/schnorrkel/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorrkel" authors = ["Luke Parker "] keywords = ["frost", "multisig", "threshold", "schnorrkel"] edition = "2021" -rust-version = "1.79" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/transcript/Cargo.toml b/crypto/transcript/Cargo.toml index 84e08abf..566ad56b 100644 --- a/crypto/transcript/Cargo.toml +++ b/crypto/transcript/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/transcript" authors = ["Luke Parker "] keywords = ["transcript"] edition = "2021" -rust-version = "1.79" +rust-version = "1.73" [package.metadata.docs.rs] all-features = true From f2563d39cbd560379411ad2393c127171a926789 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 20:37:03 -0400 Subject: [PATCH 069/116] Correct crypto MSRVs --- Cargo.lock | 6 +++--- common/zalloc/Cargo.toml | 2 +- crypto/dalek-ff-group/Cargo.toml | 2 +- crypto/dalek-ff-group/src/lib.rs | 16 ++++++++++++---- crypto/dkg/Cargo.toml | 2 +- crypto/dleq/Cargo.toml | 2 +- crypto/ed448/Cargo.toml | 2 +- 7 files changed, 20 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 799988d5..95a91959 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2222,7 +2222,7 @@ dependencies = [ 
"rand_core", "schnorr-signatures", "std-shims", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] @@ -2241,7 +2241,7 @@ dependencies = [ "multiexp", "rand_core", "rustversion", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] @@ -4838,7 +4838,7 @@ dependencies = [ "schnorr-signatures", "serde_json", "subtle", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] diff --git a/common/zalloc/Cargo.toml b/common/zalloc/Cargo.toml index af4e7c1c..88e59ec0 100644 --- a/common/zalloc/Cargo.toml +++ b/common/zalloc/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/common/zalloc" authors = ["Luke Parker "] keywords = [] edition = "2021" -rust-version = "1.77.0" +rust-version = "1.77" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index b41e1f4e..e5793c98 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-gr authors = ["Luke Parker "] keywords = ["curve25519", "ed25519", "ristretto", "dalek", "group"] edition = "2021" -rust-version = "1.71" +rust-version = "1.65" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index e6aad5b2..0e96bfa6 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -40,11 +40,19 @@ pub use field::FieldElement; // Use black_box when possible #[rustversion::since(1.66)] -use core::hint::black_box; -#[rustversion::before(1.66)] -fn black_box(val: T) -> T { - val +mod black_box { + pub(crate) fn black_box(val: T) -> T { + #[allow(clippy::incompatible_msrv)] + core::hint::black_box(val) + } } +#[rustversion::before(1.66)] +mod black_box { + pub(crate) fn black_box(val: T) -> T { + val + } +} +use black_box::black_box; fn u8_from_bool(bit_ref: &mut bool) -> u8 { let bit_ref = black_box(bit_ref); diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index db54f218..b144cb26 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.81" +rust-version = "1.80" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dleq/Cargo.toml b/crypto/dleq/Cargo.toml index a2b8ad9e..61e9a362 100644 --- a/crypto/dleq/Cargo.toml +++ b/crypto/dleq/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq" authors = ["Luke Parker "] edition = "2021" -rust-version = "1.81" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml index 64c1b243..2302d7b3 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ed448" authors = ["Luke Parker "] keywords = ["ed448", "ff", "group"] edition = "2021" -rust-version = "1.71" +rust-version = "1.65" [package.metadata.docs.rs] all-features = true From 38dd8cb191a7330e6b99e2762be86265d500f68d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 21:15:59 -0400 Subject: [PATCH 070/116] Support taking arbitrary linear combinations of signing keys, not just additive offsets --- crypto/dkg/src/lib.rs | 92 
+++++++++++++++++++++++++++------------ crypto/dkg/src/musig.rs | 1 + crypto/dkg/src/promote.rs | 8 +++- 3 files changed, 72 insertions(+), 29 deletions(-) diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 5bc6f101..b7dc4b17 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -412,14 +412,17 @@ mod lib { #[zeroize(skip)] pub(crate) core: Arc>, + // Scalar applied to these keys. + pub(crate) scalar: C::F, // Offset applied to these keys. - pub(crate) offset: Option, + pub(crate) offset: C::F, } - /// View of keys, interpolated and offset for usage. + /// View of keys, interpolated and with the expected linear combination taken for usage. #[derive(Clone)] pub struct ThresholdView { interpolation: Interpolation, + scalar: C::F, offset: C::F, group_key: C::G, included: Vec, @@ -433,6 +436,7 @@ mod lib { fmt .debug_struct("ThresholdView") .field("interpolation", &self.interpolation) + .field("scalar", &self.scalar) .field("offset", &self.offset) .field("group_key", &self.group_key) .field("included", &self.included) @@ -444,6 +448,7 @@ mod lib { impl Zeroize for ThresholdView { fn zeroize(&mut self) { + self.scalar.zeroize(); self.offset.zeroize(); self.group_key.zeroize(); self.included.zeroize(); @@ -460,25 +465,42 @@ mod lib { impl ThresholdKeys { /// Create a new set of ThresholdKeys from a ThresholdCore. pub fn new(core: ThresholdCore) -> ThresholdKeys { - ThresholdKeys { core: Arc::new(core), offset: None } + ThresholdKeys { core: Arc::new(core), scalar: C::F::ONE, offset: C::F::ZERO } + } + + /// Scale the keys by a given scalar to allow for various account and privacy schemes. + /// + /// This scalar is ephemeral and will not be included when these keys are serialized. The + /// scalar is applied on top of any already-existing scalar/offset. + /// + /// Returns `None` if the scalar is equal to `0`. + #[must_use] + pub fn scale(mut self, scalar: C::F) -> Option> { + if bool::from(scalar.is_zero()) { + None?; + } + self.scalar *= scalar; + self.offset *= scalar; + Some(self) } /// Offset the keys by a given scalar to allow for various account and privacy schemes. /// - /// This offset is ephemeral and will not be included when these keys are serialized. It also - /// accumulates, so calling offset multiple times will produce a offset of the offsets' sum. + /// This offset is ephemeral and will not be included when these keys are serialized. The + /// offset is applied on top of any already-existing scalar/offset. #[must_use] - pub fn offset(&self, offset: C::F) -> ThresholdKeys { - let mut res = self.clone(); - // Carry any existing offset - // Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a - // one-time-key offset - res.offset = Some(offset + res.offset.unwrap_or(C::F::ZERO)); - res + pub fn offset(mut self, offset: C::F) -> ThresholdKeys { + self.offset += offset; + self + } + + /// Return the current scalar in-use for these keys. + pub fn current_scalar(&self) -> C::F { + self.scalar } /// Return the current offset in-use for these keys. - pub fn current_offset(&self) -> Option { + pub fn current_offset(&self) -> C::F { self.offset } @@ -492,9 +514,9 @@ mod lib { &self.core.secret_share } - /// Return the group key, with any offset applied. + /// Return the group key, with the expected linear combination taken. 
pub fn group_key(&self) -> C::G { - self.core.group_key + (C::generator() * self.offset.unwrap_or(C::F::ZERO)) + (self.core.group_key * self.scalar) + (C::generator() * self.offset) } /// Return all participants' verification shares without any offsetting. @@ -507,8 +529,8 @@ mod lib { self.core.serialize() } - /// Obtain a view of these keys, with any offset applied, interpolated for the specified signing - /// set. + /// Obtain a view of these keys, interpolated for the specified signing set, with the specified + /// linear combination taken. pub fn view(&self, mut included: Vec) -> Result, DkgError<()>> { if (included.len() < self.params().t.into()) || (usize::from(self.params().n()) < included.len()) @@ -517,26 +539,36 @@ mod lib { } included.sort(); + // The interpolation occurs multiplicatively, letting us scale by the scalar now + let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref()); let mut secret_share = Zeroizing::new( self.core.interpolation.interpolation_factor(self.params().i(), &included) * - self.secret_share().deref(), + secret_share_scaled.deref(), ); let mut verification_shares = self.verification_shares(); for (i, share) in &mut verification_shares { - *share *= self.core.interpolation.interpolation_factor(*i, &included); + *share *= self.scalar * self.core.interpolation.interpolation_factor(*i, &included); } - // The offset is included by adding it to the participant with the lowest ID - let offset = self.offset.unwrap_or(C::F::ZERO); + /* + The offset is included by adding it to the participant with the lowest ID. + + This is done after interpolating to ensure, regardless of the method of interpolation, that + the method of interpolation does not scale the offset. For Lagrange interpolation, we could + add the offset to every key share before interpolating, yet for Constant interpolation, we + _have_ to add it as we do here (which also works even when we intend to perform Lagrange + interpolation). + */ if included[0] == self.params().i() { - *secret_share += offset; + *secret_share += self.offset; } - *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * offset; + *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset; Ok(ThresholdView { interpolation: self.core.interpolation.clone(), - offset, + scalar: self.scalar, + offset: self.offset, group_key: self.group_key(), secret_share, original_verification_shares: self.verification_shares(), @@ -553,7 +585,12 @@ mod lib { } impl ThresholdView { - /// Return the offset for this view. + /// Return the scalar applied to this view. + pub fn scalar(&self) -> C::F { + self.scalar + } + + /// Return the offset applied to this view. pub fn offset(&self) -> C::F { self.offset } @@ -576,7 +613,7 @@ mod lib { Some(self.interpolation.interpolation_factor(participant, &self.included)) } - /// Return the interpolated, offset secret share. + /// Return the interpolated secret share, with the expected linear combination taken. pub fn secret_share(&self) -> &Zeroizing { &self.secret_share } @@ -586,7 +623,8 @@ mod lib { self.original_verification_shares[&l] } - /// Return the interpolated, offset verification share for the specified participant. + /// Return the interpolated verification share, with the expected linear combination taken, + /// for the specified participant. 
pub fn verification_share(&self, l: Participant) -> C::G { self.verification_shares[&l] } diff --git a/crypto/dkg/src/musig.rs b/crypto/dkg/src/musig.rs index 82843272..82a755db 100644 --- a/crypto/dkg/src/musig.rs +++ b/crypto/dkg/src/musig.rs @@ -111,6 +111,7 @@ pub fn musig( let mut group_key = C::G::identity(); for l in 1 ..= keys_len { let key = keys[usize::from(l) - 1]; + // TODO: Use a multiexp for this group_key += key * binding[usize::from(l - 1)]; // These errors also shouldn't be possible, for the same reasons as documented above diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/src/promote.rs index c8dcaed0..d92b382d 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/src/promote.rs @@ -7,7 +7,10 @@ use std::{ use rand_core::{RngCore, CryptoRng}; -use ciphersuite::{group::GroupEncoding, Ciphersuite}; +use ciphersuite::{ + group::{ff::Field, GroupEncoding}, + Ciphersuite, +}; use transcript::{Transcript, RecommendedTranscript}; use dleq::DLEqProof; @@ -117,7 +120,8 @@ impl> GeneratorPromotion< self.base.secret_share().clone(), verification_shares, )), - offset: None, + scalar: C2::F::ONE, + offset: C2::F::ZERO, }) } } From 3919cf55aef3e73f2ef439a6a08b4772072c7064 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 15 Aug 2025 21:16:23 -0400 Subject: [PATCH 071/116] Extend modular-frost to test with scaled and offset keys The transcript transcripted the group key _plus_ the offset, when it should've only transcripted the group key as the declared group key already had the offset applied. This has been fixed. --- crypto/frost/src/sign.rs | 7 +------ crypto/frost/src/tests/mod.rs | 5 +++-- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index ae567c87..4f5f59bd 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -357,12 +357,7 @@ impl> SignMachine for AlgorithmSignMachi // Re-format into the FROST-expected rho transcript let mut rho_transcript = A::Transcript::new(b"FROST_rho"); - rho_transcript.append_message( - b"group_key", - (self.params.keys.group_key() + - (C::generator() * self.params.keys.current_offset().unwrap_or(C::F::ZERO))) - .to_bytes(), - ); + rho_transcript.append_message(b"group_key", self.params.keys.group_key().to_bytes()); rho_transcript.append_message(b"message", C::hash_msg(msg)); rho_transcript.append_message( b"preprocesses", diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index db6553aa..2bb9e3ea 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -251,10 +251,11 @@ pub fn test_offset_schnorr>(rng: &m let mut keys = key_gen(&mut *rng); let group_key = keys[&Participant::new(1).unwrap()].group_key(); + let scalar = C::F::from(3); let offset = C::F::from(5); - let offset_key = group_key + (C::generator() * offset); + let offset_key = (group_key * scalar) + (C::generator() * offset); for keys in keys.values_mut() { - *keys = keys.offset(offset); + *keys = keys.clone().scale(scalar).unwrap().offset(offset); assert_eq!(keys.group_key(), offset_key); } From 9f84adf8b36308823c62d95e5bfc35f81287020d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 01:24:40 -0400 Subject: [PATCH 072/116] Smash dkg into dkg, dkg-[recovery, promote, musig, pedpop] promote and pedpop require dleq, which don't support no-std. All three should be moved outside the Serai repository, per #597, as none are planned for use and worth covering under our BBP. 
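For downstream code, the migration is mostly a change of import paths. A sketch follows; the crate and item names are taken from the new Cargo.toml and lib.rs files below, and exact re-exports may differ:

```rust
// Before this patch, the protocols lived as modules of `dkg`:
// use dkg::{ThresholdKeys, musig::musig, pedpop::KeyGenMachine};

// After it, the key types remain in `dkg` while each protocol gets its own crate:
use dkg::ThresholdKeys;
use dkg_musig::{musig, musig_key};
use dkg_pedpop::KeyGenMachine;
use dkg_recovery::recover_key; // previously a test-only helper in `dkg`
```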
--- .github/workflows/crypto-tests.yml | 4 + Cargo.lock | 54 +- Cargo.toml | 4 + crypto/dkg/Cargo.toml | 30 +- crypto/dkg/LICENSE | 2 +- crypto/dkg/README.md | 22 +- crypto/dkg/musig/Cargo.toml | 49 + crypto/dkg/musig/LICENSE | 21 + crypto/dkg/musig/README.md | 12 + crypto/dkg/musig/src/lib.rs | 162 +++ crypto/dkg/musig/src/tests.rs | 70 + crypto/dkg/pedpop/Cargo.toml | 37 + crypto/dkg/pedpop/LICENSE | 21 + crypto/dkg/pedpop/README.md | 12 + crypto/dkg/{ => pedpop}/src/encryption.rs | 4 +- .../dkg/{src/pedpop.rs => pedpop/src/lib.rs} | 128 +- crypto/dkg/pedpop/src/tests.rs | 345 +++++ crypto/dkg/promote/Cargo.toml | 34 + crypto/dkg/promote/LICENSE | 21 + crypto/dkg/promote/README.md | 12 + .../{src/promote.rs => promote/src/lib.rs} | 99 +- crypto/dkg/promote/src/tests.rs | 113 ++ crypto/dkg/recovery/Cargo.toml | 34 + crypto/dkg/recovery/LICENSE | 21 + crypto/dkg/recovery/README.md | 14 + crypto/dkg/recovery/src/lib.rs | 85 ++ crypto/dkg/src/lib.rs | 1163 +++++++++-------- crypto/dkg/src/musig.rs | 129 -- crypto/dkg/src/tests/mod.rs | 102 -- crypto/dkg/src/tests/musig.rs | 61 - crypto/dkg/src/tests/pedpop.rs | 331 ----- crypto/dkg/src/tests/promote.rs | 66 - crypto/frost/Cargo.toml | 6 +- tests/no-std/Cargo.toml | 2 + tests/no-std/src/lib.rs | 2 + 35 files changed, 1910 insertions(+), 1362 deletions(-) create mode 100644 crypto/dkg/musig/Cargo.toml create mode 100644 crypto/dkg/musig/LICENSE create mode 100644 crypto/dkg/musig/README.md create mode 100644 crypto/dkg/musig/src/lib.rs create mode 100644 crypto/dkg/musig/src/tests.rs create mode 100644 crypto/dkg/pedpop/Cargo.toml create mode 100644 crypto/dkg/pedpop/LICENSE create mode 100644 crypto/dkg/pedpop/README.md rename crypto/dkg/{ => pedpop}/src/encryption.rs (99%) rename crypto/dkg/{src/pedpop.rs => pedpop/src/lib.rs} (86%) create mode 100644 crypto/dkg/pedpop/src/tests.rs create mode 100644 crypto/dkg/promote/Cargo.toml create mode 100644 crypto/dkg/promote/LICENSE create mode 100644 crypto/dkg/promote/README.md rename crypto/dkg/{src/promote.rs => promote/src/lib.rs} (54%) create mode 100644 crypto/dkg/promote/src/tests.rs create mode 100644 crypto/dkg/recovery/Cargo.toml create mode 100644 crypto/dkg/recovery/LICENSE create mode 100644 crypto/dkg/recovery/README.md create mode 100644 crypto/dkg/recovery/src/lib.rs delete mode 100644 crypto/dkg/src/musig.rs delete mode 100644 crypto/dkg/src/tests/mod.rs delete mode 100644 crypto/dkg/src/tests/musig.rs delete mode 100644 crypto/dkg/src/tests/pedpop.rs delete mode 100644 crypto/dkg/src/tests/promote.rs diff --git a/.github/workflows/crypto-tests.yml b/.github/workflows/crypto-tests.yml index d9d1df08..2e853e71 100644 --- a/.github/workflows/crypto-tests.yml +++ b/.github/workflows/crypto-tests.yml @@ -36,5 +36,9 @@ jobs: -p schnorr-signatures \ -p dleq \ -p dkg \ + -p dkg-recovery \ + -p dkg-promote \ + -p dkg-musig \ + -p dkg-pedpop \ -p modular-frost \ -p frost-schnorrkel diff --git a/Cargo.lock b/Cargo.lock index 95a91959..e4009f87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2211,17 +2211,65 @@ dependencies = [ [[package]] name = "dkg" -version = "0.5.1" +version = "0.6.0" dependencies = [ "borsh", + "ciphersuite", + "std-shims", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-musig" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", + "dkg-recovery", + "multiexp", + "rand_core", + "std-shims", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-pedpop" +version = "0.6.0" +dependencies = [ "chacha20", "ciphersuite", + "dkg", "dleq", 
"flexible-transcript", "multiexp", "rand_core", "schnorr-signatures", - "std-shims", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-promote" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", + "dkg-recovery", + "dleq", + "flexible-transcript", + "rand_core", + "thiserror 2.0.14", + "zeroize", +] + +[[package]] +name = "dkg-recovery" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", "thiserror 2.0.14", "zeroize", ] @@ -8324,6 +8372,8 @@ dependencies = [ "ciphersuite", "dalek-ff-group", "dkg", + "dkg-musig", + "dkg-recovery", "dleq", "flexible-transcript", "minimal-ed448", diff --git a/Cargo.toml b/Cargo.toml index 6361422d..d1b1862e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,10 @@ members = [ "crypto/schnorr", "crypto/dleq", "crypto/dkg", + "crypto/dkg/recovery", + "crypto/dkg/promote", + "crypto/dkg/musig", + "crypto/dkg/pedpop", "crypto/frost", "crypto/schnorrkel", diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index b144cb26..51dc9162 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dkg" -version = "0.5.1" +version = "0.6.0" description = "Distributed key generation over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" @@ -17,50 +17,28 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] +zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive", "alloc"] } + thiserror = { version = "2", default-features = false } -rand_core = { version = "0.6", default-features = false } - -zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } - std-shims = { version = "0.1", path = "../../common/std-shims", default-features = false } borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true } -transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false, features = ["recommended"] } -chacha20 = { version = "0.9", default-features = false, features = ["zeroize"] } - -ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false } -multiexp = { path = "../multiexp", version = "0.4", default-features = false } - -schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false } -dleq = { path = "../dleq", version = "^0.4.1", default-features = false } +ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } [dev-dependencies] -rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] } [features] std = [ "thiserror/std", - "rand_core/std", - "std-shims/std", "borsh?/std", - "transcript/std", - "chacha20/std", - "ciphersuite/std", - "multiexp/std", - "multiexp/batch", - - "schnorr/std", - "dleq/std", - "dleq/serialize" ] borsh = ["dep:borsh"] -tests = ["rand_core/getrandom"] default = ["std"] diff --git a/crypto/dkg/LICENSE b/crypto/dkg/LICENSE index be67c32f..6f7adff3 100644 --- a/crypto/dkg/LICENSE +++ b/crypto/dkg/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2021-2023 Luke Parker +Copyright (c) 2021-2025 Luke Parker Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/crypto/dkg/README.md 
b/crypto/dkg/README.md
index 27e3412a..eaad6ed5 100644
--- a/crypto/dkg/README.md
+++ b/crypto/dkg/README.md
@@ -1,16 +1,14 @@
 # Distributed Key Generation
 
-A collection of implementations of various distributed key generation protocols.
+A crate implementing a type for keys, presumably the result of a distributed key generation
+protocol, and utilities from there.
 
-All included protocols resolve into the provided `Threshold` types, intended to
-enable their modularity. Additional utilities around these types, such as
-promotion from one generator to another, are also provided.
+This crate used to host implementations of distributed key generation protocols as well (hence the
+name). Those have been smashed into their own crates, such as
+[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop).
 
-Currently, the only included protocol is the two-round protocol from the
-[FROST paper](https://eprint.iacr.org/2020/852).
-
-This library was
-[audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf),
-culminating in commit
-[669d2dbffc1dafb82a09d9419ea182667115df06](https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06).
-Any subsequent changes have not undergone auditing.
+Before being smashed, this crate was [audited by Cypher Stack in March 2023](
+  https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
+), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
+  https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
+). Any subsequent changes have not undergone auditing.
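As a summary of what the new `dkg-musig` crate (added below) computes, the aggregated key is a binding-factor-weighted sum of the participants' keys. A minimal sketch, with the transcript layout abbreviated:

```rust
// binding_i = C::hash_to_F(b"dkg-musig", context || n || K_1 || ... || K_n || i)
// K_musig   = binding_1 * K_1 + ... + binding_n * K_n
// The real implementation below evaluates this sum with a multiexp; each participant's
// verification share stays K_i, with the binding factors re-applied at signing time via
// Interpolation::Constant.
fn musig_key_sketch<C: ciphersuite::Ciphersuite>(bindings: &[C::F], keys: &[C::G]) -> C::G {
  let mut aggregated = C::G::identity();
  for (binding, key) in bindings.iter().zip(keys) {
    aggregated += *key * *binding;
  }
  aggregated
}
```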
diff --git a/crypto/dkg/musig/Cargo.toml b/crypto/dkg/musig/Cargo.toml new file mode 100644 index 00000000..e2a971e7 --- /dev/null +++ b/crypto/dkg/musig/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "dkg-musig" +version = "0.6.0" +description = "The MuSig key aggregation protocol" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/musig" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +thiserror = { version = "2", default-features = false } + +rand_core = { version = "0.6", default-features = false } + +zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } + +std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false } + +multiexp = { path = "../../multiexp", version = "0.4", default-features = false } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false } +dkg = { path = "../", default-features = false } + +[dev-dependencies] +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } +dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] } + +[features] +std = [ + "thiserror/std", + + "rand_core/std", + + "std-shims/std", + + "multiexp/std", + "ciphersuite/std", + "dkg/std", +] +default = ["std"] diff --git a/crypto/dkg/musig/LICENSE b/crypto/dkg/musig/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/musig/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/musig/README.md b/crypto/dkg/musig/README.md new file mode 100644 index 00000000..9720e6f0 --- /dev/null +++ b/crypto/dkg/musig/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation - MuSig + +This implements the MuSig key aggregation protocol for the [`dkg`](https://docs.rs/dkg) crate's +types. 
+ +This crate was originally part of the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit +[669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. diff --git a/crypto/dkg/musig/src/lib.rs b/crypto/dkg/musig/src/lib.rs new file mode 100644 index 00000000..36f4fd31 --- /dev/null +++ b/crypto/dkg/musig/src/lib.rs @@ -0,0 +1,162 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +#![cfg_attr(not(feature = "std"), no_std)] + +use core::ops::Deref; +use std_shims::{ + vec, + vec::Vec, + collections::{HashSet, HashMap}, +}; + +use zeroize::Zeroizing; + +use ciphersuite::{group::GroupEncoding, Ciphersuite}; + +pub use dkg::*; + +#[cfg(test)] +mod tests; + +/// Errors encountered when working with threshold keys. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum MusigError { + /// No keys were provided. + #[error("no keys provided")] + NoKeysProvided, + /// Too many keys were provided. + #[error("too many keys (allowed {max}, provided {provided})")] + TooManyKeysProvided { + /// The maximum amount of keys allowed. + max: u16, + /// The amount of keys provided. + provided: usize, + }, + /// A participant was duplicated. + #[error("a participant was duplicated")] + DuplicatedParticipant(C::G), + /// Participating, yet our public key wasn't found in the list of keys. + #[error("private key's public key wasn't present in the list of public keys")] + NotPresent, + /// An error propagated from the underlying `dkg` crate. + #[error("error from dkg ({0})")] + DkgError(DkgError), +} + +fn check_keys(keys: &[C::G]) -> Result> { + if keys.is_empty() { + Err(MusigError::NoKeysProvided)?; + } + + let keys_len = u16::try_from(keys.len()) + .map_err(|_| MusigError::TooManyKeysProvided { max: u16::MAX, provided: keys.len() })?; + + let mut set = HashSet::with_capacity(keys.len()); + for key in keys { + let bytes = key.to_bytes().as_ref().to_vec(); + if !set.insert(bytes) { + Err(MusigError::DuplicatedParticipant(*key))?; + } + } + + Ok(keys_len) +} + +fn binding_factor_transcript( + context: [u8; 32], + keys_len: u16, + keys: &[C::G], +) -> Vec { + debug_assert_eq!(usize::from(keys_len), keys.len()); + + let mut transcript = vec![]; + transcript.extend(&context); + transcript.extend(keys_len.to_le_bytes()); + for key in keys { + transcript.extend(key.to_bytes().as_ref()); + } + transcript +} + +fn binding_factor(mut transcript: Vec, i: u16) -> C::F { + transcript.extend(i.to_le_bytes()); + C::hash_to_F(b"dkg-musig", &transcript) +} + +#[allow(clippy::type_complexity)] +fn musig_key_multiexp( + context: [u8; 32], + keys: &[C::G], +) -> Result, MusigError> { + let keys_len = check_keys::(keys)?; + let transcript = binding_factor_transcript::(context, keys_len, keys); + let mut multiexp = Vec::with_capacity(keys.len()); + for i in 1 ..= keys_len { + multiexp.push((binding_factor::(transcript.clone(), i), keys[usize::from(i - 1)])); + } + Ok(multiexp) +} + +/// The group key resulting from using this library's MuSig key aggregation. +/// +/// This function executes in variable time and MUST NOT be used with secret data. 
+pub fn musig_key_vartime( + context: [u8; 32], + keys: &[C::G], +) -> Result> { + Ok(multiexp::multiexp_vartime(&musig_key_multiexp(context, keys)?)) +} + +/// The group key resulting from using this library's MuSig key aggregation. +pub fn musig_key(context: [u8; 32], keys: &[C::G]) -> Result> { + Ok(multiexp::multiexp(&musig_key_multiexp(context, keys)?)) +} + +/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key. +pub fn musig( + context: [u8; 32], + private_key: Zeroizing, + keys: &[C::G], +) -> Result, MusigError> { + let our_pub_key = C::generator() * private_key.deref(); + let Some(our_i) = keys.iter().position(|key| *key == our_pub_key) else { + Err(MusigError::DkgError(DkgError::NotParticipating))? + }; + + let keys_len: u16 = check_keys::(keys)?; + + let params = ThresholdParams::new( + keys_len, + keys_len, + // The `+ 1` won't fail as `keys.len() <= u16::MAX`, so any index is `< u16::MAX` + Participant::new( + u16::try_from(our_i).expect("keys.len() <= u16::MAX yet index of keys > u16::MAX?") + 1, + ) + .expect("i + 1 != 0"), + ) + .map_err(MusigError::DkgError)?; + + let transcript = binding_factor_transcript::(context, keys_len, keys); + let mut binding_factors = Vec::with_capacity(keys.len()); + let mut multiexp = Vec::with_capacity(keys.len()); + let mut verification_shares = HashMap::with_capacity(keys.len()); + for (i, key) in (1 ..= keys_len).zip(keys.iter().copied()) { + let binding_factor = binding_factor::(transcript.clone(), i); + binding_factors.push(binding_factor); + multiexp.push((binding_factor, key)); + + let i = Participant::new(i).expect("non-zero u16 wasn't a valid Participant index?"); + verification_shares.insert(i, key); + } + let group_key = multiexp::multiexp(&multiexp); + debug_assert_eq!(our_pub_key, verification_shares[¶ms.i()]); + debug_assert_eq!(musig_key_vartime::(context, keys).unwrap(), group_key); + + ThresholdKeys::new( + params, + Interpolation::Constant(binding_factors), + private_key, + verification_shares, + ) + .map_err(MusigError::DkgError) +} diff --git a/crypto/dkg/musig/src/tests.rs b/crypto/dkg/musig/src/tests.rs new file mode 100644 index 00000000..a48dda68 --- /dev/null +++ b/crypto/dkg/musig/src/tests.rs @@ -0,0 +1,70 @@ +use std::collections::HashMap; + +use zeroize::Zeroizing; +use rand_core::OsRng; + +use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto}; + +use dkg_recovery::recover_key; +use crate::*; + +/// Tests MuSig key generation. +#[test] +pub fn test_musig() { + const PARTICIPANTS: u16 = 5; + + let mut keys = vec![]; + let mut pub_keys = vec![]; + for _ in 0 .. 
PARTICIPANTS { + let key = Zeroizing::new(::F::random(&mut OsRng)); + pub_keys.push(::generator() * *key); + keys.push(key); + } + + const CONTEXT: [u8; 32] = *b"MuSig Test "; + + // Empty signing set + musig::(CONTEXT, Zeroizing::new(::F::ZERO), &[]) + .unwrap_err(); + // Signing set we're not part of + musig::( + CONTEXT, + Zeroizing::new(::F::ZERO), + &[::generator()], + ) + .unwrap_err(); + + // Test with n keys + { + let mut created_keys = HashMap::new(); + let mut verification_shares = HashMap::new(); + let group_key = musig_key::(CONTEXT, &pub_keys).unwrap(); + for (i, key) in keys.iter().enumerate() { + let these_keys = musig::(CONTEXT, key.clone(), &pub_keys).unwrap(); + assert_eq!(these_keys.params().t(), PARTICIPANTS); + assert_eq!(these_keys.params().n(), PARTICIPANTS); + assert_eq!(usize::from(u16::from(these_keys.params().i())), i + 1); + + verification_shares.insert( + these_keys.params().i(), + ::generator() * **these_keys.secret_share(), + ); + + assert_eq!(these_keys.group_key(), group_key); + + created_keys.insert(these_keys.params().i(), these_keys); + } + + for keys in created_keys.values() { + for (l, verification_share) in &verification_shares { + assert_eq!(keys.original_verification_share(*l), *verification_share); + } + } + + assert_eq!( + ::generator() * + *recover_key(&created_keys.values().cloned().collect::>()).unwrap(), + group_key + ); + } +} diff --git a/crypto/dkg/pedpop/Cargo.toml b/crypto/dkg/pedpop/Cargo.toml new file mode 100644 index 00000000..cfc128d1 --- /dev/null +++ b/crypto/dkg/pedpop/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "dkg-pedpop" +version = "0.6.0" +description = "The PedPoP distributed key generation protocol" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/pedpop" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +thiserror = { version = "2", default-features = false, features = ["std"] } + +zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } +rand_core = { version = "0.6", default-features = false, features = ["std"] } + +transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] } +chacha20 = { version = "0.9", default-features = false, features = ["std", "zeroize"] } + +multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["std"] } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } +schnorr = { package = "schnorr-signatures", path = "../../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } +dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } + +dkg = { path = "../", default-features = false, features = ["std"] } + +[dev-dependencies] +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } diff --git a/crypto/dkg/pedpop/LICENSE b/crypto/dkg/pedpop/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/pedpop/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby 
granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/pedpop/README.md b/crypto/dkg/pedpop/README.md new file mode 100644 index 00000000..4ff801e0 --- /dev/null +++ b/crypto/dkg/pedpop/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation - PedPoP + +This implements the PedPoP distributed key generation protocol for the [`dkg`](https://docs.rs/dkg) +crate's types. + +This crate was originally part of the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit +[669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. 
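+
+As a condensed sketch of the full flow (hedged: this mirrors this crate's tests, assumes
+`rand_core`'s `OsRng` and the `ristretto` feature of `ciphersuite`, and elides the message
+serialization, authenticated channels, and error handling a real deployment needs):
+
+```rust
+use std::collections::HashMap;
+use rand_core::OsRng;
+use ciphersuite::Ristretto;
+use dkg_pedpop::*;
+
+fn main() {
+  // Hypothetical 32-byte context binding this DKG to an application.
+  const CONTEXT: [u8; 32] = *b"dkg-pedpop-readme-example-ctx!!!";
+  let (t, n) = (2u16, 3u16);
+  let participants: Vec<Participant> =
+    (1 ..= n).map(|i| Participant::new(i).unwrap()).collect();
+
+  // Round 1: every participant generates coefficients and broadcasts their commitments.
+  let mut machines = HashMap::new();
+  let mut commitments = HashMap::new();
+  for &i in &participants {
+    let params = ThresholdParams::new(t, n, i).unwrap();
+    let (machine, msg) =
+      KeyGenMachine::<Ristretto>::new(params, CONTEXT).generate_coefficients(&mut OsRng);
+    machines.insert(i, machine);
+    commitments.insert(i, msg);
+  }
+
+  // Round 2: every participant verifies the others' commitments and creates encrypted shares,
+  // one per other participant, to be sent to them.
+  let mut machines_r2 = HashMap::new();
+  let mut shares = HashMap::new();
+  for &i in &participants {
+    let mut others = commitments.clone();
+    others.remove(&i);
+    let (machine, these_shares) =
+      machines.remove(&i).unwrap().generate_secret_shares(&mut OsRng, others).unwrap();
+    machines_r2.insert(i, machine);
+    shares.insert(i, these_shares);
+  }
+
+  // Round 3: every participant processes the shares addressed to them, yielding ThresholdKeys.
+  let mut group_key = None;
+  for &i in &participants {
+    let mut our_shares = HashMap::new();
+    for &l in &participants {
+      if l != i {
+        our_shares.insert(l, shares[&l][&i].clone());
+      }
+    }
+    let keys =
+      machines_r2.remove(&i).unwrap().calculate_share(&mut OsRng, our_shares).unwrap().complete();
+    // Every honest participant ends up with the same group key.
+    assert_eq!(*group_key.get_or_insert(keys.group_key()), keys.group_key());
+  }
+}
+```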
diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/pedpop/src/encryption.rs similarity index 99% rename from crypto/dkg/src/encryption.rs rename to crypto/dkg/pedpop/src/encryption.rs index 1ad721f6..ceec4b31 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/pedpop/src/encryption.rs @@ -21,7 +21,7 @@ use multiexp::BatchVerifier; use schnorr::SchnorrSignature; use dleq::DLEqProof; -use crate::{Participant, ThresholdParams}; +use dkg::{Participant, ThresholdParams}; mod sealed { use super::*; @@ -69,7 +69,7 @@ impl EncryptionKeyMessage { buf } - #[cfg(any(test, feature = "tests"))] + #[cfg(test)] pub(crate) fn enc_key(&self) -> C::G { self.enc_key } diff --git a/crypto/dkg/src/pedpop.rs b/crypto/dkg/pedpop/src/lib.rs similarity index 86% rename from crypto/dkg/src/pedpop.rs rename to crypto/dkg/pedpop/src/lib.rs index adfc6958..f9d609f8 100644 --- a/crypto/dkg/src/pedpop.rs +++ b/crypto/dkg/pedpop/src/lib.rs @@ -1,15 +1,20 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +// This crate requires `dleq` which doesn't support no-std via std-shims +// #![cfg_attr(not(feature = "std"), no_std)] + use core::{marker::PhantomData, ops::Deref, fmt}; use std::{ io::{self, Read, Write}, collections::HashMap, }; -use rand_core::{RngCore, CryptoRng}; - use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; +use rand_core::{RngCore, CryptoRng}; use transcript::{Transcript, RecommendedTranscript}; +use multiexp::{multiexp_vartime, BatchVerifier}; use ciphersuite::{ group::{ ff::{Field, PrimeField}, @@ -17,29 +22,75 @@ use ciphersuite::{ }, Ciphersuite, }; -use multiexp::{multiexp_vartime, BatchVerifier}; use schnorr::SchnorrSignature; -use crate::{ - Participant, DkgError, ThresholdParams, Interpolation, ThresholdCore, validate_map, - encryption::{ - ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, Decryption, EncryptionKeyProof, - DecryptionError, - }, -}; +pub use dkg::*; -type FrostError = DkgError>; +mod encryption; +pub use encryption::*; + +#[cfg(test)] +mod tests; + +/// Errors possible during key generation. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum PedPoPError { + /// An incorrect amount of participants was provided. + #[error("incorrect amount of participants (expected {expected}, found {found})")] + IncorrectAmountOfParticipants { expected: usize, found: usize }, + /// An invalid proof of knowledge was provided. + #[error("invalid proof of knowledge (participant {0})")] + InvalidCommitments(Participant), + /// An invalid DKG share was provided. + #[error("invalid share (participant {participant}, blame {blame})")] + InvalidShare { participant: Participant, blame: Option> }, + /// A participant was missing. + #[error("missing participant {0}")] + MissingParticipant(Participant), + /// An error propagated from the underlying `dkg` crate. 
+ #[error("error from dkg ({0})")] + DkgError(DkgError), +} + +// Validate a map of values to have the expected included participants +fn validate_map( + map: &HashMap, + included: &[Participant], + ours: Participant, +) -> Result<(), PedPoPError> { + if (map.len() + 1) != included.len() { + Err(PedPoPError::IncorrectAmountOfParticipants { + expected: included.len(), + found: map.len() + 1, + })?; + } + + for included in included { + if *included == ours { + if map.contains_key(included) { + Err(PedPoPError::DkgError(DkgError::DuplicatedParticipant(*included)))?; + } + continue; + } + + if !map.contains_key(included) { + Err(PedPoPError::MissingParticipant(*included))?; + } + } + + Ok(()) +} #[allow(non_snake_case)] fn challenge(context: [u8; 32], l: Participant, R: &[u8], Am: &[u8]) -> C::F { - let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2"); + let mut transcript = RecommendedTranscript::new(b"DKG PedPoP v0.2"); transcript.domain_separate(b"schnorr_proof_of_knowledge"); transcript.append_message(b"context", context); transcript.append_message(b"participant", l.to_bytes()); transcript.append_message(b"nonce", R); transcript.append_message(b"commitments", Am); - C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr")) + C::hash_to_F(b"DKG-PedPoP-proof_of_knowledge-0", &transcript.challenge(b"schnorr")) } /// The commitments message, intended to be broadcast to all other parties. @@ -98,7 +149,7 @@ impl KeyGenMachine { KeyGenMachine { params, context, _curve: PhantomData } } - /// Start generating a key according to the FROST DKG spec. + /// Start generating a key according to the PedPoP DKG specification present in the FROST paper. /// /// Returns a commitments message to be sent to all parties over an authenticated channel. If any /// party submits multiple sets of commitments, they MUST be treated as malicious. 
@@ -106,7 +157,7 @@ impl KeyGenMachine { self, rng: &mut R, ) -> (SecretShareMachine, EncryptionKeyMessage>) { - let t = usize::from(self.params.t); + let t = usize::from(self.params.t()); let mut coefficients = Vec::with_capacity(t); let mut commitments = Vec::with_capacity(t); let mut cached_msg = vec![]; @@ -133,7 +184,7 @@ impl KeyGenMachine { ); // Additionally create an encryption mechanism to protect the secret shares - let encryption = Encryption::new(self.context, self.params.i, rng); + let encryption = Encryption::new(self.context, self.params.i(), rng); // Step 4: Broadcast let msg = @@ -250,21 +301,21 @@ impl SecretShareMachine { &mut self, rng: &mut R, mut commitment_msgs: HashMap>>, - ) -> Result>, FrostError> { + ) -> Result>, PedPoPError> { validate_map( &commitment_msgs, - &(1 ..= self.params.n()).map(Participant).collect::>(), + &self.params.all_participant_indexes().collect::>(), self.params.i(), )?; let mut batch = BatchVerifier::::new(commitment_msgs.len()); let mut commitments = HashMap::new(); - for l in (1 ..= self.params.n()).map(Participant) { + for l in self.params.all_participant_indexes() { let Some(msg) = commitment_msgs.remove(&l) else { continue }; let mut msg = self.encryption.register(l, msg); if msg.commitments.len() != self.params.t().into() { - Err(FrostError::InvalidCommitments(l))?; + Err(PedPoPError::InvalidCommitments(l))?; } // Step 5: Validate each proof of knowledge @@ -280,9 +331,9 @@ impl SecretShareMachine { commitments.insert(l, msg.commitments.drain(..).collect::>()); } - batch.verify_vartime_with_vartime_blame().map_err(FrostError::InvalidCommitments)?; + batch.verify_vartime_with_vartime_blame().map_err(PedPoPError::InvalidCommitments)?; - commitments.insert(self.params.i, self.our_commitments.drain(..).collect()); + commitments.insert(self.params.i(), self.our_commitments.drain(..).collect()); Ok(commitments) } @@ -299,13 +350,13 @@ impl SecretShareMachine { commitments: HashMap>>, ) -> Result< (KeyMachine, HashMap>>), - FrostError, + PedPoPError, > { let commitments = self.verify_r1(&mut *rng, commitments)?; // Step 1: Generate secret shares for all other parties let mut res = HashMap::new(); - for l in (1 ..= self.params.n()).map(Participant) { + for l in self.params.all_participant_indexes() { // Don't insert our own shares to the byte buffer which is meant to be sent around // An app developer could accidentally send it. Best to keep this black boxed if l == self.params.i() { @@ -413,10 +464,10 @@ impl KeyMachine { mut self, rng: &mut R, mut shares: HashMap>>, - ) -> Result, FrostError> { + ) -> Result, PedPoPError> { validate_map( &shares, - &(1 ..= self.params.n()).map(Participant).collect::>(), + &self.params.all_participant_indexes().collect::>(), self.params.i(), )?; @@ -427,7 +478,7 @@ impl KeyMachine { self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes); let share = Zeroizing::new(Option::::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| { - FrostError::InvalidShare { participant: l, blame: Some(blame.clone()) } + PedPoPError::InvalidShare { participant: l, blame: Some(blame.clone()) } })?); share_bytes.zeroize(); *self.secret += share.deref(); @@ -444,7 +495,7 @@ impl KeyMachine { BatchId::Decryption(l) => (l, None), BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())), }; - FrostError::InvalidShare { participant: l, blame } + PedPoPError::InvalidShare { participant: l, blame } })?; // Stripe commitments per t and sum them in advance. 
Calculating verification shares relies on @@ -458,7 +509,7 @@ impl KeyMachine { // Calculate each user's verification share let mut verification_shares = HashMap::new(); - for i in (1 ..= self.params.n()).map(Participant) { + for i in self.params.all_participant_indexes() { verification_shares.insert( i, if i == self.params.i() { @@ -473,13 +524,10 @@ impl KeyMachine { Ok(BlameMachine { commitments, encryption: encryption.into_decryption(), - result: Some(ThresholdCore { - params, - interpolation: Interpolation::Lagrange, - secret_share: secret, - group_key: stripes[0], - verification_shares, - }), + result: Some( + ThresholdKeys::new(params, Interpolation::Lagrange, secret, verification_shares) + .map_err(PedPoPError::DkgError)?, + ), }) } } @@ -488,7 +536,7 @@ impl KeyMachine { pub struct BlameMachine { commitments: HashMap>, encryption: Decryption, - result: Option>, + result: Option>, } impl fmt::Debug for BlameMachine { @@ -520,7 +568,7 @@ impl BlameMachine { /// territory of consensus protocols. This library does not handle that nor does it provide any /// tooling to do so. This function is solely intended to force users to acknowledge they're /// completing the protocol, not processing any blame. - pub fn complete(self) -> ThresholdCore { + pub fn complete(self) -> ThresholdKeys { self.result.unwrap() } @@ -602,12 +650,12 @@ impl AdditionalBlameMachine { context: [u8; 32], n: u16, mut commitment_msgs: HashMap>>, - ) -> Result> { + ) -> Result> { let mut commitments = HashMap::new(); let mut encryption = Decryption::new(context); for i in 1 ..= n { let i = Participant::new(i).unwrap(); - let Some(msg) = commitment_msgs.remove(&i) else { Err(DkgError::MissingParticipant(i))? }; + let Some(msg) = commitment_msgs.remove(&i) else { Err(PedPoPError::MissingParticipant(i))? }; commitments.insert(i, encryption.register(i, msg).commitments); } Ok(AdditionalBlameMachine(BlameMachine { commitments, encryption, result: None })) diff --git a/crypto/dkg/pedpop/src/tests.rs b/crypto/dkg/pedpop/src/tests.rs new file mode 100644 index 00000000..483b8b3b --- /dev/null +++ b/crypto/dkg/pedpop/src/tests.rs @@ -0,0 +1,345 @@ +use std::collections::HashMap; + +use rand_core::{RngCore, CryptoRng, OsRng}; + +use ciphersuite::{Ciphersuite, Ristretto}; + +use crate::*; + +const THRESHOLD: u16 = 3; +const PARTICIPANTS: u16 = 5; + +/// Clone a map without a specific value. 
+fn clone_without( + map: &HashMap, + without: &K, +) -> HashMap { + let mut res = map.clone(); + res.remove(without).unwrap(); + res +} + +type PedPoPEncryptedMessage = EncryptedMessage::F>>; +type PedPoPSecretShares = HashMap>; + +const CONTEXT: [u8; 32] = *b"DKG Test Key Generation "; + +// Commit, then return commitment messages, enc keys, and shares +#[allow(clippy::type_complexity)] +fn commit_enc_keys_and_shares( + rng: &mut R, +) -> ( + HashMap>, + HashMap>>, + HashMap, + HashMap>, +) { + let mut machines = HashMap::new(); + let mut commitments = HashMap::new(); + let mut enc_keys = HashMap::new(); + for i in (1 ..= PARTICIPANTS).map(|i| Participant::new(i).unwrap()) { + let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); + let machine = KeyGenMachine::::new(params, CONTEXT); + let (machine, these_commitments) = machine.generate_coefficients(rng); + machines.insert(i, machine); + + commitments.insert( + i, + EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params) + .unwrap(), + ); + enc_keys.insert(i, commitments[&i].enc_key()); + } + + let mut secret_shares = HashMap::new(); + let machines = machines + .drain() + .map(|(l, machine)| { + let (machine, mut shares) = + machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); + let shares = shares + .drain() + .map(|(l, share)| { + ( + l, + EncryptedMessage::read::<&[u8]>( + &mut share.serialize().as_ref(), + // Only t/n actually matters, so hardcode i to 1 here + ThresholdParams::new(THRESHOLD, PARTICIPANTS, Participant::new(1).unwrap()).unwrap(), + ) + .unwrap(), + ) + }) + .collect::>(); + secret_shares.insert(l, shares); + (l, machine) + }) + .collect::>(); + + (machines, commitments, enc_keys, secret_shares) +} + +fn generate_secret_shares( + shares: &HashMap>, + recipient: Participant, +) -> PedPoPSecretShares { + let mut our_secret_shares = HashMap::new(); + for (i, shares) in shares { + if recipient == *i { + continue; + } + our_secret_shares.insert(*i, shares[&recipient].clone()); + } + our_secret_shares +} + +/// Fully perform the PedPoP key generation algorithm. 
+fn pedpop_gen( + rng: &mut R, +) -> HashMap> { + let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng); + + let mut verification_shares = None; + let mut group_key = None; + machines + .drain() + .map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete(); + + // Verify the verification_shares are agreed upon + if verification_shares.is_none() { + verification_shares = Some( + these_keys + .params() + .all_participant_indexes() + .map(|i| (i, these_keys.original_verification_share(i))) + .collect::>(), + ); + } + assert_eq!( + verification_shares.as_ref().unwrap(), + &these_keys + .params() + .all_participant_indexes() + .map(|i| (i, these_keys.original_verification_share(i))) + .collect::>() + ); + + // Verify the group keys are agreed upon + if group_key.is_none() { + group_key = Some(these_keys.group_key()); + } + assert_eq!(group_key.unwrap(), these_keys.group_key()); + + (i, these_keys) + }) + .collect::>() +} + +const ONE: Participant = Participant::new(1).unwrap(); +const TWO: Participant = Participant::new(2).unwrap(); + +#[test] +fn test_pedpop() { + let _ = core::hint::black_box(pedpop_gen::<_, Ristretto>(&mut OsRng)); +} + +fn test_blame( + commitment_msgs: &HashMap>>, + machines: Vec>, + msg: &PedPoPEncryptedMessage, + blame: &Option>, +) { + for machine in machines { + let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone()); + assert_eq!(blamed, ONE); + // Verify additional blame also works + assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE); + + // Verify machines constructed with AdditionalBlameMachine::new work + assert_eq!( + AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame( + ONE, + TWO, + msg.clone(), + blame.clone() + ), + ONE, + ); + } +} + +// TODO: Write a macro which expands to the following +#[test] +fn invalid_encryption_pop_blame() { + let (mut machines, commitment_msgs, _, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + // Mutate the PoP of the encrypted message from 1 to 2 + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop(); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == TWO { + assert_eq!( + machine.err(), + Some(PedPoPError::InvalidShare { participant: ONE, blame: None }) + ); + // Explicitly declare we have a blame object, which happens to be None since invalid PoP + // is self-explainable + blame = Some(None); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); +} + +#[test] +fn invalid_ecdh_blame() { + let (mut machines, commitment_msgs, _, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + // Mutate the share to trigger a blame event + // Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass + // While here, 2 is malicious, this is so 1 creates the blame proof + // We then malleate 1's blame proof, so 1 ends up malicious + // Doesn't simply invalidate the PoP as that won't have a blame statement + // By mutating the encrypted data, we do ensure a blame statement is created + secret_shares + 
.get_mut(&TWO) + .unwrap() + .get_mut(&ONE) + .unwrap() + .invalidate_msg(&mut OsRng, CONTEXT, TWO); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == ONE { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + blame.as_mut().unwrap().as_mut().unwrap().invalidate_key(); + test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); +} + +// This should be largely equivalent to the prior test +#[test] +fn invalid_dleq_blame() { + let (mut machines, commitment_msgs, _, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + secret_shares + .get_mut(&TWO) + .unwrap() + .get_mut(&ONE) + .unwrap() + .invalidate_msg(&mut OsRng, CONTEXT, TWO); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == ONE { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq(); + test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); +} + +#[test] +fn invalid_share_serialization_blame() { + let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization( + &mut OsRng, + CONTEXT, + ONE, + enc_keys[&TWO], + ); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == TWO { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); +} + +#[test] +fn invalid_share_value_blame() { + let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = + commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); + + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value( + &mut OsRng, + CONTEXT, + ONE, + enc_keys[&TWO], + ); + + let mut blame = None; + let machines = machines + .drain() + .filter_map(|(i, machine)| { + let our_secret_shares = generate_secret_shares(&secret_shares, i); + let machine = machine.calculate_share(&mut OsRng, our_secret_shares); + if i == TWO { + blame = Some(match machine.err() { + Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), + _ => panic!(), + }); + None + } else { + Some(machine.unwrap()) + } + }) + .collect::>(); + + test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); +} diff --git a/crypto/dkg/promote/Cargo.toml 
b/crypto/dkg/promote/Cargo.toml new file mode 100644 index 00000000..e5f57ce9 --- /dev/null +++ b/crypto/dkg/promote/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "dkg-promote" +version = "0.6.0" +description = "Promotions for keys from the dkg crate" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/promote" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +thiserror = { version = "2", default-features = false, features = ["std"] } + +rand_core = { version = "0.6", default-features = false, features = ["std"] } + +transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } +dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } + +dkg = { path = "../", default-features = false, features = ["std"] } + +[dev-dependencies] +zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } +dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] } diff --git a/crypto/dkg/promote/LICENSE b/crypto/dkg/promote/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/promote/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/promote/README.md b/crypto/dkg/promote/README.md new file mode 100644 index 00000000..a5f8a9e6 --- /dev/null +++ b/crypto/dkg/promote/README.md @@ -0,0 +1,12 @@ +# Distributed Key Generation - Promote + +This crate implements 'promotions' for keys from the [`dkg`](https://docs.rs/dkg) crate. A promotion +takes a set of keys and maps it to a different `Ciphersuite`. 
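+
+As a sketch of the per-participant call pattern (hedged: `TargetCiphersuite` stands for any
+`Ciphersuite` sharing the source's group and field, and broadcasting/collecting the proofs over
+an authenticated channel is left to the caller):
+
+```rust
+use std::collections::HashMap;
+use rand_core::OsRng;
+use ciphersuite::{Ciphersuite, Ristretto};
+use dkg_promote::{GeneratorPromotion, GeneratorProof, Participant, ThresholdKeys, PromotionError};
+
+// Promote our Ristretto keys to `TargetCiphersuite`, given the proofs received from every other
+// participant.
+fn promote_my_keys<TargetCiphersuite>(
+  keys: ThresholdKeys<Ristretto>,
+  other_proofs: &HashMap<Participant, GeneratorProof<Ristretto>>,
+) -> Result<ThresholdKeys<TargetCiphersuite>, PromotionError>
+where
+  TargetCiphersuite:
+    Ciphersuite<F = <Ristretto as Ciphersuite>::F, G = <Ristretto as Ciphersuite>::G>,
+{
+  // Create our own proof of correct promotion, which must be broadcast to the other participants.
+  let (promotion, _our_proof) =
+    GeneratorPromotion::<_, TargetCiphersuite>::promote(&mut OsRng, keys);
+  // ... broadcast `_our_proof` and collect everyone else's proofs into `other_proofs` ...
+  promotion.complete(other_proofs)
+}
+```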
+ +This crate was originally part of the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit +[669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/promote/src/lib.rs similarity index 54% rename from crypto/dkg/src/promote.rs rename to crypto/dkg/promote/src/lib.rs index d92b382d..6fb08807 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/promote/src/lib.rs @@ -1,25 +1,52 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +// This crate requires `dleq` which doesn't support no-std via std-shims +// #![cfg_attr(not(feature = "std"), no_std)] + use core::{marker::PhantomData, ops::Deref}; use std::{ io::{self, Read, Write}, - sync::Arc, collections::HashMap, }; use rand_core::{RngCore, CryptoRng}; -use ciphersuite::{ - group::{ff::Field, GroupEncoding}, - Ciphersuite, -}; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use transcript::{Transcript, RecommendedTranscript}; use dleq::DLEqProof; -use crate::{Participant, DkgError, ThresholdCore, ThresholdKeys, validate_map}; +pub use dkg::*; -/// Promote a set of keys to another Ciphersuite definition. -pub trait CiphersuitePromote { - fn promote(self) -> ThresholdKeys; +#[cfg(test)] +mod tests; + +/// Errors encountered when promoting keys. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum PromotionError { + /// Invalid participant identifier. + #[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")] + InvalidParticipant { + /// The total amount of participants. + n: u16, + /// The specified participant. + participant: Participant, + }, + + /// An incorrect amount of participants was specified. + #[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")] + IncorrectAmountOfParticipants { + /// The threshold required. + t: u16, + /// The total amount of participants. + n: u16, + /// The amount of participants specified. + amount: usize, + }, + + /// Participant provided an invalid proof. + #[error("invalid proof {0}")] + InvalidProof(Participant), } fn transcript(key: &G, i: Participant) -> RecommendedTranscript { @@ -68,8 +95,9 @@ pub struct GeneratorPromotion { } impl> GeneratorPromotion { - /// Begin promoting keys from one generator to another. Returns a proof this share was properly - /// promoted. + /// Begin promoting keys from one generator to another. + /// + /// Returns a proof this share was properly promoted. 
pub fn promote( rng: &mut R, base: ThresholdKeys, @@ -79,7 +107,7 @@ impl> GeneratorPromotion< share: C2::generator() * base.secret_share().deref(), proof: DLEqProof::prove( rng, - &mut transcript(&base.core.group_key(), base.params().i), + &mut transcript(&base.original_group_key(), base.params().i()), &[C1::generator(), C2::generator()], base.secret_share(), ), @@ -92,36 +120,49 @@ impl> GeneratorPromotion< pub fn complete( self, proofs: &HashMap>, - ) -> Result, DkgError<()>> { + ) -> Result, PromotionError> { let params = self.base.params(); - validate_map(proofs, &(1 ..= params.n).map(Participant).collect::>(), params.i)?; - - let original_shares = self.base.verification_shares(); + if proofs.len() != (usize::from(params.n()) - 1) { + Err(PromotionError::IncorrectAmountOfParticipants { + t: params.n(), + n: params.n(), + amount: proofs.len() + 1, + })?; + } + for i in proofs.keys().copied() { + if u16::from(i) > params.n() { + Err(PromotionError::InvalidParticipant { n: params.n(), participant: i })?; + } + } let mut verification_shares = HashMap::new(); - verification_shares.insert(params.i, self.proof.share); - for (i, proof) in proofs { - let i = *i; + verification_shares.insert(params.i(), self.proof.share); + for i in 1 ..= params.n() { + let i = Participant::new(i).unwrap(); + if i == params.i() { + continue; + } + + let proof = proofs.get(&i).unwrap(); proof .proof .verify( - &mut transcript(&self.base.core.group_key(), i), + &mut transcript(&self.base.original_group_key(), i), &[C1::generator(), C2::generator()], - &[original_shares[&i], proof.share], + &[self.base.original_verification_share(i), proof.share], ) - .map_err(|_| DkgError::InvalidCommitments(i))?; + .map_err(|_| PromotionError::InvalidProof(i))?; verification_shares.insert(i, proof.share); } - Ok(ThresholdKeys { - core: Arc::new(ThresholdCore::new( + Ok( + ThresholdKeys::new( params, - self.base.core.interpolation.clone(), + self.base.interpolation().clone(), self.base.secret_share().clone(), verification_shares, - )), - scalar: C2::F::ONE, - offset: C2::F::ZERO, - }) + ) + .unwrap(), + ) } } diff --git a/crypto/dkg/promote/src/tests.rs b/crypto/dkg/promote/src/tests.rs new file mode 100644 index 00000000..1cae60d9 --- /dev/null +++ b/crypto/dkg/promote/src/tests.rs @@ -0,0 +1,113 @@ +use core::marker::PhantomData; +use std::collections::HashMap; + +use zeroize::{Zeroize, Zeroizing}; +use rand_core::OsRng; + +use ciphersuite::{ + group::{ff::Field, Group}, + Ciphersuite, Ristretto, +}; + +use dkg::*; +use dkg_recovery::recover_key; +use crate::{GeneratorPromotion, GeneratorProof}; + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +struct AltGenerator { + _curve: PhantomData, +} + +impl Ciphersuite for AltGenerator { + type F = C::F; + type G = C::G; + type H = C::H; + + const ID: &'static [u8] = b"Alternate Ciphersuite"; + + fn generator() -> Self::G { + C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") + } + + fn reduce_512(scalar: [u8; 64]) -> Self::F { + ::reduce_512(scalar) + } + + fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { + ::hash_to_F(dst, data) + } +} + +/// Clone a map without a specific value. 
+pub fn clone_without( + map: &HashMap, + without: &K, +) -> HashMap { + let mut res = map.clone(); + res.remove(without).unwrap(); + res +} + +// Test promotion of threshold keys to another generator +#[test] +fn test_generator_promotion() { + // Generate a set of `ThresholdKeys` + const PARTICIPANTS: u16 = 5; + let keys: [ThresholdKeys<_>; PARTICIPANTS as usize] = { + let shares: [::F; PARTICIPANTS as usize] = + core::array::from_fn(|_| ::F::random(&mut OsRng)); + let verification_shares = (0 .. PARTICIPANTS) + .map(|i| { + ( + Participant::new(i + 1).unwrap(), + ::generator() * shares[usize::from(i)], + ) + }) + .collect::>(); + core::array::from_fn(|i| { + ThresholdKeys::new( + ThresholdParams::new( + PARTICIPANTS, + PARTICIPANTS, + Participant::new(u16::try_from(i + 1).unwrap()).unwrap(), + ) + .unwrap(), + Interpolation::Constant(vec![::F::ONE; PARTICIPANTS as usize]), + Zeroizing::new(shares[i]), + verification_shares.clone(), + ) + .unwrap() + }) + }; + + // Perform the promotion + let mut promotions = HashMap::new(); + let mut proofs = HashMap::new(); + for keys in &keys { + let i = keys.params().i(); + let (promotion, proof) = + GeneratorPromotion::<_, AltGenerator>::promote(&mut OsRng, keys.clone()); + promotions.insert(i, promotion); + proofs.insert( + i, + GeneratorProof::::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap(), + ); + } + + // Complete the promotion, and verify it worked + let new_group_key = AltGenerator::::generator() * *recover_key(&keys).unwrap(); + for (i, promoting) in promotions.drain() { + let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); + assert_eq!(keys[usize::from(u16::from(i) - 1)].params(), promoted.params()); + assert_eq!(keys[usize::from(u16::from(i) - 1)].secret_share(), promoted.secret_share()); + assert_eq!(new_group_key, promoted.group_key()); + for l in 0 .. 
PARTICIPANTS { + let verification_share = + promoted.original_verification_share(Participant::new(l + 1).unwrap()); + assert_eq!( + AltGenerator::::generator() * **keys[usize::from(l)].secret_share(), + verification_share + ); + } + } +} diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml new file mode 100644 index 00000000..e2e7485c --- /dev/null +++ b/crypto/dkg/recovery/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "dkg-recovery" +version = "0.6.0" +description = "Recover a secret-shared key from a collection of dkg::ThresholdKeys" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recovery" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +zeroize = { version = "^1.5", default-features = false } + +thiserror = { version = "2", default-features = false } + +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } +dkg = { path = "../", default-features = false } + +[features] +std = [ + "zeroize/std", + "thiserror/std", + "ciphersuite/std", + "dkg/std", +] +default = ["std"] diff --git a/crypto/dkg/recovery/LICENSE b/crypto/dkg/recovery/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/recovery/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/recovery/README.md b/crypto/dkg/recovery/README.md new file mode 100644 index 00000000..eaad6ed5 --- /dev/null +++ b/crypto/dkg/recovery/README.md @@ -0,0 +1,14 @@ +# Distributed Key Generation + +A crate implementing a type for keys, presumably the result of a distributed key generation +protocol, and utilities from there. + +This crate used to host implementations of distributed key generation protocols as well (hence the +name). 
Those have been smashed into their own crates, such as +[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop) + +Before being smashed, this crate was [audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. diff --git a/crypto/dkg/recovery/src/lib.rs b/crypto/dkg/recovery/src/lib.rs new file mode 100644 index 00000000..bef0956b --- /dev/null +++ b/crypto/dkg/recovery/src/lib.rs @@ -0,0 +1,85 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +#![no_std] + +use core::ops::{Deref, DerefMut}; +extern crate alloc; +use alloc::vec::Vec; + +use zeroize::Zeroizing; + +use ciphersuite::Ciphersuite; + +pub use dkg::*; + +/// Errors encountered when recovering a secret-shared key from a collection of +/// `dkg::ThresholdKeys`. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum RecoveryError { + /// No keys were provided. + #[error("no keys provided")] + NoKeysProvided, + /// Not enough keys were provided. + #[error("not enough keys provided (threshold required {required}, provided {provided})")] + NotEnoughKeysProvided { required: u16, provided: usize }, + /// The keys had inconsistent parameters. + #[error("keys had inconsistent parameters")] + InconsistentParameters, + /// The keys are from distinct secret-sharing sessions or otherwise corrupt. + #[error("recovery failed")] + Failure, + /// An error propagated from the underlying `dkg` crate. + #[error("error from dkg ({0})")] + DkgError(DkgError), +} + +/// Recover a shared secret from a collection of `dkg::ThresholdKeys`. 
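+///
+/// A sketch of expected usage (hypothetical `keys`, holding at least `t` distinct shares from
+/// the same session):
+///
+/// ```ignore
+/// let secret = recover_key::<Ristretto>(&keys).unwrap();
+/// assert_eq!(<Ristretto as Ciphersuite>::generator() * secret.deref(), keys[0].group_key());
+/// ```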
+pub fn recover_key( + keys: &[ThresholdKeys], +) -> Result, RecoveryError> { + let included = keys.iter().map(|keys| keys.params().i()).collect::>(); + + let keys_len = keys.len(); + let mut keys = keys.iter(); + let first_keys = keys.next().ok_or(RecoveryError::NoKeysProvided)?; + { + let t = first_keys.params().t(); + if keys_len < usize::from(t) { + Err(RecoveryError::NotEnoughKeysProvided { required: t, provided: keys_len })?; + } + } + { + let first_params = ( + first_keys.params().t(), + first_keys.params().n(), + first_keys.group_key(), + first_keys.current_scalar(), + first_keys.current_offset(), + ); + for keys in keys.clone() { + let params = ( + keys.params().t(), + keys.params().n(), + keys.group_key(), + keys.current_scalar(), + keys.current_offset(), + ); + if params != first_params { + Err(RecoveryError::InconsistentParameters)?; + } + } + } + + let mut res: Zeroizing<_> = + first_keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().clone(); + for keys in keys { + *res.deref_mut() += + keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().deref(); + } + + if (C::generator() * res.deref()) != first_keys.group_key() { + Err(RecoveryError::Failure)?; + } + + Ok(res) +} diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index b7dc4b17..b98236a8 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -2,39 +2,29 @@ #![doc = include_str!("../README.md")] #![cfg_attr(not(feature = "std"), no_std)] -use core::fmt::{self, Debug}; +use core::{ + ops::Deref, + fmt::{self, Debug}, +}; +use std_shims::{sync::Arc, vec, vec::Vec, collections::HashMap, io}; -use thiserror::Error; +use zeroize::{Zeroize, Zeroizing}; -use zeroize::Zeroize; - -/// MuSig-style key aggregation. -pub mod musig; - -/// Encryption types and utilities used to secure DKG messages. -#[cfg(feature = "std")] -pub mod encryption; - -/// The PedPoP distributed key generation protocol described in the -/// [FROST paper](https://eprint.iacr.org/2020/852), augmented to be verifiable. -#[cfg(feature = "std")] -pub mod pedpop; - -/// Promote keys between ciphersuites. -#[cfg(feature = "std")] -pub mod promote; - -/// Tests for application-provided curves and algorithms. -#[cfg(any(test, feature = "tests"))] -pub mod tests; +use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + GroupEncoding, + }, + Ciphersuite, +}; /// The ID of a participant, defined as a non-zero u16. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)] #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] -pub struct Participant(pub(crate) u16); +pub struct Participant(u16); impl Participant { /// Create a new Participant identifier from a u16. - pub fn new(i: u16) -> Option { + pub const fn new(i: u16) -> Option { if i == 0 { None } else { @@ -44,7 +34,7 @@ impl Participant { /// Convert a Participant identifier to bytes. #[allow(clippy::wrong_self_convention)] - pub fn to_bytes(&self) -> [u8; 2] { + pub const fn to_bytes(&self) -> [u8; 2] { self.0.to_le_bytes() } } @@ -61,574 +51,631 @@ impl fmt::Display for Participant { } } -/// Various errors possible during key generation. -#[derive(Clone, PartialEq, Eq, Debug, Error)] -pub enum DkgError { +/// Errors encountered when working with threshold keys. +#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)] +pub enum DkgError { /// A parameter was zero. 
- #[cfg_attr(feature = "std", error("a parameter was 0 (threshold {0}, participants {1})"))] - ZeroParameter(u16, u16), + #[error("a parameter was 0 (threshold {t}, participants {n})")] + ZeroParameter { + /// The specified threshold. + t: u16, + /// The specified total amount of participants. + n: u16, + }, + /// The threshold exceeded the amount of participants. - #[cfg_attr(feature = "std", error("invalid threshold (max {1}, got {0})"))] - InvalidThreshold(u16, u16), + #[error("invalid threshold (max {n}, got {t})")] + InvalidThreshold { + /// The specified threshold. + t: u16, + /// The specified total amount of participants. + n: u16, + }, + /// Invalid participant identifier. - #[cfg_attr( - feature = "std", - error("invalid participant (0 < participant <= {0}, yet participant is {1})") - )] - InvalidParticipant(u16, Participant), + #[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")] + InvalidParticipant { + /// The total amount of participants. + n: u16, + /// The specified participant. + participant: Participant, + }, + + /// An incorrect amount of participants was specified. + #[error("incorrect amount of verification shares (n = {n} yet {shares} provided)")] + IncorrectAmountOfVerificationShares { + /// The amount of participants. + n: u16, + /// The amount of shares provided. + shares: usize, + }, + + /// An inapplicable method of interpolation was specified. + #[error("inapplicable method of interpolation ({0})")] + InapplicableInterpolation(&'static str), + + /// An incorrect amount of participants was specified. + #[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")] + IncorrectAmountOfParticipants { + /// The threshold required. + t: u16, + /// The total amount of participants. + n: u16, + /// The amount of participants specified. + amount: usize, + }, - /// Invalid signing set. - #[cfg_attr(feature = "std", error("invalid signing set"))] - InvalidSigningSet, - /// Invalid amount of participants. - #[cfg_attr(feature = "std", error("invalid participant quantity (expected {0}, got {1})"))] - InvalidParticipantQuantity(usize, usize), /// A participant was duplicated. - #[cfg_attr(feature = "std", error("duplicated participant ({0})"))] + #[error("a participant ({0}) was duplicated")] DuplicatedParticipant(Participant), - /// A participant was missing. - #[cfg_attr(feature = "std", error("missing participant {0}"))] - MissingParticipant(Participant), - /// An invalid proof of knowledge was provided. - #[cfg_attr(feature = "std", error("invalid proof of knowledge (participant {0})"))] - InvalidCommitments(Participant), - /// An invalid DKG share was provided. - #[cfg_attr(feature = "std", error("invalid share (participant {participant}, blame {blame})"))] - InvalidShare { participant: Participant, blame: Option }, + /// Not participating in declared signing set. + #[error("not participating in declared signing set")] + NotParticipating, } -#[cfg(feature = "std")] -mod lib { - pub use super::*; +// Manually implements BorshDeserialize so we can enforce it's a valid index +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for Participant { + fn deserialize_reader(reader: &mut R) -> io::Result { + Participant::new(u16::deserialize_reader(reader)?) + .ok_or_else(|| io::Error::other("invalid participant")) + } +} - use core::ops::Deref; - use std::{io, sync::Arc, collections::HashMap}; +/// Parameters for a multisig. 
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] +pub struct ThresholdParams { + /// Participants needed to sign on behalf of the group. + t: u16, + /// Amount of participants. + n: u16, + /// Index of the participant being acted for. + i: Participant, +} - use zeroize::Zeroizing; - - use ciphersuite::{ - group::{ - ff::{Field, PrimeField}, - GroupEncoding, - }, - Ciphersuite, - }; - - #[cfg(feature = "borsh")] - impl borsh::BorshDeserialize for Participant { - fn deserialize_reader(reader: &mut R) -> io::Result { - Participant::new(u16::deserialize_reader(reader)?) - .ok_or_else(|| io::Error::other("invalid participant")) +/// An iterator over all participant indexes. +struct AllParticipantIndexes { + i: u16, + n: u16, +} +impl Iterator for AllParticipantIndexes { + type Item = Participant; + fn next(&mut self) -> Option { + if self.i > self.n { + None?; } + let res = Participant::new(self.i).unwrap(); + + // If i == n == u16::MAX, we cause `i > n` by setting `n` to `0` so the iterator becomes empty + if self.i == u16::MAX { + self.n = 0; + } else { + self.i += 1; + } + + Some(res) + } +} + +impl ThresholdParams { + /// Create a new set of parameters. + pub const fn new(t: u16, n: u16, i: Participant) -> Result { + if (t == 0) || (n == 0) { + return Err(DkgError::ZeroParameter { t, n }); + } + + if t > n { + return Err(DkgError::InvalidThreshold { t, n }); + } + if i.0 > n { + return Err(DkgError::InvalidParticipant { n, participant: i }); + } + + Ok(ThresholdParams { t, n, i }) } - // Validate a map of values to have the expected included participants - pub(crate) fn validate_map( - map: &HashMap, - included: &[Participant], - ours: Participant, - ) -> Result<(), DkgError> { - if (map.len() + 1) != included.len() { - Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?; - } + /// The threshold for a multisig with these parameters. + pub const fn t(&self) -> u16 { + self.t + } + /// The amount of participants for a multisig with these parameters. + pub const fn n(&self) -> u16 { + self.n + } + /// The participant index of the share with these parameters. + pub const fn i(&self) -> Participant { + self.i + } - for included in included { - if *included == ours { - if map.contains_key(included) { - Err(DkgError::DuplicatedParticipant(*included))?; + /// An iterator over all participant indexes. + pub fn all_participant_indexes(&self) -> impl Iterator { + AllParticipantIndexes { i: 1, n: self.n } + } +} + +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for ThresholdParams { + fn deserialize_reader(reader: &mut R) -> io::Result { + let t = u16::deserialize_reader(reader)?; + let n = u16::deserialize_reader(reader)?; + let i = Participant::deserialize_reader(reader)?; + ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}"))) + } +} + +/// A method of interpolation. +#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] +pub enum Interpolation { + /// A list of constant coefficients, one for each of the secret key shares. + /* + There's no benefit to using a full linear combination here, as the additive term would have + an entirely known evaluation with a fixed, public coefficient of `1`. Accordingly, the entire + key can simply be offset with the additive term to achieve the same effect. + */ + Constant(Vec), + /// Lagrange interpolation. + Lagrange, +} + +impl Interpolation { + /// The interpolation factor for this participant, within this signing set. 
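+  ///
+  /// For `Constant`, this is the coefficient stored for participant `i`. For `Lagrange`, this is
+  /// the Lagrange coefficient at zero over the included indexes, i.e. the product of
+  /// `l / (l - i)` over every other included participant `l`.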
+ fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F { + match self { + Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)], + Interpolation::Lagrange => { + let i_f = F::from(u64::from(u16::from(i))); + + let mut num = F::ONE; + let mut denom = F::ONE; + for l in included { + if i == *l { + continue; + } + + let share = F::from(u64::from(u16::from(*l))); + num *= share; + denom *= share - i_f; } - continue; - } - if !map.contains_key(included) { - Err(DkgError::MissingParticipant(*included))?; + // Safe as this will only be 0 if we're part of the above loop + // (which we have an if case to avoid) + num * denom.invert().unwrap() + } + } + } +} + +/// A key share for a thresholdized secret key. +/// +/// This is the 'core' structure containing all relevant data, expected to be wrapped into an +/// heap-allocated pointer to minimize copies on the stack (`ThresholdKeys`, the publicly exposed +/// type). +#[derive(Clone, PartialEq, Eq)] +struct ThresholdCore { + params: ThresholdParams, + group_key: C::G, + verification_shares: HashMap, + interpolation: Interpolation, + secret_share: Zeroizing, +} + +impl fmt::Debug for ThresholdCore { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("ThresholdCore") + .field("params", &self.params) + .field("group_key", &self.group_key) + .field("verification_shares", &self.verification_shares) + .field("interpolation", &self.interpolation) + .finish_non_exhaustive() + } +} + +impl Zeroize for ThresholdCore { + fn zeroize(&mut self) { + self.params.zeroize(); + self.group_key.zeroize(); + for share in self.verification_shares.values_mut() { + share.zeroize(); + } + self.interpolation.zeroize(); + self.secret_share.zeroize(); + } +} + +/// Threshold keys usable for signing. +#[derive(Clone, Debug, Zeroize)] +pub struct ThresholdKeys { + // Core keys. + #[zeroize(skip)] + core: Arc>>, + + // Scalar applied to these keys. + scalar: C::F, + // Offset applied to these keys. + offset: C::F, +} + +/// View of keys, interpolated and with the expected linear combination taken for usage. +#[derive(Clone)] +pub struct ThresholdView { + interpolation: Interpolation, + scalar: C::F, + offset: C::F, + group_key: C::G, + included: Vec, + secret_share: Zeroizing, + original_verification_shares: HashMap, + verification_shares: HashMap, +} + +impl fmt::Debug for ThresholdView { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("ThresholdView") + .field("interpolation", &self.interpolation) + .field("scalar", &self.scalar) + .field("offset", &self.offset) + .field("group_key", &self.group_key) + .field("included", &self.included) + .field("original_verification_shares", &self.original_verification_shares) + .field("verification_shares", &self.verification_shares) + .finish_non_exhaustive() + } +} + +impl Zeroize for ThresholdView { + fn zeroize(&mut self) { + self.scalar.zeroize(); + self.offset.zeroize(); + self.group_key.zeroize(); + self.included.zeroize(); + self.secret_share.zeroize(); + for share in self.original_verification_shares.values_mut() { + share.zeroize(); + } + for share in self.verification_shares.values_mut() { + share.zeroize(); + } + } +} + +impl ThresholdKeys { + /// Create a new set of ThresholdKeys. 
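+  ///
+  /// `verification_shares` must contain exactly one share per participant index in `1 ..= n`,
+  /// and `Interpolation::Constant` may only be used when `t == n`. A sketch (hypothetical
+  /// values, for some `Ciphersuite` `C`):
+  ///
+  /// ```ignore
+  /// let keys = ThresholdKeys::<C>::new(
+  ///   ThresholdParams::new(t, n, i)?,
+  ///   Interpolation::Lagrange,
+  ///   secret_share,         // Zeroizing<C::F>
+  ///   verification_shares,  // HashMap<Participant, C::G>
+  /// )?;
+  /// ```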
+ pub fn new( + params: ThresholdParams, + interpolation: Interpolation, + secret_share: Zeroizing, + verification_shares: HashMap, + ) -> Result, DkgError> { + if verification_shares.len() != usize::from(params.n()) { + Err(DkgError::IncorrectAmountOfVerificationShares { + n: params.n(), + shares: verification_shares.len(), + })?; + } + for participant in verification_shares.keys().copied() { + if u16::from(participant) > params.n() { + Err(DkgError::InvalidParticipant { n: params.n(), participant })?; } } + match &interpolation { + Interpolation::Constant(_) => { + if params.t() != params.n() { + Err(DkgError::InapplicableInterpolation("constant interpolation for keys where t != n"))?; + } + } + Interpolation::Lagrange => {} + } + + let t = (1 ..= params.t()).map(Participant).collect::>(); + let group_key = + t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum(); + + Ok(ThresholdKeys { + core: Arc::new(Zeroizing::new(ThresholdCore { + params, + interpolation, + secret_share, + group_key, + verification_shares, + })), + scalar: C::F::ONE, + offset: C::F::ZERO, + }) + } + + /// Scale the keys by a given scalar to allow for various account and privacy schemes. + /// + /// This scalar is ephemeral and will not be included when these keys are serialized. The + /// scalar is applied on top of any already-existing scalar/offset. + /// + /// Returns `None` if the scalar is equal to `0`. + #[must_use] + pub fn scale(mut self, scalar: C::F) -> Option> { + if bool::from(scalar.is_zero()) { + None?; + } + self.scalar *= scalar; + self.offset *= scalar; + Some(self) + } + + /// Offset the keys by a given scalar to allow for various account and privacy schemes. + /// + /// This offset is ephemeral and will not be included when these keys are serialized. The + /// offset is applied on top of any already-existing scalar/offset. + #[must_use] + pub fn offset(mut self, offset: C::F) -> ThresholdKeys { + self.offset += offset; + self + } + + /// Return the current scalar in-use for these keys. + pub fn current_scalar(&self) -> C::F { + self.scalar + } + + /// Return the current offset in-use for these keys. + pub fn current_offset(&self) -> C::F { + self.offset + } + + /// Return the parameters for these keys. + pub fn params(&self) -> ThresholdParams { + self.core.params + } + + /// Return the original group key, without any tweaks applied. + pub fn original_group_key(&self) -> C::G { + self.core.group_key + } + + /// Return the interpolation method for these keys. + pub fn interpolation(&self) -> &Interpolation { + &self.core.interpolation + } + + /// Return the group key, with the expected linear combination taken. + pub fn group_key(&self) -> C::G { + (self.core.group_key * self.scalar) + (C::generator() * self.offset) + } + + /// Return the secret share for these keys. + pub fn secret_share(&self) -> &Zeroizing { + &self.core.secret_share + } + + /// Return the original (untweaked) verification share for the specified participant. + /// + /// This will panic if the participant index is invalid for these keys. + pub fn original_verification_share(&self, l: Participant) -> C::G { + self.core.verification_shares[&l] + } + + /// Obtain a view of these keys, interpolated for the specified signing set, with the specified + /// linear combination taken. 
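+  ///
+  /// This errors if the signing set contains less than `t` or more than `n` participants,
+  /// contains a duplicated or invalid participant, or does not contain this share's participant.
+  /// The offset is assigned to the lowest-indexed participant in the signing set.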
+ pub fn view(&self, mut included: Vec) -> Result, DkgError> { + if (included.len() < self.params().t.into()) || + (usize::from(self.params().n()) < included.len()) + { + Err(DkgError::IncorrectAmountOfParticipants { + t: self.params().t, + n: self.params().n, + amount: included.len(), + })?; + } + included.sort(); + { + let mut found = included[0] == self.params().i(); + for i in 1 .. included.len() { + if included[i - 1] == included[i] { + Err(DkgError::DuplicatedParticipant(included[i]))?; + } + found |= included[i] == self.params().i(); + } + if !found { + Err(DkgError::NotParticipating)?; + } + } + { + let last = *included.last().unwrap(); + if u16::from(last) > self.params().n() { + Err(DkgError::InvalidParticipant { n: self.params().n(), participant: last })?; + } + } + + // The interpolation occurs multiplicatively, letting us scale by the scalar now + let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref()); + let mut secret_share = Zeroizing::new( + self.core.interpolation.interpolation_factor(self.params().i(), &included) * + secret_share_scaled.deref(), + ); + + let mut verification_shares = HashMap::with_capacity(included.len()); + for i in &included { + let verification_share = self.core.verification_shares[i]; + let verification_share = verification_share * + self.scalar * + self.core.interpolation.interpolation_factor(*i, &included); + verification_shares.insert(*i, verification_share); + } + + /* + The offset is included by adding it to the participant with the lowest ID. + + This is done after interpolating to ensure, regardless of the method of interpolation, that + the method of interpolation does not scale the offset. For Lagrange interpolation, we could + add the offset to every key share before interpolating, yet for Constant interpolation, we + _have_ to add it as we do here (which also works even when we intend to perform Lagrange + interpolation). + */ + if included[0] == self.params().i() { + *secret_share += self.offset; + } + *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset; + + Ok(ThresholdView { + interpolation: self.core.interpolation.clone(), + scalar: self.scalar, + offset: self.offset, + group_key: self.group_key(), + secret_share, + original_verification_shares: self.core.verification_shares.clone(), + verification_shares, + included, + }) + } + + /// Write these keys to a type satisfying `std::io::Write`. + /// + /// This will not include the ephemeral scalar/offset. + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?; + writer.write_all(C::ID)?; + writer.write_all(&self.core.params.t.to_le_bytes())?; + writer.write_all(&self.core.params.n.to_le_bytes())?; + writer.write_all(&self.core.params.i.to_bytes())?; + match &self.core.interpolation { + Interpolation::Constant(c) => { + writer.write_all(&[0])?; + for c in c { + writer.write_all(c.to_repr().as_ref())?; + } + } + Interpolation::Lagrange => writer.write_all(&[1])?, + }; + let mut share_bytes = self.core.secret_share.to_repr(); + writer.write_all(share_bytes.as_ref())?; + share_bytes.as_mut().zeroize(); + for l in 1 ..= self.core.params.n { + writer.write_all( + self.core.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref(), + )?; + } Ok(()) } - /// Parameters for a multisig. 
- // These fields should not be made public as they should be static - #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] - #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))] - pub struct ThresholdParams { - /// Participants needed to sign on behalf of the group. - pub(crate) t: u16, - /// Amount of participants. - pub(crate) n: u16, - /// Index of the participant being acted for. - pub(crate) i: Participant, + /// Serialize these keys to a `Vec`. + /// + /// This will not include the ephemeral scalar/offset. + pub fn serialize(&self) -> Zeroizing> { + let mut serialized = Zeroizing::new(vec![]); + self.write::>(serialized.as_mut()).unwrap(); + serialized } - impl ThresholdParams { - /// Create a new set of parameters. - pub fn new(t: u16, n: u16, i: Participant) -> Result> { - if (t == 0) || (n == 0) { - Err(DkgError::ZeroParameter(t, n))?; + /// Read keys from a type satisfying `std::io::Read`. + pub fn read(reader: &mut R) -> io::Result> { + { + let different = || io::Error::other("deserializing ThresholdKeys for another curve"); + + let mut id_len = [0; 4]; + reader.read_exact(&mut id_len)?; + if u32::try_from(C::ID.len()).unwrap().to_le_bytes() != id_len { + Err(different())?; } - if t > n { - Err(DkgError::InvalidThreshold(t, n))?; - } - if u16::from(i) > n { - Err(DkgError::InvalidParticipant(n, i))?; - } - - Ok(ThresholdParams { t, n, i }) - } - - /// Return the threshold for a multisig with these parameters. - pub fn t(&self) -> u16 { - self.t - } - /// Return the amount of participants for a multisig with these parameters. - pub fn n(&self) -> u16 { - self.n - } - /// Return the participant index of the share with these parameters. - pub fn i(&self) -> Participant { - self.i - } - } - - #[cfg(feature = "borsh")] - impl borsh::BorshDeserialize for ThresholdParams { - fn deserialize_reader(reader: &mut R) -> io::Result { - let t = u16::deserialize_reader(reader)?; - let n = u16::deserialize_reader(reader)?; - let i = Participant::deserialize_reader(reader)?; - ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}"))) - } - } - - #[derive(Clone, PartialEq, Eq, Debug, Zeroize)] - pub(crate) enum Interpolation { - Constant(Vec), - Lagrange, - } - - impl Interpolation { - pub(crate) fn interpolation_factor(&self, i: Participant, included: &[Participant]) -> F { - match self { - Interpolation::Constant(c) => c[usize::from(u16::from(i) - 1)], - Interpolation::Lagrange => { - let i_f = F::from(u64::from(u16::from(i))); - - let mut num = F::ONE; - let mut denom = F::ONE; - for l in included { - if i == *l { - continue; - } - - let share = F::from(u64::from(u16::from(*l))); - num *= share; - denom *= share - i_f; - } - - // Safe as this will only be 0 if we're part of the above loop - // (which we have an if case to avoid) - num * denom.invert().unwrap() - } + let mut id = vec![0; C::ID.len()]; + reader.read_exact(&mut id)?; + if id != C::ID { + Err(different())?; } } - } - /// Keys and verification shares generated by a DKG. - /// Called core as they're expected to be wrapped into an Arc before usage in various operations. - #[derive(Clone, PartialEq, Eq)] - pub struct ThresholdCore { - /// Threshold Parameters. - pub(crate) params: ThresholdParams, - /// The interpolation method used. - pub(crate) interpolation: Interpolation, - - /// Secret share key. - pub(crate) secret_share: Zeroizing, - /// Group key. - pub(crate) group_key: C::G, - /// Verification shares. 
- pub(crate) verification_shares: HashMap, - } - - impl fmt::Debug for ThresholdCore { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt - .debug_struct("ThresholdCore") - .field("params", &self.params) - .field("interpolation", &self.interpolation) - .field("group_key", &self.group_key) - .field("verification_shares", &self.verification_shares) - .finish_non_exhaustive() - } - } - - impl Zeroize for ThresholdCore { - fn zeroize(&mut self) { - self.params.zeroize(); - self.interpolation.zeroize(); - self.secret_share.zeroize(); - self.group_key.zeroize(); - for share in self.verification_shares.values_mut() { - share.zeroize(); - } - } - } - - impl ThresholdCore { - pub(crate) fn new( - params: ThresholdParams, - interpolation: Interpolation, - secret_share: Zeroizing, - verification_shares: HashMap, - ) -> ThresholdCore { - let t = (1 ..= params.t()).map(Participant).collect::>(); - let group_key = - t.iter().map(|i| verification_shares[i] * interpolation.interpolation_factor(*i, &t)).sum(); - ThresholdCore { params, interpolation, secret_share, group_key, verification_shares } - } - - /// Parameters for these keys. - pub fn params(&self) -> ThresholdParams { - self.params - } - - /// Secret share for these keys. - pub fn secret_share(&self) -> &Zeroizing { - &self.secret_share - } - - /// Group key for these keys. - pub fn group_key(&self) -> C::G { - self.group_key - } - - pub(crate) fn verification_shares(&self) -> HashMap { - self.verification_shares.clone() - } - - /// Write these keys to a type satisfying std::io::Write. - pub fn write(&self, writer: &mut W) -> io::Result<()> { - writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?; - writer.write_all(C::ID)?; - writer.write_all(&self.params.t.to_le_bytes())?; - writer.write_all(&self.params.n.to_le_bytes())?; - writer.write_all(&self.params.i.to_bytes())?; - match &self.interpolation { - Interpolation::Constant(c) => { - writer.write_all(&[0])?; - for c in c { - writer.write_all(c.to_repr().as_ref())?; - } - } - Interpolation::Lagrange => writer.write_all(&[1])?, + let (t, n, i) = { + let mut read_u16 = || -> io::Result { + let mut value = [0; 2]; + reader.read_exact(&mut value)?; + Ok(u16::from_le_bytes(value)) }; - let mut share_bytes = self.secret_share.to_repr(); - writer.write_all(share_bytes.as_ref())?; - share_bytes.as_mut().zeroize(); - for l in 1 ..= self.params.n { - writer - .write_all(self.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref())?; - } - Ok(()) - } + ( + read_u16()?, + read_u16()?, + Participant::new(read_u16()?).ok_or(io::Error::other("invalid participant index"))?, + ) + }; - /// Serialize these keys to a `Vec`. - pub fn serialize(&self) -> Zeroizing> { - let mut serialized = Zeroizing::new(vec![]); - self.write::>(serialized.as_mut()).unwrap(); - serialized - } - - /// Read keys from a type satisfying std::io::Read. - pub fn read(reader: &mut R) -> io::Result> { - { - let different = || io::Error::other("deserializing ThresholdCore for another curve"); - - let mut id_len = [0; 4]; - reader.read_exact(&mut id_len)?; - if u32::try_from(C::ID.len()).unwrap().to_le_bytes() != id_len { - Err(different())?; + let mut interpolation = [0]; + reader.read_exact(&mut interpolation)?; + let interpolation = match interpolation[0] { + 0 => Interpolation::Constant({ + let mut res = Vec::with_capacity(usize::from(n)); + for _ in 0 .. 
n { + res.push(C::read_F(reader)?); } + res + }), + 1 => Interpolation::Lagrange, + _ => Err(io::Error::other("invalid interpolation method"))?, + }; - let mut id = vec![0; C::ID.len()]; - reader.read_exact(&mut id)?; - if id != C::ID { - Err(different())?; - } - } + let secret_share = Zeroizing::new(C::read_F(reader)?); - let (t, n, i) = { - let mut read_u16 = || -> io::Result { - let mut value = [0; 2]; - reader.read_exact(&mut value)?; - Ok(u16::from_le_bytes(value)) - }; - ( - read_u16()?, - read_u16()?, - Participant::new(read_u16()?).ok_or(io::Error::other("invalid participant index"))?, - ) - }; - - let mut interpolation = [0]; - reader.read_exact(&mut interpolation)?; - let interpolation = match interpolation[0] { - 0 => Interpolation::Constant({ - let mut res = Vec::with_capacity(usize::from(n)); - for _ in 0 .. n { - res.push(C::read_F(reader)?); - } - res - }), - 1 => Interpolation::Lagrange, - _ => Err(io::Error::other("invalid interpolation method"))?, - }; - - let secret_share = Zeroizing::new(C::read_F(reader)?); - - let mut verification_shares = HashMap::new(); - for l in (1 ..= n).map(Participant) { - verification_shares.insert(l, ::read_G(reader)?); - } - - Ok(ThresholdCore::new( - ThresholdParams::new(t, n, i).map_err(|_| io::Error::other("invalid parameters"))?, - interpolation, - secret_share, - verification_shares, - )) - } - } - - /// Threshold keys usable for signing. - #[derive(Clone, Debug, Zeroize)] - pub struct ThresholdKeys { - // Core keys. - // If this is the last reference, the underlying keys will be dropped. When that happens, the - // private key present within it will be zeroed out (as it's within Zeroizing). - #[zeroize(skip)] - pub(crate) core: Arc>, - - // Scalar applied to these keys. - pub(crate) scalar: C::F, - // Offset applied to these keys. - pub(crate) offset: C::F, - } - - /// View of keys, interpolated and with the expected linear combination taken for usage. - #[derive(Clone)] - pub struct ThresholdView { - interpolation: Interpolation, - scalar: C::F, - offset: C::F, - group_key: C::G, - included: Vec, - secret_share: Zeroizing, - original_verification_shares: HashMap, - verification_shares: HashMap, - } - - impl fmt::Debug for ThresholdView { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt - .debug_struct("ThresholdView") - .field("interpolation", &self.interpolation) - .field("scalar", &self.scalar) - .field("offset", &self.offset) - .field("group_key", &self.group_key) - .field("included", &self.included) - .field("original_verification_shares", &self.original_verification_shares) - .field("verification_shares", &self.verification_shares) - .finish_non_exhaustive() - } - } - - impl Zeroize for ThresholdView { - fn zeroize(&mut self) { - self.scalar.zeroize(); - self.offset.zeroize(); - self.group_key.zeroize(); - self.included.zeroize(); - self.secret_share.zeroize(); - for share in self.original_verification_shares.values_mut() { - share.zeroize(); - } - for share in self.verification_shares.values_mut() { - share.zeroize(); - } - } - } - - impl ThresholdKeys { - /// Create a new set of ThresholdKeys from a ThresholdCore. - pub fn new(core: ThresholdCore) -> ThresholdKeys { - ThresholdKeys { core: Arc::new(core), scalar: C::F::ONE, offset: C::F::ZERO } + let mut verification_shares = HashMap::new(); + for l in (1 ..= n).map(Participant) { + verification_shares.insert(l, ::read_G(reader)?); } - /// Scale the keys by a given scalar to allow for various account and privacy schemes. 
- /// - /// This scalar is ephemeral and will not be included when these keys are serialized. The - /// scalar is applied on top of any already-existing scalar/offset. - /// - /// Returns `None` if the scalar is equal to `0`. - #[must_use] - pub fn scale(mut self, scalar: C::F) -> Option> { - if bool::from(scalar.is_zero()) { - None?; - } - self.scalar *= scalar; - self.offset *= scalar; - Some(self) - } - - /// Offset the keys by a given scalar to allow for various account and privacy schemes. - /// - /// This offset is ephemeral and will not be included when these keys are serialized. The - /// offset is applied on top of any already-existing scalar/offset. - #[must_use] - pub fn offset(mut self, offset: C::F) -> ThresholdKeys { - self.offset += offset; - self - } - - /// Return the current scalar in-use for these keys. - pub fn current_scalar(&self) -> C::F { - self.scalar - } - - /// Return the current offset in-use for these keys. - pub fn current_offset(&self) -> C::F { - self.offset - } - - /// Return the parameters for these keys. - pub fn params(&self) -> ThresholdParams { - self.core.params - } - - /// Return the secret share for these keys. - pub fn secret_share(&self) -> &Zeroizing { - &self.core.secret_share - } - - /// Return the group key, with the expected linear combination taken. - pub fn group_key(&self) -> C::G { - (self.core.group_key * self.scalar) + (C::generator() * self.offset) - } - - /// Return all participants' verification shares without any offsetting. - pub(crate) fn verification_shares(&self) -> HashMap { - self.core.verification_shares() - } - - /// Serialize these keys to a `Vec`. - pub fn serialize(&self) -> Zeroizing> { - self.core.serialize() - } - - /// Obtain a view of these keys, interpolated for the specified signing set, with the specified - /// linear combination taken. - pub fn view(&self, mut included: Vec) -> Result, DkgError<()>> { - if (included.len() < self.params().t.into()) || - (usize::from(self.params().n()) < included.len()) - { - Err(DkgError::InvalidSigningSet)?; - } - included.sort(); - - // The interpolation occurs multiplicatively, letting us scale by the scalar now - let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref()); - let mut secret_share = Zeroizing::new( - self.core.interpolation.interpolation_factor(self.params().i(), &included) * - secret_share_scaled.deref(), - ); - - let mut verification_shares = self.verification_shares(); - for (i, share) in &mut verification_shares { - *share *= self.scalar * self.core.interpolation.interpolation_factor(*i, &included); - } - - /* - The offset is included by adding it to the participant with the lowest ID. - - This is done after interpolating to ensure, regardless of the method of interpolation, that - the method of interpolation does not scale the offset. For Lagrange interpolation, we could - add the offset to every key share before interpolating, yet for Constant interpolation, we - _have_ to add it as we do here (which also works even when we intend to perform Lagrange - interpolation). 
- */ - if included[0] == self.params().i() { - *secret_share += self.offset; - } - *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * self.offset; - - Ok(ThresholdView { - interpolation: self.core.interpolation.clone(), - scalar: self.scalar, - offset: self.offset, - group_key: self.group_key(), - secret_share, - original_verification_shares: self.verification_shares(), - verification_shares, - included, - }) - } - } - - impl From> for ThresholdKeys { - fn from(keys: ThresholdCore) -> ThresholdKeys { - ThresholdKeys::new(keys) - } - } - - impl ThresholdView { - /// Return the scalar applied to this view. - pub fn scalar(&self) -> C::F { - self.scalar - } - - /// Return the offset applied to this view. - pub fn offset(&self) -> C::F { - self.offset - } - - /// Return the group key. - pub fn group_key(&self) -> C::G { - self.group_key - } - - /// Return the included signers. - pub fn included(&self) -> &[Participant] { - &self.included - } - - /// Return the interpolation factor for a signer. - pub fn interpolation_factor(&self, participant: Participant) -> Option { - if !self.included.contains(&participant) { - None? - } - Some(self.interpolation.interpolation_factor(participant, &self.included)) - } - - /// Return the interpolated secret share, with the expected linear combination taken. - pub fn secret_share(&self) -> &Zeroizing { - &self.secret_share - } - - /// Return the original verification share for the specified participant. - pub fn original_verification_share(&self, l: Participant) -> C::G { - self.original_verification_shares[&l] - } - - /// Return the interpolated verification share, with the expected linear combination taken, - /// for the specified participant. - pub fn verification_share(&self, l: Participant) -> C::G { - self.verification_shares[&l] - } + ThresholdKeys::new( + ThresholdParams::new(t, n, i).map_err(io::Error::other)?, + interpolation, + secret_share, + verification_shares, + ) + .map_err(io::Error::other) + } +} + +impl ThresholdView { + /// Return the scalar applied to this view. + pub fn scalar(&self) -> C::F { + self.scalar + } + + /// Return the offset applied to this view. + pub fn offset(&self) -> C::F { + self.offset + } + + /// Return the group key. + pub fn group_key(&self) -> C::G { + self.group_key + } + + /// Return the included signers. + pub fn included(&self) -> &[Participant] { + &self.included + } + + /// Return the interpolation factor for a signer. + pub fn interpolation_factor(&self, participant: Participant) -> Option { + if !self.included.contains(&participant) { + None? + } + Some(self.interpolation.interpolation_factor(participant, &self.included)) + } + + /// Return the interpolated secret share, with the expected linear combination taken. + pub fn secret_share(&self) -> &Zeroizing { + &self.secret_share + } + + /// Return the original (untweaked) verification share for the specified participant. + /// + /// This will panic if the participant index is invalid for these keys. + pub fn original_verification_share(&self, l: Participant) -> C::G { + self.original_verification_shares[&l] + } + + /// Return the interpolated verification share, with the expected linear combination taken, + /// for the specified participant. + /// + /// This will panic if the participant was not included in the signing set. 
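+  ///
+  /// The returned share has this view's scalar and interpolation factor applied, with the offset
+  /// folded into the share of the lowest-indexed participant in the signing set.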
+ pub fn verification_share(&self, l: Participant) -> C::G { + self.verification_shares[&l] } } -#[cfg(feature = "std")] -pub use lib::*; diff --git a/crypto/dkg/src/musig.rs b/crypto/dkg/src/musig.rs deleted file mode 100644 index 82a755db..00000000 --- a/crypto/dkg/src/musig.rs +++ /dev/null @@ -1,129 +0,0 @@ -#[cfg(feature = "std")] -use core::ops::Deref; -use std_shims::{vec, vec::Vec, collections::HashSet}; -#[cfg(feature = "std")] -use std_shims::collections::HashMap; - -#[cfg(feature = "std")] -use zeroize::Zeroizing; - -use ciphersuite::{ - group::{Group, GroupEncoding}, - Ciphersuite, -}; - -use crate::DkgError; -#[cfg(feature = "std")] -use crate::{Participant, ThresholdParams, Interpolation, ThresholdCore}; - -fn check_keys(keys: &[C::G]) -> Result> { - if keys.is_empty() { - Err(DkgError::InvalidSigningSet)?; - } - // Too many signers - let keys_len = u16::try_from(keys.len()).map_err(|_| DkgError::InvalidSigningSet)?; - - // Duplicated public keys - if keys.iter().map(|key| key.to_bytes().as_ref().to_vec()).collect::>().len() != - keys.len() - { - Err(DkgError::InvalidSigningSet)?; - } - - Ok(keys_len) -} - -// This function panics if called with keys whose length exceed 2**16. -// This is fine since it's internal and all calls occur after calling check_keys, which does check -// the keys' length. -fn binding_factor_transcript( - context: &[u8], - keys: &[C::G], -) -> Result, DkgError<()>> { - let mut transcript = vec![]; - transcript.push(u8::try_from(context.len()).map_err(|_| DkgError::InvalidSigningSet)?); - transcript.extend(context); - transcript.extend(u16::try_from(keys.len()).unwrap().to_le_bytes()); - for key in keys { - transcript.extend(key.to_bytes().as_ref()); - } - Ok(transcript) -} - -fn binding_factor(mut transcript: Vec, i: u16) -> C::F { - transcript.extend(i.to_le_bytes()); - C::hash_to_F(b"musig", &transcript) -} - -/// The group key resulting from using this library's MuSig key gen. -/// -/// This function will return an error if the context is longer than 255 bytes. -/// -/// Creating an aggregate key with a list containing duplicated public keys will return an error. -pub fn musig_key(context: &[u8], keys: &[C::G]) -> Result> { - let keys_len = check_keys::(keys)?; - let transcript = binding_factor_transcript::(context, keys)?; - let mut res = C::G::identity(); - for i in 1 ..= keys_len { - // TODO: Calculate this with a multiexp - res += keys[usize::from(i - 1)] * binding_factor::(transcript.clone(), i); - } - Ok(res) -} - -/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key. -/// -/// Creating an aggregate key with a list containing duplicated public keys returns an error. -#[cfg(feature = "std")] -pub fn musig( - context: &[u8], - private_key: &Zeroizing, - keys: &[C::G], -) -> Result, DkgError<()>> { - let keys_len = check_keys::(keys)?; - - let our_pub_key = C::generator() * private_key.deref(); - let Some(pos) = keys.iter().position(|key| *key == our_pub_key) else { - // Not present in signing set - Err(DkgError::InvalidSigningSet)? - }; - let params = ThresholdParams::new( - keys_len, - keys_len, - // These errors shouldn't be possible, as pos is bounded to len - 1 - // Since len is prior guaranteed to be within u16::MAX, pos + 1 must also be - Participant::new((pos + 1).try_into().map_err(|_| DkgError::InvalidSigningSet)?) 
- .ok_or(DkgError::InvalidSigningSet)?, - )?; - - // Calculate the binding factor per-key - let transcript = binding_factor_transcript::(context, keys)?; - let mut binding = Vec::with_capacity(keys.len()); - for i in 1 ..= keys_len { - binding.push(binding_factor::(transcript.clone(), i)); - } - - // Our secret share is our private key - let secret_share = private_key.clone(); - - // Calculate verification shares - let mut verification_shares = HashMap::new(); - let mut group_key = C::G::identity(); - for l in 1 ..= keys_len { - let key = keys[usize::from(l) - 1]; - // TODO: Use a multiexp for this - group_key += key * binding[usize::from(l - 1)]; - - // These errors also shouldn't be possible, for the same reasons as documented above - verification_shares.insert(Participant::new(l).ok_or(DkgError::InvalidSigningSet)?, key); - } - debug_assert_eq!(C::generator() * secret_share.deref(), verification_shares[¶ms.i()]); - debug_assert_eq!(musig_key::(context, keys).unwrap(), group_key); - - Ok(ThresholdCore::new( - params, - Interpolation::Constant(binding), - secret_share, - verification_shares, - )) -} diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs deleted file mode 100644 index 0078020a..00000000 --- a/crypto/dkg/src/tests/mod.rs +++ /dev/null @@ -1,102 +0,0 @@ -use core::ops::Deref; -use std::collections::HashMap; - -use zeroize::Zeroizing; -use rand_core::{RngCore, CryptoRng}; - -use ciphersuite::{group::ff::Field, Ciphersuite}; - -use crate::{Participant, ThresholdCore, ThresholdKeys, musig::musig as musig_fn}; - -mod musig; -pub use musig::test_musig; - -/// FROST key generation testing utility. -pub mod pedpop; -use pedpop::pedpop_gen; - -// Promotion test. -mod promote; -use promote::test_generator_promotion; - -/// Constant amount of participants to use when testing. -pub const PARTICIPANTS: u16 = 5; -/// Constant threshold of participants to use when testing. -pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1; - -/// Clone a map without a specific value. -pub fn clone_without( - map: &HashMap, - without: &K, -) -> HashMap { - let mut res = map.clone(); - res.remove(without).unwrap(); - res -} - -/// Recover the secret from a collection of keys. -/// -/// This will panic if no keys, an insufficient amount of keys, or the wrong keys are provided. -pub fn recover_key(keys: &HashMap>) -> C::F { - let first = keys.values().next().expect("no keys provided"); - assert!(keys.len() >= first.params().t().into(), "not enough keys provided"); - let included = keys.keys().copied().collect::>(); - - let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| { - accum + - (first.core.interpolation.interpolation_factor(*i, &included) * keys.secret_share().deref()) - }); - assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys"); - group_private -} - -/// Generate threshold keys for tests. -pub fn key_gen( - rng: &mut R, -) -> HashMap> { - let res = pedpop_gen(rng) - .drain() - .map(|(i, core)| { - assert_eq!( - &ThresholdCore::::read::<&[u8]>(&mut core.serialize().as_ref()).unwrap(), - &core - ); - (i, ThresholdKeys::new(core)) - }) - .collect(); - assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key()); - res -} - -/// Generate MuSig keys for tests. -pub fn musig_key_gen( - rng: &mut R, -) -> HashMap> { - let mut keys = vec![]; - let mut pub_keys = vec![]; - for _ in 0 .. 
PARTICIPANTS { - let key = Zeroizing::new(C::F::random(&mut *rng)); - pub_keys.push(C::generator() * *key); - keys.push(key); - } - - let mut res = HashMap::new(); - for key in keys { - let these_keys = musig_fn::(b"Test MuSig Key Gen", &key, &pub_keys).unwrap(); - res.insert(these_keys.params().i(), ThresholdKeys::new(these_keys)); - } - - assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key()); - res -} - -/// Run the test suite on a ciphersuite. -pub fn test_ciphersuite(rng: &mut R) { - key_gen::<_, C>(rng); - test_generator_promotion::<_, C>(rng); -} - -#[test] -fn test_with_ristretto() { - test_ciphersuite::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng); -} diff --git a/crypto/dkg/src/tests/musig.rs b/crypto/dkg/src/tests/musig.rs deleted file mode 100644 index 086b26be..00000000 --- a/crypto/dkg/src/tests/musig.rs +++ /dev/null @@ -1,61 +0,0 @@ -use std::collections::HashMap; - -use zeroize::Zeroizing; -use rand_core::{RngCore, CryptoRng}; - -use ciphersuite::{group::ff::Field, Ciphersuite}; - -use crate::{ - ThresholdKeys, - musig::{musig_key, musig}, - tests::{PARTICIPANTS, recover_key}, -}; - -/// Tests MuSig key generation. -pub fn test_musig(rng: &mut R) { - let mut keys = vec![]; - let mut pub_keys = vec![]; - for _ in 0 .. PARTICIPANTS { - let key = Zeroizing::new(C::F::random(&mut *rng)); - pub_keys.push(C::generator() * *key); - keys.push(key); - } - - const CONTEXT: &[u8] = b"MuSig Test"; - - // Empty signing set - musig::(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).unwrap_err(); - // Signing set we're not part of - musig::(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).unwrap_err(); - - // Test with n keys - { - let mut created_keys = HashMap::new(); - let mut verification_shares = HashMap::new(); - let group_key = musig_key::(CONTEXT, &pub_keys).unwrap(); - for (i, key) in keys.iter().enumerate() { - let these_keys = musig::(CONTEXT, key, &pub_keys).unwrap(); - assert_eq!(these_keys.params().t(), PARTICIPANTS); - assert_eq!(these_keys.params().n(), PARTICIPANTS); - assert_eq!(usize::from(these_keys.params().i().0), i + 1); - - verification_shares - .insert(these_keys.params().i(), C::generator() * **these_keys.secret_share()); - - assert_eq!(these_keys.group_key(), group_key); - - created_keys.insert(these_keys.params().i(), ThresholdKeys::new(these_keys)); - } - - for keys in created_keys.values() { - assert_eq!(keys.verification_shares(), verification_shares); - } - - assert_eq!(C::generator() * recover_key(&created_keys), group_key); - } -} - -#[test] -fn musig_literal() { - test_musig::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng) -} diff --git a/crypto/dkg/src/tests/pedpop.rs b/crypto/dkg/src/tests/pedpop.rs deleted file mode 100644 index 42d7af67..00000000 --- a/crypto/dkg/src/tests/pedpop.rs +++ /dev/null @@ -1,331 +0,0 @@ -use std::collections::HashMap; - -use rand_core::{RngCore, CryptoRng}; - -use ciphersuite::Ciphersuite; - -use crate::{ - Participant, ThresholdParams, ThresholdCore, - pedpop::{Commitments, KeyGenMachine, SecretShare, KeyMachine}, - encryption::{EncryptionKeyMessage, EncryptedMessage}, - tests::{THRESHOLD, PARTICIPANTS, clone_without}, -}; - -type PedPoPEncryptedMessage = EncryptedMessage::F>>; -type PedPoPSecretShares = HashMap>; - -const CONTEXT: [u8; 32] = *b"DKG Test Key Generation "; - -// Commit, then return commitment messages, enc keys, and shares -#[allow(clippy::type_complexity)] -fn commit_enc_keys_and_shares( - rng: &mut R, -) -> ( - HashMap>, - HashMap>>, - HashMap, - HashMap>, -) { - let 
mut machines = HashMap::new(); - let mut commitments = HashMap::new(); - let mut enc_keys = HashMap::new(); - for i in (1 ..= PARTICIPANTS).map(Participant) { - let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); - let machine = KeyGenMachine::::new(params, CONTEXT); - let (machine, these_commitments) = machine.generate_coefficients(rng); - machines.insert(i, machine); - - commitments.insert( - i, - EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params) - .unwrap(), - ); - enc_keys.insert(i, commitments[&i].enc_key()); - } - - let mut secret_shares = HashMap::new(); - let machines = machines - .drain() - .map(|(l, machine)| { - let (machine, mut shares) = - machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap(); - let shares = shares - .drain() - .map(|(l, share)| { - ( - l, - EncryptedMessage::read::<&[u8]>( - &mut share.serialize().as_ref(), - // Only t/n actually matters, so hardcode i to 1 here - ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: Participant(1) }, - ) - .unwrap(), - ) - }) - .collect::>(); - secret_shares.insert(l, shares); - (l, machine) - }) - .collect::>(); - - (machines, commitments, enc_keys, secret_shares) -} - -fn generate_secret_shares( - shares: &HashMap>, - recipient: Participant, -) -> PedPoPSecretShares { - let mut our_secret_shares = HashMap::new(); - for (i, shares) in shares { - if recipient == *i { - continue; - } - our_secret_shares.insert(*i, shares[&recipient].clone()); - } - our_secret_shares -} - -/// Fully perform the PedPoP key generation algorithm. -pub fn pedpop_gen( - rng: &mut R, -) -> HashMap> { - let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng); - - let mut verification_shares = None; - let mut group_key = None; - machines - .drain() - .map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete(); - - // Verify the verification_shares are agreed upon - if verification_shares.is_none() { - verification_shares = Some(these_keys.verification_shares()); - } - assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares()); - - // Verify the group keys are agreed upon - if group_key.is_none() { - group_key = Some(these_keys.group_key()); - } - assert_eq!(group_key.unwrap(), these_keys.group_key()); - - (i, these_keys) - }) - .collect::>() -} - -#[cfg(test)] -mod literal { - use rand_core::OsRng; - - use ciphersuite::Ristretto; - - use crate::{ - DkgError, - encryption::EncryptionKeyProof, - pedpop::{BlameMachine, AdditionalBlameMachine}, - }; - - use super::*; - - const ONE: Participant = Participant(1); - const TWO: Participant = Participant(2); - - fn test_blame( - commitment_msgs: &HashMap>>, - machines: Vec>, - msg: &PedPoPEncryptedMessage, - blame: &Option>, - ) { - for machine in machines { - let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone()); - assert_eq!(blamed, ONE); - // Verify additional blame also works - assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE); - - // Verify machines constructed with AdditionalBlameMachine::new work - assert_eq!( - AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame( - ONE, - TWO, - msg.clone(), - blame.clone() - ), - ONE, - ); - } - } - - // TODO: Write a macro which expands to the following - #[test] - fn invalid_encryption_pop_blame() { - let (mut machines, 
commitment_msgs, _, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - // Mutate the PoP of the encrypted message from 1 to 2 - secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop(); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == TWO { - assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: ONE, blame: None })); - // Explicitly declare we have a blame object, which happens to be None since invalid PoP - // is self-explainable - blame = Some(None); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); - } - - #[test] - fn invalid_ecdh_blame() { - let (mut machines, commitment_msgs, _, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - // Mutate the share to trigger a blame event - // Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass - // While here, 2 is malicious, this is so 1 creates the blame proof - // We then malleate 1's blame proof, so 1 ends up malicious - // Doesn't simply invalidate the PoP as that won't have a blame statement - // By mutating the encrypted data, we do ensure a blame statement is created - secret_shares - .get_mut(&TWO) - .unwrap() - .get_mut(&ONE) - .unwrap() - .invalidate_msg(&mut OsRng, CONTEXT, TWO); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == ONE { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - blame.as_mut().unwrap().as_mut().unwrap().invalidate_key(); - test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); - } - - // This should be largely equivalent to the prior test - #[test] - fn invalid_dleq_blame() { - let (mut machines, commitment_msgs, _, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - secret_shares - .get_mut(&TWO) - .unwrap() - .get_mut(&ONE) - .unwrap() - .invalidate_msg(&mut OsRng, CONTEXT, TWO); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == ONE { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq(); - test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap()); - } - - #[test] - fn invalid_share_serialization_blame() { - let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization( - &mut OsRng, - CONTEXT, - ONE, - enc_keys[&TWO], - ); - - let mut blame = None; - 
let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == TWO { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); - } - - #[test] - fn invalid_share_value_blame() { - let (mut machines, commitment_msgs, enc_keys, mut secret_shares) = - commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - - secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value( - &mut OsRng, - CONTEXT, - ONE, - enc_keys[&TWO], - ); - - let mut blame = None; - let machines = machines - .drain() - .filter_map(|(i, machine)| { - let our_secret_shares = generate_secret_shares(&secret_shares, i); - let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == TWO { - blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), - _ => panic!(), - }); - None - } else { - Some(machine.unwrap()) - } - }) - .collect::>(); - - test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap()); - } -} diff --git a/crypto/dkg/src/tests/promote.rs b/crypto/dkg/src/tests/promote.rs deleted file mode 100644 index 242f085b..00000000 --- a/crypto/dkg/src/tests/promote.rs +++ /dev/null @@ -1,66 +0,0 @@ -use core::{marker::PhantomData, ops::Deref}; -use std::collections::HashMap; - -use rand_core::{RngCore, CryptoRng}; - -use zeroize::Zeroize; - -use ciphersuite::{group::Group, Ciphersuite}; - -use crate::{ - promote::{GeneratorPromotion, GeneratorProof}, - tests::{clone_without, key_gen, recover_key}, -}; - -#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] -struct AltGenerator { - _curve: PhantomData, -} - -impl Ciphersuite for AltGenerator { - type F = C::F; - type G = C::G; - type H = C::H; - - const ID: &'static [u8] = b"Alternate Ciphersuite"; - - fn generator() -> Self::G { - C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") - } - - fn reduce_512(scalar: [u8; 64]) -> Self::F { - ::reduce_512(scalar) - } - - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { - ::hash_to_F(dst, data) - } -} - -// Test promotion of threshold keys to another generator -pub(crate) fn test_generator_promotion(rng: &mut R) { - let keys = key_gen::<_, C>(&mut *rng); - - let mut promotions = HashMap::new(); - let mut proofs = HashMap::new(); - for (i, keys) in &keys { - let (promotion, proof) = - GeneratorPromotion::<_, AltGenerator>::promote(&mut *rng, keys.clone()); - promotions.insert(*i, promotion); - proofs.insert(*i, GeneratorProof::::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap()); - } - - let new_group_key = AltGenerator::::generator() * recover_key(&keys); - for (i, promoting) in promotions.drain() { - let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); - assert_eq!(keys[&i].params(), promoted.params()); - assert_eq!(keys[&i].secret_share(), promoted.secret_share()); - assert_eq!(new_group_key, promoted.group_key()); - for (l, verification_share) in promoted.verification_shares() { - assert_eq!( - AltGenerator::::generator() * keys[&l].secret_share().deref(), - verification_share - ); - } - } -} diff --git a/crypto/frost/Cargo.toml 
b/crypto/frost/Cargo.toml index 1d030621..0b2171d6 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -39,13 +39,13 @@ multiexp = { path = "../multiexp", version = "0.4", default-features = false, fe schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } -dkg = { path = "../dkg", version = "^0.5.1", default-features = false, features = ["std"] } +dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] } [dev-dependencies] hex = "0.4" serde_json = { version = "1", default-features = false, features = ["std"] } -dkg = { path = "../dkg", features = ["tests"] } +dkg = { path = "../dkg" } [features] ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] @@ -56,4 +56,4 @@ p256 = ["ciphersuite/p256"] ed448 = ["minimal-ed448", "ciphersuite/ed448"] -tests = ["hex", "rand_core/getrandom", "dkg/tests"] +tests = ["hex", "rand_core/getrandom"] diff --git a/tests/no-std/Cargo.toml b/tests/no-std/Cargo.toml index 36ba85f4..da21e0ff 100644 --- a/tests/no-std/Cargo.toml +++ b/tests/no-std/Cargo.toml @@ -30,6 +30,8 @@ dleq = { path = "../../crypto/dleq", default-features = false } schnorr-signatures = { path = "../../crypto/schnorr", default-features = false } dkg = { path = "../../crypto/dkg", default-features = false } +dkg-recovery = { path = "../../crypto/dkg/recovery", default-features = false } +dkg-musig = { path = "../../crypto/dkg/musig", default-features = false } # modular-frost = { path = "../../crypto/frost", default-features = false } # frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false } diff --git a/tests/no-std/src/lib.rs b/tests/no-std/src/lib.rs index 8339da2e..7b9c2cca 100644 --- a/tests/no-std/src/lib.rs +++ b/tests/no-std/src/lib.rs @@ -13,6 +13,8 @@ pub use dleq; pub use schnorr_signatures; pub use dkg; +pub use dkg_recovery; +pub use dkg_musig; /* pub use modular_frost; pub use frost_schnorrkel; From e87bbcda64ecea19c3e3e043e5378f416c85e3d6 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 01:54:52 -0400 Subject: [PATCH 073/116] Have modular-frost compile again --- Cargo.lock | 1 + crypto/frost/Cargo.toml | 6 ++- crypto/frost/src/lib.rs | 2 +- crypto/frost/src/tests/mod.rs | 68 +++++++++++++++++++++++++++---- crypto/frost/src/tests/nonces.rs | 4 +- crypto/frost/src/tests/vectors.rs | 10 ++--- 6 files changed, 71 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e4009f87..44d8697b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4877,6 +4877,7 @@ dependencies = [ "dalek-ff-group", "digest 0.10.7", "dkg", + "dkg-recovery", "flexible-transcript", "hex", "minimal-ed448", diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 0b2171d6..74b3318f 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -40,12 +40,14 @@ multiexp = { path = "../multiexp", version = "0.4", default-features = false, fe schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] } +dkg-recovery = { path = "../dkg/recovery", default-features = false, features = ["std"], optional = true } [dev-dependencies] hex = "0.4" serde_json = { version = "1", default-features = false, features = ["std"] } -dkg = { path = "../dkg" } +dkg = { path = "../dkg", default-features = false, features = ["std"] } +dkg-recovery = { path = "../dkg/recovery", 
default-features = false, features = ["std"] } [features] ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] @@ -56,4 +58,4 @@ p256 = ["ciphersuite/p256"] ed448 = ["minimal-ed448", "ciphersuite/ed448"] -tests = ["hex", "rand_core/getrandom"] +tests = ["hex", "rand_core/getrandom", "dkg-recovery"] diff --git a/crypto/frost/src/lib.rs b/crypto/frost/src/lib.rs index 47862c61..6baf8872 100644 --- a/crypto/frost/src/lib.rs +++ b/crypto/frost/src/lib.rs @@ -7,7 +7,7 @@ use std::collections::HashMap; use thiserror::Error; /// Distributed key generation protocol. -pub use dkg::{self, Participant, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView}; +pub use dkg::{self, Participant, ThresholdParams, ThresholdKeys, ThresholdView}; /// Curve trait and provided curves/HRAMs, forming various ciphersuites. pub mod curve; diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index 2bb9e3ea..1b2afa12 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -1,11 +1,18 @@ +use core::ops::Deref; use std::collections::HashMap; +use zeroize::{Zeroize, Zeroizing}; use rand_core::{RngCore, CryptoRng}; -pub use dkg::tests::{key_gen, musig_key_gen, recover_key}; +use ciphersuite::{ + group::ff::{Field, PrimeField}, + Ciphersuite, +}; +use dkg::Interpolation; +pub use dkg_recovery::recover_key; use crate::{ - Curve, Participant, ThresholdKeys, FrostError, + Curve, Participant, ThresholdParams, ThresholdKeys, FrostError, algorithm::{Algorithm, Hram, IetfSchnorr}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, }; @@ -26,6 +33,56 @@ pub const PARTICIPANTS: u16 = 5; /// Constant threshold of participants to use when signing. pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1; +/// Create a key, for testing purposes. +pub fn key_gen( + rng: &mut R, +) -> HashMap> { + let coefficients: [_; THRESHOLD as usize] = + core::array::from_fn(|_| Zeroizing::new(C::F::random(&mut *rng))); + + fn polynomial( + coefficients: &[Zeroizing], + l: Participant, + ) -> Zeroizing { + let l = F::from(u64::from(u16::from(l))); + // This should never be reached since Participant is explicitly non-zero + assert!(l != F::ZERO, "zero participant passed to polynomial"); + let mut share = Zeroizing::new(F::ZERO); + for (idx, coefficient) in coefficients.iter().rev().enumerate() { + *share += coefficient.deref(); + if idx != (coefficients.len() - 1) { + *share *= l; + } + } + share + } + + let group_key = C::generator() * *coefficients[0]; + let mut secret_shares = HashMap::with_capacity(PARTICIPANTS as usize); + let mut verification_shares = HashMap::with_capacity(PARTICIPANTS as usize); + for i in 1 ..= PARTICIPANTS { + let i = Participant::new(i).unwrap(); + let secret_share = polynomial(&coefficients, i); + secret_shares.insert(i, secret_share.clone()); + verification_shares.insert(i, C::generator() * *secret_share); + } + + let mut res = HashMap::with_capacity(PARTICIPANTS as usize); + for i in 1 ..= PARTICIPANTS { + let i = Participant::new(i).unwrap(); + let keys = ThresholdKeys::new( + ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(), + Interpolation::Lagrange, + secret_shares.remove(&i).unwrap(), + verification_shares.clone(), + ) + .unwrap(); + assert_eq!(keys.group_key(), group_key); + res.insert(i, keys); + } + res +} + /// Clone a map without a specific value. 
pub fn clone_without( map: &HashMap, @@ -238,12 +295,6 @@ pub fn test_schnorr>(rng: &mut R) { test_schnorr_with_keys::<_, _, H>(&mut *rng, &keys) } -/// Test a basic Schnorr signature, yet with MuSig. -pub fn test_musig_schnorr>(rng: &mut R) { - let keys = musig_key_gen(&mut *rng); - test_schnorr_with_keys::<_, _, H>(&mut *rng, &keys) -} - /// Test an offset Schnorr signature. pub fn test_offset_schnorr>(rng: &mut R) { const MSG: &[u8] = b"Hello, World!"; @@ -290,7 +341,6 @@ pub fn test_schnorr_blame>(rng: &mu /// Run a variety of tests against a ciphersuite. pub fn test_ciphersuite>(rng: &mut R) { test_schnorr::(rng); - test_musig_schnorr::(rng); test_offset_schnorr::(rng); test_schnorr_blame::(rng); diff --git a/crypto/frost/src/tests/nonces.rs b/crypto/frost/src/tests/nonces.rs index 7b1480e9..c37d618f 100644 --- a/crypto/frost/src/tests/nonces.rs +++ b/crypto/frost/src/tests/nonces.rs @@ -9,12 +9,10 @@ use transcript::{Transcript, RecommendedTranscript}; use ciphersuite::group::{ff::Field, Group, GroupEncoding}; -pub use dkg::tests::{key_gen, recover_key}; - use crate::{ Curve, Participant, ThresholdView, ThresholdKeys, FrostError, algorithm::Algorithm, - tests::{algorithm_machines, sign}, + tests::{key_gen, algorithm_machines, sign}, }; #[derive(Clone)] diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index dc0453a1..a5369a02 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -13,7 +13,7 @@ use ciphersuite::group::{ff::PrimeField, GroupEncoding}; use crate::{ curve::Curve, - Participant, ThresholdCore, ThresholdKeys, + Participant, ThresholdKeys, algorithm::{Hram, IetfSchnorr}, sign::{ Writable, Nonce, GeneratorCommitments, NonceCommitments, Commitments, Preprocess, @@ -115,7 +115,7 @@ fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap(vectors: &Vectors) -> HashMap::read::<&[u8]>(&mut serialized.as_ref()).unwrap(); + let these_keys = ThresholdKeys::::read::<&[u8]>(&mut serialized.as_ref()).unwrap(); assert_eq!(these_keys.params().t(), vectors.threshold); assert_eq!(usize::from(these_keys.params().n()), shares.len()); let participant = Participant::new(i).unwrap(); assert_eq!(these_keys.params().i(), participant); assert_eq!(these_keys.secret_share().deref(), &shares[usize::from(i - 1)]); assert_eq!(hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key); - keys.insert(participant, ThresholdKeys::new(these_keys)); + keys.insert(participant, these_keys); } keys @@ -157,7 +157,7 @@ pub fn test_with_vectors>( let secret = C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap(); assert_eq!(C::generator() * secret, group_key); - assert_eq!(recover_key(&keys), secret); + assert_eq!(*recover_key(&keys.values().cloned().collect::>()).unwrap(), secret); let mut machines = vec![]; for i in &vectors.included { From cfce2b26e23f5143f3d0088f85e80d0d6bbea01c Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 01:55:37 -0400 Subject: [PATCH 074/116] Update READMEs, targeting an 80-character line limit --- crypto/dkg/README.md | 11 ++++++----- crypto/dkg/musig/README.md | 6 +++--- crypto/dkg/pedpop/README.md | 4 ++-- crypto/dkg/promote/README.md | 5 +++-- crypto/dkg/recovery/README.md | 12 ++++++------ 5 files changed, 20 insertions(+), 18 deletions(-) diff --git a/crypto/dkg/README.md b/crypto/dkg/README.md index eaad6ed5..90a70097 100644 --- a/crypto/dkg/README.md +++ b/crypto/dkg/README.md @@ -1,11 +1,12 @@ # Distributed Key Generation -A crate 
implementing a type for keys, presumably the result of a distributed key generation -protocol, and utilities from there. +A crate implementing a type for keys, presumably the result of a distributed +key generation protocol, and utilities from there. -This crate used to host implementations of distributed key generation protocols as well (hence the -name). Those have been smashed into their own crates, such as -[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop) +This crate used to host implementations of distributed key generation protocols +as well (hence the name). Those have been smashed into their own crates, such +as [`dkg-musig`](https://docs.rs/dkg-musig) and +[`dkg-pedpop`](https://docs.rs/dkg-pedpop). Before being smashed, this crate was [audited by Cypher Stack in March 2023]( https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf diff --git a/crypto/dkg/musig/README.md b/crypto/dkg/musig/README.md index 9720e6f0..b36d03ae 100644 --- a/crypto/dkg/musig/README.md +++ b/crypto/dkg/musig/README.md @@ -1,9 +1,9 @@ # Distributed Key Generation - MuSig -This implements the MuSig key aggregation protocol for the [`dkg`](https://docs.rs/dkg) crate's -types. +This implements the MuSig key aggregation protocol for the +[`dkg`](https://docs.rs/dkg) crate's types. -This crate was originally part of the `dkg` crate, which was +This crate was originally part of (in some form) the `dkg` crate, which was [audited by Cypher Stack in March 2023]( https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf ), culminating in commit diff --git a/crypto/dkg/pedpop/README.md b/crypto/dkg/pedpop/README.md index 4ff801e0..4b72cfe0 100644 --- a/crypto/dkg/pedpop/README.md +++ b/crypto/dkg/pedpop/README.md @@ -1,7 +1,7 @@ # Distributed Key Generation - PedPoP -This implements the PedPoP distributed key generation protocol for the [`dkg`](https://docs.rs/dkg) -crate's types. +This implements the PedPoP distributed key generation protocol for the +[`dkg`](https://docs.rs/dkg) crate's types. This crate was originally part of the `dkg` crate, which was [audited by Cypher Stack in March 2023]( diff --git a/crypto/dkg/promote/README.md b/crypto/dkg/promote/README.md index a5f8a9e6..8a726aac 100644 --- a/crypto/dkg/promote/README.md +++ b/crypto/dkg/promote/README.md @@ -1,7 +1,8 @@ # Distributed Key Generation - Promote -This crate implements 'promotions' for keys from the [`dkg`](https://docs.rs/dkg) crate. A promotion -takes a set of keys and maps it to a different `Ciphersuite`. +This crate implements 'promotions' for keys from the +[`dkg`](https://docs.rs/dkg) crate. A promotion takes a set of keys and maps it +to a different `Ciphersuite`. This crate was originally part of the `dkg` crate, which was [audited by Cypher Stack in March 2023]( diff --git a/crypto/dkg/recovery/README.md b/crypto/dkg/recovery/README.md index eaad6ed5..f9e83ac5 100644 --- a/crypto/dkg/recovery/README.md +++ b/crypto/dkg/recovery/README.md @@ -1,11 +1,11 @@ -# Distributed Key Generation +# Distributed Key Generation - Recovery -A crate implementing a type for keys, presumably the result of a distributed key generation -protocol, and utilities from there. +A utility function to recover a key from its secret shares. -This crate used to host implementations of distributed key generation protocols as well (hence the -name). 
Those have been smashed into their own crates, such as -[`dkg-musig`](https://docs.rs/dkg-musig) and [`dkg-pedpop`](https://docs.rs/dkg-pedpop) +Keys likely SHOULD NOT ever be recovered, making this primarily intended for +testing purposes. Instead, the shares of the key should be used to produce +shares for the desired action, allowing using the key while never +reconstructing it. Before being smashed, this crate was [audited by Cypher Stack in March 2023]( https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf From b6edc94bcd1cc1bc3e41235a23be3860bb8ac581 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 02:13:12 -0400 Subject: [PATCH 075/116] Add dealer key generation crate --- .github/workflows/crypto-tests.yml | 1 + Cargo.lock | 13 ++++++ Cargo.toml | 1 + crypto/dkg/dealer/Cargo.toml | 36 ++++++++++++++++ crypto/dkg/dealer/LICENSE | 21 +++++++++ crypto/dkg/dealer/README.md | 13 ++++++ crypto/dkg/dealer/src/lib.rs | 68 ++++++++++++++++++++++++++++++ crypto/dkg/recovery/Cargo.toml | 2 +- crypto/frost/Cargo.toml | 4 +- crypto/frost/README.md | 4 ++ crypto/frost/src/tests/mod.rs | 58 +++---------------------- tests/no-std/Cargo.toml | 1 + tests/no-std/src/lib.rs | 1 + 13 files changed, 170 insertions(+), 53 deletions(-) create mode 100644 crypto/dkg/dealer/Cargo.toml create mode 100644 crypto/dkg/dealer/LICENSE create mode 100644 crypto/dkg/dealer/README.md create mode 100644 crypto/dkg/dealer/src/lib.rs diff --git a/.github/workflows/crypto-tests.yml b/.github/workflows/crypto-tests.yml index 2e853e71..cf3f00b4 100644 --- a/.github/workflows/crypto-tests.yml +++ b/.github/workflows/crypto-tests.yml @@ -37,6 +37,7 @@ jobs: -p dleq \ -p dkg \ -p dkg-recovery \ + -p dkg-dealer \ -p dkg-promote \ -p dkg-musig \ -p dkg-pedpop \ diff --git a/Cargo.lock b/Cargo.lock index 44d8697b..c121c8a3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2220,6 +2220,17 @@ dependencies = [ "zeroize", ] +[[package]] +name = "dkg-dealer" +version = "0.6.0" +dependencies = [ + "ciphersuite", + "dkg", + "rand_core", + "std-shims", + "zeroize", +] + [[package]] name = "dkg-musig" version = "0.6.0" @@ -4877,6 +4888,7 @@ dependencies = [ "dalek-ff-group", "digest 0.10.7", "dkg", + "dkg-dealer", "dkg-recovery", "flexible-transcript", "hex", @@ -8373,6 +8385,7 @@ dependencies = [ "ciphersuite", "dalek-ff-group", "dkg", + "dkg-dealer", "dkg-musig", "dkg-recovery", "dleq", diff --git a/Cargo.toml b/Cargo.toml index d1b1862e..db9f078f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,7 @@ members = [ "crypto/dleq", "crypto/dkg", "crypto/dkg/recovery", + "crypto/dkg/dealer", "crypto/dkg/promote", "crypto/dkg/musig", "crypto/dkg/pedpop", diff --git a/crypto/dkg/dealer/Cargo.toml b/crypto/dkg/dealer/Cargo.toml new file mode 100644 index 00000000..b1f35e89 --- /dev/null +++ b/crypto/dkg/dealer/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "dkg-dealer" +version = "0.6.0" +description = "Produce dkg::ThresholdKeys with a dealer key generation" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/dealer" +authors = ["Luke Parker "] +keywords = ["dkg", "multisig", "threshold", "ff", "group"] +edition = "2021" +rust-version = "1.80" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +zeroize = { version = "^1.5", default-features = false } +rand_core = { version = "0.6", default-features = false } + +std-shims = { 
version = "0.1", path = "../../../common/std-shims", default-features = false } + +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false } +dkg = { path = "../", default-features = false } + +[features] +std = [ + "zeroize/std", + "rand_core/std", + "std-shims/std", + "ciphersuite/std", + "dkg/std", +] +default = ["std"] diff --git a/crypto/dkg/dealer/LICENSE b/crypto/dkg/dealer/LICENSE new file mode 100644 index 00000000..6f7adff3 --- /dev/null +++ b/crypto/dkg/dealer/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2025 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/dkg/dealer/README.md b/crypto/dkg/dealer/README.md new file mode 100644 index 00000000..be597461 --- /dev/null +++ b/crypto/dkg/dealer/README.md @@ -0,0 +1,13 @@ +# Distributed Key Generation - Dealer + +This crate implements a dealer key generation protocol for the +[`dkg`](https://docs.rs/dkg) crate's types. This provides a single point of +failure when the key is being generated and is NOT recommended for use outside +of tests. + +This crate was originally part of (in some form) the `dkg` crate, which was +[audited by Cypher Stack in March 2023]( + https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf +), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06]( + https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06 +). Any subsequent changes have not undergone auditing. diff --git a/crypto/dkg/dealer/src/lib.rs b/crypto/dkg/dealer/src/lib.rs new file mode 100644 index 00000000..f00d5d85 --- /dev/null +++ b/crypto/dkg/dealer/src/lib.rs @@ -0,0 +1,68 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] +#![no_std] + +use core::ops::Deref; +use std_shims::{vec::Vec, collections::HashMap}; + +use zeroize::{Zeroize, Zeroizing}; +use rand_core::{RngCore, CryptoRng}; + +use ciphersuite::{ + group::ff::{Field, PrimeField}, + Ciphersuite, +}; +pub use dkg::*; + +/// Create a key via a dealer key generation protocol. +pub fn key_gen( + rng: &mut R, + threshold: u16, + participants: u16, +) -> Result>, DkgError> { + let mut coefficients = Vec::with_capacity(usize::from(participants)); + // `.max(1)` so we always generate the 0th coefficient which we'll share + for _ in 0 .. 
threshold.max(1) { + coefficients.push(Zeroizing::new(C::F::random(&mut *rng))); + } + + fn polynomial( + coefficients: &[Zeroizing], + l: Participant, + ) -> Zeroizing { + let l = F::from(u64::from(u16::from(l))); + // This should never be reached since Participant is explicitly non-zero + assert!(l != F::ZERO, "zero participant passed to polynomial"); + let mut share = Zeroizing::new(F::ZERO); + for (idx, coefficient) in coefficients.iter().rev().enumerate() { + *share += coefficient.deref(); + if idx != (coefficients.len() - 1) { + *share *= l; + } + } + share + } + + let group_key = C::generator() * coefficients[0].deref(); + let mut secret_shares = HashMap::with_capacity(participants as usize); + let mut verification_shares = HashMap::with_capacity(participants as usize); + for i in 1 ..= participants { + let i = Participant::new(i).expect("non-zero u16 wasn't a valid Participant index"); + let secret_share = polynomial(&coefficients, i); + secret_shares.insert(i, secret_share.clone()); + verification_shares.insert(i, C::generator() * *secret_share); + } + + let mut res = HashMap::with_capacity(participants as usize); + for (i, secret_share) in secret_shares { + let keys = ThresholdKeys::new( + ThresholdParams::new(threshold, participants, i)?, + Interpolation::Lagrange, + secret_share, + verification_shares.clone(), + )?; + debug_assert_eq!(keys.group_key(), group_key); + res.insert(i, keys); + } + Ok(res) +} diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml index e2e7485c..96df11fc 100644 --- a/crypto/dkg/recovery/Cargo.toml +++ b/crypto/dkg/recovery/Cargo.toml @@ -21,7 +21,7 @@ zeroize = { version = "^1.5", default-features = false } thiserror = { version = "2", default-features = false } -ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } +ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false } dkg = { path = "../", default-features = false } [features] diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 74b3318f..9e2f6ddd 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -41,6 +41,7 @@ schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5 dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] } dkg-recovery = { path = "../dkg/recovery", default-features = false, features = ["std"], optional = true } +dkg-dealer = { path = "../dkg/dealer", default-features = false, features = ["std"], optional = true } [dev-dependencies] hex = "0.4" @@ -48,6 +49,7 @@ serde_json = { version = "1", default-features = false, features = ["std"] } dkg = { path = "../dkg", default-features = false, features = ["std"] } dkg-recovery = { path = "../dkg/recovery", default-features = false, features = ["std"] } +dkg-dealer = { path = "../dkg/dealer", default-features = false, features = ["std"] } [features] ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] @@ -58,4 +60,4 @@ p256 = ["ciphersuite/p256"] ed448 = ["minimal-ed448", "ciphersuite/ed448"] -tests = ["hex", "rand_core/getrandom", "dkg-recovery"] +tests = ["hex", "rand_core/getrandom", "dkg-dealer" ,"dkg-recovery"] diff --git a/crypto/frost/README.md b/crypto/frost/README.md index e6ed2b0a..bf290acf 100644 --- a/crypto/frost/README.md +++ b/crypto/frost/README.md @@ -12,6 +12,10 @@ This library offers ciphersuites compatible with the [IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version 15 is supported. 
+A variety of testing utilities are provided under the `tests` feature. These +are provided with no guarantees and may have completely arbitrary behavior, +including panicking for completely well-reasoned input. + This library was [audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf), culminating in commit diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index 1b2afa12..79ac9ded 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -1,18 +1,12 @@ -use core::ops::Deref; use std::collections::HashMap; -use zeroize::{Zeroize, Zeroizing}; use rand_core::{RngCore, CryptoRng}; -use ciphersuite::{ - group::ff::{Field, PrimeField}, - Ciphersuite, -}; -use dkg::Interpolation; +use ciphersuite::Ciphersuite; pub use dkg_recovery::recover_key; use crate::{ - Curve, Participant, ThresholdParams, ThresholdKeys, FrostError, + Curve, Participant, ThresholdKeys, FrostError, algorithm::{Algorithm, Hram, IetfSchnorr}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, }; @@ -37,49 +31,11 @@ pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1; pub fn key_gen( rng: &mut R, ) -> HashMap> { - let coefficients: [_; THRESHOLD as usize] = - core::array::from_fn(|_| Zeroizing::new(C::F::random(&mut *rng))); - - fn polynomial( - coefficients: &[Zeroizing], - l: Participant, - ) -> Zeroizing { - let l = F::from(u64::from(u16::from(l))); - // This should never be reached since Participant is explicitly non-zero - assert!(l != F::ZERO, "zero participant passed to polynomial"); - let mut share = Zeroizing::new(F::ZERO); - for (idx, coefficient) in coefficients.iter().rev().enumerate() { - *share += coefficient.deref(); - if idx != (coefficients.len() - 1) { - *share *= l; - } - } - share - } - - let group_key = C::generator() * *coefficients[0]; - let mut secret_shares = HashMap::with_capacity(PARTICIPANTS as usize); - let mut verification_shares = HashMap::with_capacity(PARTICIPANTS as usize); - for i in 1 ..= PARTICIPANTS { - let i = Participant::new(i).unwrap(); - let secret_share = polynomial(&coefficients, i); - secret_shares.insert(i, secret_share.clone()); - verification_shares.insert(i, C::generator() * *secret_share); - } - - let mut res = HashMap::with_capacity(PARTICIPANTS as usize); - for i in 1 ..= PARTICIPANTS { - let i = Participant::new(i).unwrap(); - let keys = ThresholdKeys::new( - ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(), - Interpolation::Lagrange, - secret_shares.remove(&i).unwrap(), - verification_shares.clone(), - ) - .unwrap(); - assert_eq!(keys.group_key(), group_key); - res.insert(i, keys); - } + let res = dkg_dealer::key_gen::(rng, THRESHOLD, PARTICIPANTS).unwrap(); + assert_eq!( + C::generator() * *recover_key(&res.values().cloned().collect::>()).unwrap(), + res.values().next().unwrap().group_key() + ); res } diff --git a/tests/no-std/Cargo.toml b/tests/no-std/Cargo.toml index da21e0ff..fa0649f5 100644 --- a/tests/no-std/Cargo.toml +++ b/tests/no-std/Cargo.toml @@ -31,6 +31,7 @@ schnorr-signatures = { path = "../../crypto/schnorr", default-features = false } dkg = { path = "../../crypto/dkg", default-features = false } dkg-recovery = { path = "../../crypto/dkg/recovery", default-features = false } +dkg-dealer = { path = "../../crypto/dkg/dealer", default-features = false } dkg-musig = { path = "../../crypto/dkg/musig", default-features = false } # modular-frost = { 
path = "../../crypto/frost", default-features = false } # frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false } diff --git a/tests/no-std/src/lib.rs b/tests/no-std/src/lib.rs index 7b9c2cca..fe0cff64 100644 --- a/tests/no-std/src/lib.rs +++ b/tests/no-std/src/lib.rs @@ -14,6 +14,7 @@ pub use schnorr_signatures; pub use dkg; pub use dkg_recovery; +pub use dkg_dealer; pub use dkg_musig; /* pub use modular_frost; From 72e80c1a3dabd39f609469419e306723661a1899 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 02:21:31 -0400 Subject: [PATCH 076/116] Update everything which uses dkg to the new APIs --- Cargo.lock | 3 ++- processor/Cargo.toml | 1 + processor/src/key_gen.rs | 5 ++--- substrate/validator-sets/primitives/Cargo.toml | 4 ++-- substrate/validator-sets/primitives/src/lib.rs | 11 ++++++++--- tests/coordinator/Cargo.toml | 2 +- tests/processor/Cargo.toml | 2 +- tests/processor/src/tests/batch.rs | 2 +- tests/processor/src/tests/key_gen.rs | 2 +- tests/processor/src/tests/mod.rs | 9 +++++++++ tests/processor/src/tests/send.rs | 2 +- 11 files changed, 29 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c121c8a3..c0c27a8b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8483,6 +8483,7 @@ dependencies = [ "ciphersuite", "const-hex", "dalek-ff-group", + "dkg-pedpop", "dockertest", "env_logger", "ethereum-serai", @@ -8667,7 +8668,7 @@ version = "0.1.0" dependencies = [ "borsh", "ciphersuite", - "dkg", + "dkg-musig", "parity-scale-codec", "scale-info", "serai-primitives", diff --git a/processor/Cargo.toml b/processor/Cargo.toml index e881a85e..b68d8a89 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -37,6 +37,7 @@ serde_json = { version = "1", default-features = false, features = ["std"] } ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std"] } +dkg-pedpop = { path = "../crypto/dkg/pedpop", default-features = false } frost = { package = "modular-frost", path = "../crypto/frost", default-features = false, features = ["ristretto"] } frost-schnorrkel = { path = "../crypto/schnorrkel", default-features = false } diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index 297db194..894fec78 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -7,11 +7,10 @@ use rand_chacha::ChaCha20Rng; use transcript::{Transcript, RecommendedTranscript}; use ciphersuite::group::GroupEncoding; +use dkg_pedpop::*; use frost::{ curve::{Ciphersuite, Ristretto}, - dkg::{ - DkgError, Participant, ThresholdParams, ThresholdCore, ThresholdKeys, encryption::*, pedpop::*, - }, + dkg::{DkgError, Participant, ThresholdParams, ThresholdCore, ThresholdKeys}, }; use log::info; diff --git a/substrate/validator-sets/primitives/Cargo.toml b/substrate/validator-sets/primitives/Cargo.toml index 844e6134..41883059 100644 --- a/substrate/validator-sets/primitives/Cargo.toml +++ b/substrate/validator-sets/primitives/Cargo.toml @@ -19,7 +19,7 @@ workspace = true zeroize = { version = "^1.5", features = ["derive"], optional = true } ciphersuite = { path = "../../../crypto/ciphersuite", version = "0.4", default-features = false, features = ["alloc", "ristretto"] } -dkg = { path = "../../../crypto/dkg", version = "0.5", default-features = false } +dkg-musig = { path = "../../../crypto/dkg/musig", default-features = false } borsh = { version = "1", 
default-features = false, features = ["derive", "de_strict_order"], optional = true } serde = { version = "1", default-features = false, features = ["derive", "alloc"], optional = true } @@ -33,7 +33,7 @@ sp-std = { git = "https://github.com/serai-dex/substrate", default-features = fa serai-primitives = { path = "../../primitives", default-features = false } [features] -std = ["zeroize", "ciphersuite/std", "dkg/std", "borsh?/std", "serde?/std", "scale/std", "scale-info/std", "sp-core/std", "sp-std/std", "serai-primitives/std"] +std = ["zeroize", "ciphersuite/std", "dkg-musig/std", "borsh?/std", "serde?/std", "scale/std", "scale-info/std", "sp-core/std", "sp-std/std", "serai-primitives/std"] borsh = ["dep:borsh", "serai-primitives/borsh"] serde = ["dep:serde", "serai-primitives/serde"] default = ["std"] diff --git a/substrate/validator-sets/primitives/src/lib.rs b/substrate/validator-sets/primitives/src/lib.rs index 9944d485..581491f0 100644 --- a/substrate/validator-sets/primitives/src/lib.rs +++ b/substrate/validator-sets/primitives/src/lib.rs @@ -107,8 +107,13 @@ impl Zeroize for KeyPair { } /// The MuSig context for a validator set. -pub fn musig_context(set: ValidatorSet) -> Vec { - [b"ValidatorSets-musig_key".as_ref(), &set.encode()].concat() +pub fn musig_context(set: ValidatorSet) -> [u8; 32] { + let mut context = [0; 32]; + const DST: &[u8] = b"ValidatorSets-musig_key"; + context[.. DST.len()].copy_from_slice(DST); + let set = set.encode(); + context[DST.len() .. (DST.len() + set.len())].copy_from_slice(set.len()); + context } /// The MuSig public key for a validator set. @@ -122,7 +127,7 @@ pub fn musig_key(set: ValidatorSet, set_keys: &[Public]) -> Public { .expect("invalid participant"), ); } - Public(dkg::musig::musig_key::(&musig_context(set), &keys).unwrap().to_bytes()) + Public(dkg_musig::musig_key_vartime::(musig_context(set), &keys).unwrap().to_bytes()) } /// The message for the set_keys signature. 
diff --git a/tests/coordinator/Cargo.toml b/tests/coordinator/Cargo.toml index 89b168c0..edc3c112 100644 --- a/tests/coordinator/Cargo.toml +++ b/tests/coordinator/Cargo.toml @@ -26,7 +26,7 @@ rand_core = { version = "0.6", default-features = false } blake2 = "0.10" ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["ristretto", "secp256k1"] } schnorrkel = "0.11" -dkg = { path = "../../crypto/dkg", default-features = false, features = ["tests"] } +dkg = { path = "../../crypto/dkg", default-features = false } messages = { package = "serai-processor-messages", path = "../../processor/messages" } diff --git a/tests/processor/Cargo.toml b/tests/processor/Cargo.toml index 395bcad8..9da9a347 100644 --- a/tests/processor/Cargo.toml +++ b/tests/processor/Cargo.toml @@ -24,7 +24,7 @@ rand_core = { version = "0.6", default-features = false, features = ["getrandom" curve25519-dalek = "4" ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["secp256k1", "ristretto"] } -dkg = { path = "../../crypto/dkg", default-features = false, features = ["tests"] } +dkg = { path = "../../crypto/dkg", default-features = false } bitcoin-serai = { path = "../../networks/bitcoin" } diff --git a/tests/processor/src/tests/batch.rs b/tests/processor/src/tests/batch.rs index 4a34500e..fb6803c8 100644 --- a/tests/processor/src/tests/batch.rs +++ b/tests/processor/src/tests/batch.rs @@ -3,7 +3,7 @@ use std::{ time::{SystemTime, Duration}, }; -use dkg::{Participant, tests::clone_without}; +use dkg::Participant; use messages::{coordinator::*, SubstrateContext}; diff --git a/tests/processor/src/tests/key_gen.rs b/tests/processor/src/tests/key_gen.rs index ec616b51..abaddfcf 100644 --- a/tests/processor/src/tests/key_gen.rs +++ b/tests/processor/src/tests/key_gen.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, time::SystemTime}; -use dkg::{Participant, ThresholdParams, tests::clone_without}; +use dkg::{Participant, ThresholdParams}; use serai_client::{ primitives::{BlockHash, PublicKey, EXTERNAL_NETWORKS}, diff --git a/tests/processor/src/tests/mod.rs b/tests/processor/src/tests/mod.rs index 0347a3dd..42cfef69 100644 --- a/tests/processor/src/tests/mod.rs +++ b/tests/processor/src/tests/mod.rs @@ -15,6 +15,15 @@ mod send; pub(crate) const COORDINATORS: usize = 4; pub(crate) const THRESHOLD: usize = ((COORDINATORS * 2) / 3) + 1; +fn clone_without( + map: &HashMap, + without: &K, +) -> HashMap { + let mut res = map.clone(); + res.remove(without).unwrap(); + res +} + fn new_test( network: ExternalNetworkId, ) -> (Vec<(Handles, ::F)>, DockerTest) { diff --git a/tests/processor/src/tests/send.rs b/tests/processor/src/tests/send.rs index e50edc3f..1e5f55ce 100644 --- a/tests/processor/src/tests/send.rs +++ b/tests/processor/src/tests/send.rs @@ -3,7 +3,7 @@ use std::{ time::{SystemTime, Duration}, }; -use dkg::{Participant, tests::clone_without}; +use dkg::Participant; use messages::{sign::SignId, SubstrateContext}; From 82b543ef751112ae43978efcf9bea816ddc82b64 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 02:22:16 -0400 Subject: [PATCH 077/116] Fix clippy lint for ed448 on optional compilation path --- crypto/ed448/src/backend.rs | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/crypto/ed448/src/backend.rs b/crypto/ed448/src/backend.rs index 327fcf97..f68f9898 100644 --- a/crypto/ed448/src/backend.rs +++ b/crypto/ed448/src/backend.rs @@ -2,11 +2,19 @@ use zeroize::Zeroize; // Use black_box when possible 
#[rustversion::since(1.66)] -use core::hint::black_box; -#[rustversion::before(1.66)] -fn black_box(val: T) -> T { - val +mod black_box { + pub(crate) fn black_box(val: T) -> T { + #[allow(clippy::incompatible_msrv)] + core::hint::black_box(val) + } } +#[rustversion::before(1.66)] +mod black_box { + pub(crate) fn black_box(val: T) -> T { + val + } +} +use black_box::black_box; pub(crate) fn u8_from_bool(bit_ref: &mut bool) -> u8 { let bit_ref = black_box(bit_ref); From a8b8844e3fc97aa07e7097e8563c930312bd6d9d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 04:35:29 -0400 Subject: [PATCH 078/116] Fix MSRV for simple-request --- common/request/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/request/Cargo.toml b/common/request/Cargo.toml index e5018056..5f3bb445 100644 --- a/common/request/Cargo.toml +++ b/common/request/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/common/simple-requ authors = ["Luke Parker "] keywords = ["http", "https", "async", "request", "ssl"] edition = "2021" -rust-version = "1.64" +rust-version = "1.70" [package.metadata.docs.rs] all-features = true From cc662cb5910b4eaa503d02a748118c9b02822fb3 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 04:49:18 -0400 Subject: [PATCH 079/116] Version bumps, add necessary version specifications --- Cargo.lock | 4 ++-- crypto/dalek-ff-group/Cargo.toml | 2 +- crypto/dkg/dealer/Cargo.toml | 2 +- crypto/dkg/musig/Cargo.toml | 2 +- crypto/dkg/pedpop/Cargo.toml | 2 +- crypto/dkg/promote/Cargo.toml | 2 +- crypto/dkg/recovery/Cargo.toml | 2 +- crypto/ed448/Cargo.toml | 2 +- crypto/frost/Cargo.toml | 6 +++--- 9 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0c27a8b..25765c2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1979,7 +1979,7 @@ dependencies = [ [[package]] name = "dalek-ff-group" -version = "0.4.1" +version = "0.4.2" dependencies = [ "crypto-bigint", "curve25519-dalek", @@ -4812,7 +4812,7 @@ dependencies = [ [[package]] name = "minimal-ed448" -version = "0.4.0" +version = "0.4.1" dependencies = [ "crypto-bigint", "ff", diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index e5793c98..24b28fcc 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dalek-ff-group" -version = "0.4.1" +version = "0.4.2" description = "ff/group bindings around curve25519-dalek" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-group" diff --git a/crypto/dkg/dealer/Cargo.toml b/crypto/dkg/dealer/Cargo.toml index b1f35e89..9bc2d5d5 100644 --- a/crypto/dkg/dealer/Cargo.toml +++ b/crypto/dkg/dealer/Cargo.toml @@ -23,7 +23,7 @@ rand_core = { version = "0.6", default-features = false } std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false } ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false } -dkg = { path = "../", default-features = false } +dkg = { path = "../", version = "0.6", default-features = false } [features] std = [ diff --git a/crypto/dkg/musig/Cargo.toml b/crypto/dkg/musig/Cargo.toml index e2a971e7..42e508a1 100644 --- a/crypto/dkg/musig/Cargo.toml +++ b/crypto/dkg/musig/Cargo.toml @@ -27,7 +27,7 @@ std-shims = { version = "0.1", path = "../../../common/std-shims", default-featu multiexp = { path = "../../multiexp", version = "0.4", default-features = false } ciphersuite = { 
path = "../../ciphersuite", version = "^0.4.1", default-features = false } -dkg = { path = "../", default-features = false } +dkg = { path = "../", version = "0.6", default-features = false } [dev-dependencies] rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } diff --git a/crypto/dkg/pedpop/Cargo.toml b/crypto/dkg/pedpop/Cargo.toml index cfc128d1..358b7e00 100644 --- a/crypto/dkg/pedpop/Cargo.toml +++ b/crypto/dkg/pedpop/Cargo.toml @@ -30,7 +30,7 @@ ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features schnorr = { package = "schnorr-signatures", path = "../../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } -dkg = { path = "../", default-features = false, features = ["std"] } +dkg = { path = "../", version = "0.6", default-features = false, features = ["std"] } [dev-dependencies] rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } diff --git a/crypto/dkg/promote/Cargo.toml b/crypto/dkg/promote/Cargo.toml index e5f57ce9..9bd4f452 100644 --- a/crypto/dkg/promote/Cargo.toml +++ b/crypto/dkg/promote/Cargo.toml @@ -25,7 +25,7 @@ transcript = { package = "flexible-transcript", path = "../../transcript", versi ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } -dkg = { path = "../", default-features = false, features = ["std"] } +dkg = { path = "../", version = "0.6", default-features = false, features = ["std"] } [dev-dependencies] zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml index 96df11fc..17d7b0c3 100644 --- a/crypto/dkg/recovery/Cargo.toml +++ b/crypto/dkg/recovery/Cargo.toml @@ -22,7 +22,7 @@ zeroize = { version = "^1.5", default-features = false } thiserror = { version = "2", default-features = false } ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false } -dkg = { path = "../", default-features = false } +dkg = { path = "../", version = "0.6", default-features = false } [features] std = [ diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml index 2302d7b3..7deba509 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "minimal-ed448" -version = "0.4.0" +version = "0.4.1" description = "Unaudited, inefficient implementation of Ed448 in Rust" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ed448" diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 9e2f6ddd..0174fb8e 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "modular-frost" -version = "0.9.0" +version = "0.10.0" description = "Modular implementation of FROST over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost" @@ -40,8 +40,8 @@ multiexp = { path = "../multiexp", version = "0.4", default-features = false, fe schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] } -dkg-recovery = { path = "../dkg/recovery", 
default-features = false, features = ["std"], optional = true } -dkg-dealer = { path = "../dkg/dealer", default-features = false, features = ["std"], optional = true } +dkg-recovery = { path = "../dkg/recovery", version = "0.6", default-features = false, features = ["std"], optional = true } +dkg-dealer = { path = "../dkg/dealer", version = "0.6", default-features = false, features = ["std"], optional = true } [dev-dependencies] hex = "0.4" From cb489f9cef3f8eed8242cd6c0c60accf0ea72fa9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 04:55:27 -0400 Subject: [PATCH 080/116] Other version bumps --- Cargo.lock | 211 +++++++++++++++++++++++++---------- crypto/schnorrkel/Cargo.toml | 4 +- networks/bitcoin/Cargo.toml | 4 +- 3 files changed, 159 insertions(+), 60 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25765c2d..9104cb8b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1063,12 +1063,12 @@ checksum = "340e09e8399c7bd8912f495af6aa58bea0c9214773417ffaa8f6460f93aaee56" [[package]] name = "bitcoin-serai" -version = "0.3.0" +version = "0.4.0" dependencies = [ "bitcoin", "hex", "k256", - "modular-frost", + "modular-frost 0.10.0", "rand_core", "secp256k1", "serde", @@ -1547,6 +1547,24 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ciphersuite" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b7efe73ee654c605e965df90f3f938607ea601446414a681d3889f2b98c6799" +dependencies = [ + "dalek-ff-group", + "digest 0.10.7", + "ff", + "flexible-transcript", + "group", + "rand_core", + "sha2", + "std-shims", + "subtle", + "zeroize", +] + [[package]] name = "clang-sys" version = "1.8.1" @@ -2209,12 +2227,30 @@ dependencies = [ "syn 2.0.87", ] +[[package]] +name = "dkg" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9227585f6f00523c55bec967434dc1db2ee8f54baf902d76ed45d2c6bd37425f" +dependencies = [ + "chacha20", + "ciphersuite 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dleq 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "flexible-transcript", + "multiexp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core", + "schnorr-signatures 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "std-shims", + "thiserror 1.0.64", + "zeroize", +] + [[package]] name = "dkg" version = "0.6.0" dependencies = [ "borsh", - "ciphersuite", + "ciphersuite 0.4.1", "std-shims", "thiserror 2.0.14", "zeroize", @@ -2224,8 +2260,8 @@ dependencies = [ name = "dkg-dealer" version = "0.6.0" dependencies = [ - "ciphersuite", - "dkg", + "ciphersuite 0.4.1", + "dkg 0.6.0", "rand_core", "std-shims", "zeroize", @@ -2235,10 +2271,10 @@ dependencies = [ name = "dkg-musig" version = "0.6.0" dependencies = [ - "ciphersuite", - "dkg", + "ciphersuite 0.4.1", + "dkg 0.6.0", "dkg-recovery", - "multiexp", + "multiexp 0.4.0", "rand_core", "std-shims", "thiserror 2.0.14", @@ -2250,13 +2286,13 @@ name = "dkg-pedpop" version = "0.6.0" dependencies = [ "chacha20", - "ciphersuite", - "dkg", - "dleq", + "ciphersuite 0.4.1", + "dkg 0.6.0", + "dleq 0.4.1", "flexible-transcript", - "multiexp", + "multiexp 0.4.0", "rand_core", - "schnorr-signatures", + "schnorr-signatures 0.5.1", "thiserror 2.0.14", "zeroize", ] @@ -2265,10 +2301,10 @@ dependencies = [ name = "dkg-promote" version = "0.6.0" dependencies = [ - "ciphersuite", - "dkg", + "ciphersuite 0.4.1", + "dkg 0.6.0", "dkg-recovery", - "dleq", + "dleq 0.4.1", "flexible-transcript", "rand_core", "thiserror 
2.0.14", @@ -2279,8 +2315,8 @@ dependencies = [ name = "dkg-recovery" version = "0.6.0" dependencies = [ - "ciphersuite", - "dkg", + "ciphersuite 0.4.1", + "dkg 0.6.0", "thiserror 2.0.14", "zeroize", ] @@ -2297,13 +2333,28 @@ dependencies = [ "group", "hex-literal", "k256", - "multiexp", + "multiexp 0.4.0", "rand_core", "rustversion", "thiserror 2.0.14", "zeroize", ] +[[package]] +name = "dleq" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8d3af08cb0a3253930b6b9629fd61202ea34da117940b59ab58f3e6f5e1ba6c" +dependencies = [ + "digest 0.10.7", + "ff", + "flexible-transcript", + "group", + "rand_core", + "rustversion", + "zeroize", +] + [[package]] name = "dockertest" version = "0.5.0" @@ -2533,7 +2584,7 @@ dependencies = [ "flexible-transcript", "group", "k256", - "modular-frost", + "modular-frost 0.10.0", "rand_core", "thiserror 1.0.64", "tokio", @@ -2915,14 +2966,14 @@ dependencies = [ [[package]] name = "frost-schnorrkel" -version = "0.1.2" +version = "0.2.0" dependencies = [ - "ciphersuite", + "ciphersuite 0.4.1", "flexible-transcript", "group", - "modular-frost", + "modular-frost 0.10.0", "rand_core", - "schnorr-signatures", + "schnorr-signatures 0.5.1", "schnorrkel", "zeroize", ] @@ -4883,20 +4934,40 @@ dependencies = [ [[package]] name = "modular-frost" version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f85df4c42042f3264b98a24cc309ad9add42d1fede4c78cbf00377237bc2c946" dependencies = [ - "ciphersuite", + "ciphersuite 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "dalek-ff-group", "digest 0.10.7", - "dkg", + "dkg 0.5.1", + "flexible-transcript", + "multiexp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha", + "rand_core", + "schnorr-signatures 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "subtle", + "thiserror 1.0.64", + "zeroize", +] + +[[package]] +name = "modular-frost" +version = "0.10.0" +dependencies = [ + "ciphersuite 0.4.1", + "dalek-ff-group", + "digest 0.10.7", + "dkg 0.6.0", "dkg-dealer", "dkg-recovery", "flexible-transcript", "hex", "minimal-ed448", - "multiexp", + "multiexp 0.4.0", "rand_chacha", "rand_core", - "schnorr-signatures", + "schnorr-signatures 0.5.1", "serde_json", "subtle", "thiserror 2.0.14", @@ -4953,7 +5024,7 @@ dependencies = [ "dalek-ff-group", "flexible-transcript", "group", - "modular-frost", + "modular-frost 0.9.0", "monero-generators", "monero-io", "monero-primitives", @@ -5072,7 +5143,7 @@ dependencies = [ "flexible-transcript", "group", "hex", - "modular-frost", + "modular-frost 0.9.0", "monero-address", "monero-clsag", "monero-oxide", @@ -5130,6 +5201,20 @@ dependencies = [ "zeroize", ] +[[package]] +name = "multiexp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a383da1ae933078ddb1e4141f1dd617b512b4183779d6977e6451b0e644806" +dependencies = [ + "ff", + "group", + "rand_core", + "rustversion", + "std-shims", + "zeroize", +] + [[package]] name = "multihash" version = "0.18.1" @@ -7846,17 +7931,31 @@ dependencies = [ name = "schnorr-signatures" version = "0.5.1" dependencies = [ - "ciphersuite", + "ciphersuite 0.4.1", "dalek-ff-group", "flexible-transcript", "hex", - "multiexp", + "multiexp 0.4.0", "rand_core", "sha2", "std-shims", "zeroize", ] +[[package]] +name = "schnorr-signatures" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d9262fa48d8270b9d937aa68fb09fe3281aded9a671d999e8b82ce1065e952d6" +dependencies = [ + "ciphersuite 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "flexible-transcript", + "multiexp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core", + "std-shims", + "zeroize", +] + [[package]] name = "schnorrkel" version = "0.11.4" @@ -8048,12 +8147,12 @@ dependencies = [ "async-lock", "bitcoin", "blake2", - "ciphersuite", + "ciphersuite 0.4.1", "dockertest", "frame-system", "frost-schnorrkel", "hex", - "modular-frost", + "modular-frost 0.10.0", "monero-wallet", "multiaddr", "parity-scale-codec", @@ -8107,7 +8206,7 @@ dependencies = [ "async-trait", "blake2", "borsh", - "ciphersuite", + "ciphersuite 0.4.1", "env_logger", "flexible-transcript", "frost-schnorrkel", @@ -8115,10 +8214,10 @@ dependencies = [ "hex", "libp2p", "log", - "modular-frost", + "modular-frost 0.10.0", "parity-scale-codec", "rand_core", - "schnorr-signatures", + "schnorr-signatures 0.5.1", "serai-client", "serai-db", "serai-env", @@ -8139,8 +8238,8 @@ dependencies = [ "async-trait", "blake2", "borsh", - "ciphersuite", - "dkg", + "ciphersuite 0.4.1", + "dkg 0.6.0", "dockertest", "hex", "parity-scale-codec", @@ -8346,14 +8445,14 @@ name = "serai-message-queue" version = "0.1.0" dependencies = [ "borsh", - "ciphersuite", + "ciphersuite 0.4.1", "env_logger", "flexible-transcript", "hex", "log", "once_cell", "rand_core", - "schnorr-signatures", + "schnorr-signatures 0.5.1", "serai-db", "serai-env", "serai-primitives", @@ -8366,7 +8465,7 @@ dependencies = [ name = "serai-message-queue-tests" version = "0.1.0" dependencies = [ - "ciphersuite", + "ciphersuite 0.4.1", "dockertest", "hex", "rand_core", @@ -8382,17 +8481,17 @@ name = "serai-no-std-tests" version = "0.1.0" dependencies = [ "bitcoin-serai", - "ciphersuite", + "ciphersuite 0.4.1", "dalek-ff-group", - "dkg", + "dkg 0.6.0", "dkg-dealer", "dkg-musig", "dkg-recovery", - "dleq", + "dleq 0.4.1", "flexible-transcript", "minimal-ed448", - "multiexp", - "schnorr-signatures", + "multiexp 0.4.0", + "schnorr-signatures 0.5.1", ] [[package]] @@ -8445,7 +8544,7 @@ dependencies = [ name = "serai-orchestrator" version = "0.0.1" dependencies = [ - "ciphersuite", + "ciphersuite 0.4.1", "flexible-transcript", "hex", "home", @@ -8480,7 +8579,7 @@ dependencies = [ "async-trait", "bitcoin-serai", "borsh", - "ciphersuite", + "ciphersuite 0.4.1", "const-hex", "dalek-ff-group", "dkg-pedpop", @@ -8492,7 +8591,7 @@ dependencies = [ "hex", "k256", "log", - "modular-frost", + "modular-frost 0.10.0", "monero-simple-request-rpc", "monero-wallet", "parity-scale-codec", @@ -8518,7 +8617,7 @@ name = "serai-processor-messages" version = "0.1.0" dependencies = [ "borsh", - "dkg", + "dkg 0.6.0", "parity-scale-codec", "serai-coins-primitives", "serai-in-instructions-primitives", @@ -8532,9 +8631,9 @@ version = "0.1.0" dependencies = [ "bitcoin-serai", "borsh", - "ciphersuite", + "ciphersuite 0.4.1", "curve25519-dalek", - "dkg", + "dkg 0.6.0", "dockertest", "ethereum-serai", "hex", @@ -8667,7 +8766,7 @@ name = "serai-validator-sets-primitives" version = "0.1.0" dependencies = [ "borsh", - "ciphersuite", + "ciphersuite 0.4.1", "dkg-musig", "parity-scale-codec", "scale-info", @@ -10408,7 +10507,7 @@ version = "0.1.0" dependencies = [ "async-trait", "blake2", - "ciphersuite", + "ciphersuite 0.4.1", "flexible-transcript", "futures-channel", "futures-util", @@ -10417,7 +10516,7 @@ dependencies = [ "parity-scale-codec", "rand", "rand_chacha", - "schnorr-signatures", + 
"schnorr-signatures 0.5.1", "serai-db", "subtle", "tendermint-machine", diff --git a/crypto/schnorrkel/Cargo.toml b/crypto/schnorrkel/Cargo.toml index 70b96612..23d87f79 100644 --- a/crypto/schnorrkel/Cargo.toml +++ b/crypto/schnorrkel/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "frost-schnorrkel" -version = "0.1.2" +version = "0.2.0" description = "modular-frost Algorithm compatible with Schnorrkel" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorrkel" @@ -26,7 +26,7 @@ group = "0.13" ciphersuite = { path = "../ciphersuite", version = "^0.4.1", features = ["std", "ristretto"] } schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1" } -frost = { path = "../frost", package = "modular-frost", version = "^0.9.0", features = ["ristretto"] } +frost = { path = "../frost", package = "modular-frost", version = "^0.10.0", features = ["ristretto"] } schnorrkel = { version = "0.11" } diff --git a/networks/bitcoin/Cargo.toml b/networks/bitcoin/Cargo.toml index 02f834d8..7ab21eae 100644 --- a/networks/bitcoin/Cargo.toml +++ b/networks/bitcoin/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "bitcoin-serai" -version = "0.3.0" +version = "0.4.0" description = "A Bitcoin library for FROST-signing transactions" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/networks/bitcoin" @@ -26,7 +26,7 @@ rand_core = { version = "0.6", default-features = false } bitcoin = { version = "0.32", default-features = false } k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] } -frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.9", default-features = false, features = ["secp256k1"], optional = true } +frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.10", default-features = false, features = ["secp256k1"], optional = true } hex = { version = "0.4", default-features = false, optional = true } serde = { version = "1", default-features = false, features = ["derive"], optional = true } From cb0deadf9a5ec3f072bc95d6fd501b5b0061de43 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 04:59:23 -0400 Subject: [PATCH 081/116] Version bump flexible-transcript --- crypto/transcript/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crypto/transcript/Cargo.toml b/crypto/transcript/Cargo.toml index 566ad56b..39d84bb9 100644 --- a/crypto/transcript/Cargo.toml +++ b/crypto/transcript/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flexible-transcript" -version = "0.3.2" +version = "0.3.3" description = "A simple transcript trait definition, along with viable options" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/transcript" From 7c8f13ab280bdcd697d0764c63b9d13c29100fec Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 05:01:27 -0400 Subject: [PATCH 082/116] Raise flexible-transcript requirement as required --- crypto/dkg/pedpop/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crypto/dkg/pedpop/Cargo.toml b/crypto/dkg/pedpop/Cargo.toml index 358b7e00..375c9629 100644 --- a/crypto/dkg/pedpop/Cargo.toml +++ b/crypto/dkg/pedpop/Cargo.toml @@ -22,7 +22,7 @@ thiserror = { version = "2", default-features = false, features = ["std"] } zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } rand_core = { version = "0.6", default-features = false, features = ["std"] } -transcript = { package = 
"flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] } +transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.3", default-features = false, features = ["std", "recommended"] } chacha20 = { version = "0.9", default-features = false, features = ["std", "zeroize"] } multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["std"] } From 104c0d44921fcedc84e504708035fc69522582dd Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 05:26:41 -0400 Subject: [PATCH 083/116] Rename `ThresholdKeys::secret_share` to `ThresholdKeys::original_secret_share` --- Cargo.lock | 40 +++++++++++++++---------------- crypto/dkg/Cargo.toml | 2 +- crypto/dkg/musig/src/tests.rs | 2 +- crypto/dkg/promote/Cargo.toml | 4 ++-- crypto/dkg/promote/src/lib.rs | 6 ++--- crypto/dkg/promote/src/tests.rs | 7 ++++-- crypto/dkg/src/lib.rs | 6 ++--- crypto/frost/Cargo.toml | 4 ++-- crypto/frost/src/sign.rs | 7 ++++-- crypto/frost/src/tests/vectors.rs | 15 ++++++++---- 10 files changed, 53 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9104cb8b..e1681e15 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1068,7 +1068,7 @@ dependencies = [ "bitcoin", "hex", "k256", - "modular-frost 0.10.0", + "modular-frost 0.10.1", "rand_core", "secp256k1", "serde", @@ -2247,7 +2247,7 @@ dependencies = [ [[package]] name = "dkg" -version = "0.6.0" +version = "0.6.1" dependencies = [ "borsh", "ciphersuite 0.4.1", @@ -2261,7 +2261,7 @@ name = "dkg-dealer" version = "0.6.0" dependencies = [ "ciphersuite 0.4.1", - "dkg 0.6.0", + "dkg 0.6.1", "rand_core", "std-shims", "zeroize", @@ -2272,7 +2272,7 @@ name = "dkg-musig" version = "0.6.0" dependencies = [ "ciphersuite 0.4.1", - "dkg 0.6.0", + "dkg 0.6.1", "dkg-recovery", "multiexp 0.4.0", "rand_core", @@ -2287,7 +2287,7 @@ version = "0.6.0" dependencies = [ "chacha20", "ciphersuite 0.4.1", - "dkg 0.6.0", + "dkg 0.6.1", "dleq 0.4.1", "flexible-transcript", "multiexp 0.4.0", @@ -2299,10 +2299,10 @@ dependencies = [ [[package]] name = "dkg-promote" -version = "0.6.0" +version = "0.6.1" dependencies = [ "ciphersuite 0.4.1", - "dkg 0.6.0", + "dkg 0.6.1", "dkg-recovery", "dleq 0.4.1", "flexible-transcript", @@ -2316,7 +2316,7 @@ name = "dkg-recovery" version = "0.6.0" dependencies = [ "ciphersuite 0.4.1", - "dkg 0.6.0", + "dkg 0.6.1", "thiserror 2.0.14", "zeroize", ] @@ -2584,7 +2584,7 @@ dependencies = [ "flexible-transcript", "group", "k256", - "modular-frost 0.10.0", + "modular-frost 0.10.1", "rand_core", "thiserror 1.0.64", "tokio", @@ -2748,7 +2748,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flexible-transcript" -version = "0.3.2" +version = "0.3.3" dependencies = [ "blake2", "digest 0.10.7", @@ -2971,7 +2971,7 @@ dependencies = [ "ciphersuite 0.4.1", "flexible-transcript", "group", - "modular-frost 0.10.0", + "modular-frost 0.10.1", "rand_core", "schnorr-signatures 0.5.1", "schnorrkel", @@ -4953,12 +4953,12 @@ dependencies = [ [[package]] name = "modular-frost" -version = "0.10.0" +version = "0.10.1" dependencies = [ "ciphersuite 0.4.1", "dalek-ff-group", "digest 0.10.7", - "dkg 0.6.0", + "dkg 0.6.1", "dkg-dealer", "dkg-recovery", "flexible-transcript", @@ -8152,7 +8152,7 @@ dependencies = [ "frame-system", "frost-schnorrkel", "hex", - "modular-frost 0.10.0", + "modular-frost 0.10.1", "monero-wallet", "multiaddr", "parity-scale-codec", @@ -8214,7 +8214,7 @@ 
dependencies = [ "hex", "libp2p", "log", - "modular-frost 0.10.0", + "modular-frost 0.10.1", "parity-scale-codec", "rand_core", "schnorr-signatures 0.5.1", @@ -8239,7 +8239,7 @@ dependencies = [ "blake2", "borsh", "ciphersuite 0.4.1", - "dkg 0.6.0", + "dkg 0.6.1", "dockertest", "hex", "parity-scale-codec", @@ -8483,7 +8483,7 @@ dependencies = [ "bitcoin-serai", "ciphersuite 0.4.1", "dalek-ff-group", - "dkg 0.6.0", + "dkg 0.6.1", "dkg-dealer", "dkg-musig", "dkg-recovery", @@ -8591,7 +8591,7 @@ dependencies = [ "hex", "k256", "log", - "modular-frost 0.10.0", + "modular-frost 0.10.1", "monero-simple-request-rpc", "monero-wallet", "parity-scale-codec", @@ -8617,7 +8617,7 @@ name = "serai-processor-messages" version = "0.1.0" dependencies = [ "borsh", - "dkg 0.6.0", + "dkg 0.6.1", "parity-scale-codec", "serai-coins-primitives", "serai-in-instructions-primitives", @@ -8633,7 +8633,7 @@ dependencies = [ "borsh", "ciphersuite 0.4.1", "curve25519-dalek", - "dkg 0.6.0", + "dkg 0.6.1", "dockertest", "ethereum-serai", "hex", diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 51dc9162..1ac689b8 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dkg" -version = "0.6.0" +version = "0.6.1" description = "Distributed key generation over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" diff --git a/crypto/dkg/musig/src/tests.rs b/crypto/dkg/musig/src/tests.rs index a48dda68..a89404a1 100644 --- a/crypto/dkg/musig/src/tests.rs +++ b/crypto/dkg/musig/src/tests.rs @@ -47,7 +47,7 @@ pub fn test_musig() { verification_shares.insert( these_keys.params().i(), - ::generator() * **these_keys.secret_share(), + ::generator() * **these_keys.original_secret_share(), ); assert_eq!(these_keys.group_key(), group_key); diff --git a/crypto/dkg/promote/Cargo.toml b/crypto/dkg/promote/Cargo.toml index 9bd4f452..5f2ff181 100644 --- a/crypto/dkg/promote/Cargo.toml +++ b/crypto/dkg/promote/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dkg-promote" -version = "0.6.0" +version = "0.6.1" description = "Promotions for keys from the dkg crate" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/promote" @@ -25,7 +25,7 @@ transcript = { package = "flexible-transcript", path = "../../transcript", versi ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] } -dkg = { path = "../", version = "0.6", default-features = false, features = ["std"] } +dkg = { path = "../", version = "0.6.1", default-features = false, features = ["std"] } [dev-dependencies] zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } diff --git a/crypto/dkg/promote/src/lib.rs b/crypto/dkg/promote/src/lib.rs index 6fb08807..d379ea0f 100644 --- a/crypto/dkg/promote/src/lib.rs +++ b/crypto/dkg/promote/src/lib.rs @@ -104,12 +104,12 @@ impl> GeneratorPromotion< ) -> (GeneratorPromotion, GeneratorProof) { // Do a DLEqProof for the new generator let proof = GeneratorProof { - share: C2::generator() * base.secret_share().deref(), + share: C2::generator() * base.original_secret_share().deref(), proof: DLEqProof::prove( rng, &mut transcript(&base.original_group_key(), base.params().i()), &[C1::generator(), C2::generator()], - base.secret_share(), + base.original_secret_share(), ), }; @@ -159,7 +159,7 @@ impl> GeneratorPromotion< 
ThresholdKeys::new( params, self.base.interpolation().clone(), - self.base.secret_share().clone(), + self.base.original_secret_share().clone(), verification_shares, ) .unwrap(), diff --git a/crypto/dkg/promote/src/tests.rs b/crypto/dkg/promote/src/tests.rs index 1cae60d9..b46dfbbf 100644 --- a/crypto/dkg/promote/src/tests.rs +++ b/crypto/dkg/promote/src/tests.rs @@ -99,13 +99,16 @@ fn test_generator_promotion() { for (i, promoting) in promotions.drain() { let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap(); assert_eq!(keys[usize::from(u16::from(i) - 1)].params(), promoted.params()); - assert_eq!(keys[usize::from(u16::from(i) - 1)].secret_share(), promoted.secret_share()); + assert_eq!( + keys[usize::from(u16::from(i) - 1)].original_secret_share(), + promoted.original_secret_share() + ); assert_eq!(new_group_key, promoted.group_key()); for l in 0 .. PARTICIPANTS { let verification_share = promoted.original_verification_share(Participant::new(l + 1).unwrap()); assert_eq!( - AltGenerator::::generator() * **keys[usize::from(l)].secret_share(), + AltGenerator::::generator() * **keys[usize::from(l)].original_secret_share(), verification_share ); } diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index b98236a8..064a6a10 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -444,8 +444,8 @@ impl ThresholdKeys { (self.core.group_key * self.scalar) + (C::generator() * self.offset) } - /// Return the secret share for these keys. - pub fn secret_share(&self) -> &Zeroizing { + /// Return the underlying secret share for these keys, without any tweaks applied. + pub fn original_secret_share(&self) -> &Zeroizing { &self.core.secret_share } @@ -489,7 +489,7 @@ impl ThresholdKeys { } // The interpolation occurs multiplicatively, letting us scale by the scalar now - let secret_share_scaled = Zeroizing::new(self.scalar * self.secret_share().deref()); + let secret_share_scaled = Zeroizing::new(self.scalar * self.original_secret_share().deref()); let mut secret_share = Zeroizing::new( self.core.interpolation.interpolation_factor(self.params().i(), &included) * secret_share_scaled.deref(), diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 0174fb8e..a0d52366 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "modular-frost" -version = "0.10.0" +version = "0.10.1" description = "Modular implementation of FROST over ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost" @@ -39,7 +39,7 @@ multiexp = { path = "../multiexp", version = "0.4", default-features = false, fe schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false, features = ["std"] } -dkg = { path = "../dkg", version = "0.6", default-features = false, features = ["std"] } +dkg = { path = "../dkg", version = "0.6.1", default-features = false, features = ["std"] } dkg-recovery = { path = "../dkg/recovery", version = "0.6", default-features = false, features = ["std"], optional = true } dkg-dealer = { path = "../dkg/dealer", version = "0.6", default-features = false, features = ["std"], optional = true } diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 4f5f59bd..b3be81fd 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -125,8 +125,11 @@ impl> AlgorithmMachine { let mut params = self.params; let mut rng = ChaCha20Rng::from_seed(*seed.0); - let (nonces, commitments) = - Commitments::new::<_>(&mut rng, 
params.keys.secret_share(), ¶ms.algorithm.nonces()); + let (nonces, commitments) = Commitments::new::<_>( + &mut rng, + params.keys.original_secret_share(), + ¶ms.algorithm.nonces(), + ); let addendum = params.algorithm.preprocess_addendum(&mut rng, ¶ms.keys); let preprocess = Preprocess { commitments, addendum }; diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index a5369a02..d5cda345 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -133,7 +133,7 @@ fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap>( // Calculate the expected nonces let mut expected = (C::generator() * - C::random_nonce(keys[i].secret_share(), &mut TransparentRng(vec![randomness.0])).deref()) + C::random_nonce( + keys[i].original_secret_share(), + &mut TransparentRng(vec![randomness.0]), + ) + .deref()) .to_bytes() .as_ref() .to_vec(); expected.extend( (C::generator() * - C::random_nonce(keys[i].secret_share(), &mut TransparentRng(vec![randomness.1])) - .deref()) + C::random_nonce( + keys[i].original_secret_share(), + &mut TransparentRng(vec![randomness.1]), + ) + .deref()) .to_bytes() .as_ref(), ); From 153f6f2f2f536d3722b657100a0add5fa960fd2e Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 06:33:41 -0400 Subject: [PATCH 084/116] Update to a monero-oxide patched to dkg 0.6 --- Cargo.lock | 243 +++++++++++------------------------- processor/Cargo.toml | 4 +- substrate/client/Cargo.toml | 2 +- tests/full-stack/Cargo.toml | 4 +- tests/processor/Cargo.toml | 4 +- 5 files changed, 79 insertions(+), 178 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e1681e15..92273d03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1068,7 +1068,7 @@ dependencies = [ "bitcoin", "hex", "k256", - "modular-frost 0.10.1", + "modular-frost", "rand_core", "secp256k1", "serde", @@ -1547,24 +1547,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ciphersuite" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b7efe73ee654c605e965df90f3f938607ea601446414a681d3889f2b98c6799" -dependencies = [ - "dalek-ff-group", - "digest 0.10.7", - "ff", - "flexible-transcript", - "group", - "rand_core", - "sha2", - "std-shims", - "subtle", - "zeroize", -] - [[package]] name = "clang-sys" version = "1.8.1" @@ -2227,30 +2209,12 @@ dependencies = [ "syn 2.0.87", ] -[[package]] -name = "dkg" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9227585f6f00523c55bec967434dc1db2ee8f54baf902d76ed45d2c6bd37425f" -dependencies = [ - "chacha20", - "ciphersuite 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dleq 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "flexible-transcript", - "multiexp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core", - "schnorr-signatures 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "std-shims", - "thiserror 1.0.64", - "zeroize", -] - [[package]] name = "dkg" version = "0.6.1" dependencies = [ "borsh", - "ciphersuite 0.4.1", + "ciphersuite", "std-shims", "thiserror 2.0.14", "zeroize", @@ -2260,8 +2224,8 @@ dependencies = [ name = "dkg-dealer" version = "0.6.0" dependencies = [ - "ciphersuite 0.4.1", - "dkg 0.6.1", + "ciphersuite", + "dkg", "rand_core", "std-shims", "zeroize", @@ -2271,10 +2235,10 @@ dependencies = [ name = "dkg-musig" version = "0.6.0" dependencies = [ - "ciphersuite 0.4.1", - "dkg 0.6.1", + "ciphersuite", + "dkg", "dkg-recovery", - "multiexp 
0.4.0", + "multiexp", "rand_core", "std-shims", "thiserror 2.0.14", @@ -2286,13 +2250,13 @@ name = "dkg-pedpop" version = "0.6.0" dependencies = [ "chacha20", - "ciphersuite 0.4.1", - "dkg 0.6.1", - "dleq 0.4.1", + "ciphersuite", + "dkg", + "dleq", "flexible-transcript", - "multiexp 0.4.0", + "multiexp", "rand_core", - "schnorr-signatures 0.5.1", + "schnorr-signatures", "thiserror 2.0.14", "zeroize", ] @@ -2301,10 +2265,10 @@ dependencies = [ name = "dkg-promote" version = "0.6.1" dependencies = [ - "ciphersuite 0.4.1", - "dkg 0.6.1", + "ciphersuite", + "dkg", "dkg-recovery", - "dleq 0.4.1", + "dleq", "flexible-transcript", "rand_core", "thiserror 2.0.14", @@ -2315,8 +2279,8 @@ dependencies = [ name = "dkg-recovery" version = "0.6.0" dependencies = [ - "ciphersuite 0.4.1", - "dkg 0.6.1", + "ciphersuite", + "dkg", "thiserror 2.0.14", "zeroize", ] @@ -2333,28 +2297,13 @@ dependencies = [ "group", "hex-literal", "k256", - "multiexp 0.4.0", + "multiexp", "rand_core", "rustversion", "thiserror 2.0.14", "zeroize", ] -[[package]] -name = "dleq" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8d3af08cb0a3253930b6b9629fd61202ea34da117940b59ab58f3e6f5e1ba6c" -dependencies = [ - "digest 0.10.7", - "ff", - "flexible-transcript", - "group", - "rand_core", - "rustversion", - "zeroize", -] - [[package]] name = "dockertest" version = "0.5.0" @@ -2584,7 +2533,7 @@ dependencies = [ "flexible-transcript", "group", "k256", - "modular-frost 0.10.1", + "modular-frost", "rand_core", "thiserror 1.0.64", "tokio", @@ -2968,12 +2917,12 @@ dependencies = [ name = "frost-schnorrkel" version = "0.2.0" dependencies = [ - "ciphersuite 0.4.1", + "ciphersuite", "flexible-transcript", "group", - "modular-frost 0.10.1", + "modular-frost", "rand_core", - "schnorr-signatures 0.5.1", + "schnorr-signatures", "schnorrkel", "zeroize", ] @@ -4931,43 +4880,23 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "modular-frost" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f85df4c42042f3264b98a24cc309ad9add42d1fede4c78cbf00377237bc2c946" -dependencies = [ - "ciphersuite 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dalek-ff-group", - "digest 0.10.7", - "dkg 0.5.1", - "flexible-transcript", - "multiexp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha", - "rand_core", - "schnorr-signatures 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "subtle", - "thiserror 1.0.64", - "zeroize", -] - [[package]] name = "modular-frost" version = "0.10.1" dependencies = [ - "ciphersuite 0.4.1", + "ciphersuite", "dalek-ff-group", "digest 0.10.7", - "dkg 0.6.1", + "dkg", "dkg-dealer", "dkg-recovery", "flexible-transcript", "hex", "minimal-ed448", - "multiexp 0.4.0", + "multiexp", "rand_chacha", "rand_core", - "schnorr-signatures 0.5.1", + "schnorr-signatures", "serde_json", "subtle", "thiserror 2.0.14", @@ -4977,20 +4906,20 @@ dependencies = [ [[package]] name = "monero-address" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "monero-io", "monero-primitives", "std-shims", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] [[package]] name = "monero-borromean" version = "0.1.0" 
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "monero-generators", @@ -5003,7 +4932,7 @@ dependencies = [ [[package]] name = "monero-bulletproofs" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "monero-generators", @@ -5011,20 +4940,20 @@ dependencies = [ "monero-primitives", "rand_core", "std-shims", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] [[package]] name = "monero-clsag" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "dalek-ff-group", "flexible-transcript", "group", - "modular-frost 0.9.0", + "modular-frost", "monero-generators", "monero-io", "monero-primitives", @@ -5032,14 +4961,14 @@ dependencies = [ "rand_core", "std-shims", "subtle", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] [[package]] name = "monero-generators" version = "0.4.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "dalek-ff-group", @@ -5053,7 +4982,7 @@ dependencies = [ [[package]] name = "monero-io" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "std-shims", @@ -5062,21 +4991,21 @@ dependencies = [ [[package]] name = "monero-mlsag" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "monero-generators", "monero-io", "monero-primitives", "std-shims", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] [[package]] name = "monero-oxide" version = "0.1.4-alpha" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "hex-literal", @@ -5094,7 +5023,7 @@ dependencies = [ [[package]] name = "monero-primitives" version = "0.1.0" -source = 
"git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "monero-generators", @@ -5107,7 +5036,7 @@ dependencies = [ [[package]] name = "monero-rpc" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "hex", @@ -5116,14 +5045,14 @@ dependencies = [ "serde", "serde_json", "std-shims", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] [[package]] name = "monero-simple-request-rpc" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "digest_auth", "hex", @@ -5136,14 +5065,14 @@ dependencies = [ [[package]] name = "monero-wallet" version = "0.1.0" -source = "git+https://github.com/monero-oxide/monero-oxide?rev=f19b0f57fe7cbbd643b51091c63de29afb0976e4#f19b0f57fe7cbbd643b51091c63de29afb0976e4" +source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b" dependencies = [ "curve25519-dalek", "dalek-ff-group", "flexible-transcript", "group", "hex", - "modular-frost 0.9.0", + "modular-frost", "monero-address", "monero-clsag", "monero-oxide", @@ -5153,7 +5082,7 @@ dependencies = [ "rand_core", "rand_distr", "std-shims", - "thiserror 1.0.64", + "thiserror 2.0.14", "zeroize", ] @@ -5201,20 +5130,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "multiexp" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25a383da1ae933078ddb1e4141f1dd617b512b4183779d6977e6451b0e644806" -dependencies = [ - "ff", - "group", - "rand_core", - "rustversion", - "std-shims", - "zeroize", -] - [[package]] name = "multihash" version = "0.18.1" @@ -7931,31 +7846,17 @@ dependencies = [ name = "schnorr-signatures" version = "0.5.1" dependencies = [ - "ciphersuite 0.4.1", + "ciphersuite", "dalek-ff-group", "flexible-transcript", "hex", - "multiexp 0.4.0", + "multiexp", "rand_core", "sha2", "std-shims", "zeroize", ] -[[package]] -name = "schnorr-signatures" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9262fa48d8270b9d937aa68fb09fe3281aded9a671d999e8b82ce1065e952d6" -dependencies = [ - "ciphersuite 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "flexible-transcript", - "multiexp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core", - "std-shims", - "zeroize", -] - [[package]] name = "schnorrkel" version = "0.11.4" @@ -8147,12 +8048,12 @@ dependencies = [ "async-lock", "bitcoin", "blake2", - "ciphersuite 0.4.1", + "ciphersuite", "dockertest", "frame-system", "frost-schnorrkel", "hex", - "modular-frost 0.10.1", + "modular-frost", "monero-wallet", "multiaddr", "parity-scale-codec", @@ -8206,7 +8107,7 @@ dependencies = [ "async-trait", "blake2", "borsh", - "ciphersuite 
0.4.1", + "ciphersuite", "env_logger", "flexible-transcript", "frost-schnorrkel", @@ -8214,10 +8115,10 @@ dependencies = [ "hex", "libp2p", "log", - "modular-frost 0.10.1", + "modular-frost", "parity-scale-codec", "rand_core", - "schnorr-signatures 0.5.1", + "schnorr-signatures", "serai-client", "serai-db", "serai-env", @@ -8238,8 +8139,8 @@ dependencies = [ "async-trait", "blake2", "borsh", - "ciphersuite 0.4.1", - "dkg 0.6.1", + "ciphersuite", + "dkg", "dockertest", "hex", "parity-scale-codec", @@ -8445,14 +8346,14 @@ name = "serai-message-queue" version = "0.1.0" dependencies = [ "borsh", - "ciphersuite 0.4.1", + "ciphersuite", "env_logger", "flexible-transcript", "hex", "log", "once_cell", "rand_core", - "schnorr-signatures 0.5.1", + "schnorr-signatures", "serai-db", "serai-env", "serai-primitives", @@ -8465,7 +8366,7 @@ dependencies = [ name = "serai-message-queue-tests" version = "0.1.0" dependencies = [ - "ciphersuite 0.4.1", + "ciphersuite", "dockertest", "hex", "rand_core", @@ -8481,17 +8382,17 @@ name = "serai-no-std-tests" version = "0.1.0" dependencies = [ "bitcoin-serai", - "ciphersuite 0.4.1", + "ciphersuite", "dalek-ff-group", - "dkg 0.6.1", + "dkg", "dkg-dealer", "dkg-musig", "dkg-recovery", - "dleq 0.4.1", + "dleq", "flexible-transcript", "minimal-ed448", - "multiexp 0.4.0", - "schnorr-signatures 0.5.1", + "multiexp", + "schnorr-signatures", ] [[package]] @@ -8544,7 +8445,7 @@ dependencies = [ name = "serai-orchestrator" version = "0.0.1" dependencies = [ - "ciphersuite 0.4.1", + "ciphersuite", "flexible-transcript", "hex", "home", @@ -8579,7 +8480,7 @@ dependencies = [ "async-trait", "bitcoin-serai", "borsh", - "ciphersuite 0.4.1", + "ciphersuite", "const-hex", "dalek-ff-group", "dkg-pedpop", @@ -8591,7 +8492,7 @@ dependencies = [ "hex", "k256", "log", - "modular-frost 0.10.1", + "modular-frost", "monero-simple-request-rpc", "monero-wallet", "parity-scale-codec", @@ -8617,7 +8518,7 @@ name = "serai-processor-messages" version = "0.1.0" dependencies = [ "borsh", - "dkg 0.6.1", + "dkg", "parity-scale-codec", "serai-coins-primitives", "serai-in-instructions-primitives", @@ -8631,9 +8532,9 @@ version = "0.1.0" dependencies = [ "bitcoin-serai", "borsh", - "ciphersuite 0.4.1", + "ciphersuite", "curve25519-dalek", - "dkg 0.6.1", + "dkg", "dockertest", "ethereum-serai", "hex", @@ -8766,7 +8667,7 @@ name = "serai-validator-sets-primitives" version = "0.1.0" dependencies = [ "borsh", - "ciphersuite 0.4.1", + "ciphersuite", "dkg-musig", "parity-scale-codec", "scale-info", @@ -10507,7 +10408,7 @@ version = "0.1.0" dependencies = [ "async-trait", "blake2", - "ciphersuite 0.4.1", + "ciphersuite", "flexible-transcript", "futures-channel", "futures-util", @@ -10516,7 +10417,7 @@ dependencies = [ "parity-scale-codec", "rand", "rand_chacha", - "schnorr-signatures 0.5.1", + "schnorr-signatures", "serai-db", "subtle", "tendermint-machine", diff --git a/processor/Cargo.toml b/processor/Cargo.toml index b68d8a89..c67f24f4 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -53,8 +53,8 @@ ethereum-serai = { path = "../networks/ethereum", default-features = false, opti # Monero dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"], optional = true } -monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4", default-features = false, optional = true } -monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = 
"f19b0f57fe7cbbd643b51091c63de29afb0976e4", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true } +monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", default-features = false, optional = true } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true } # Application log = { version = "0.4", default-features = false, features = ["std"] } diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml index 80ca60eb..5685cc30 100644 --- a/substrate/client/Cargo.toml +++ b/substrate/client/Cargo.toml @@ -39,7 +39,7 @@ simple-request = { path = "../../common/request", version = "0.1", optional = tr bitcoin = { version = "0.32", optional = true } ciphersuite = { path = "../../crypto/ciphersuite", version = "0.4", optional = true } -monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4", version = "0.1.0", default-features = false, features = ["std"], optional = true } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", version = "0.1.0", default-features = false, features = ["std"], optional = true } [dev-dependencies] rand_core = "0.6" diff --git a/tests/full-stack/Cargo.toml b/tests/full-stack/Cargo.toml index ddcbbbdc..aed9526a 100644 --- a/tests/full-stack/Cargo.toml +++ b/tests/full-stack/Cargo.toml @@ -27,8 +27,8 @@ rand_core = { version = "0.6", default-features = false } curve25519-dalek = { version = "4", features = ["rand_core"] } bitcoin-serai = { path = "../../networks/bitcoin" } -monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } -monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } +monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" } scale = { package = "parity-scale-codec", version = "3" } serde = "1" diff --git a/tests/processor/Cargo.toml b/tests/processor/Cargo.toml index 9da9a347..918899a2 100644 --- a/tests/processor/Cargo.toml +++ b/tests/processor/Cargo.toml @@ -31,8 +31,8 @@ bitcoin-serai = { path = "../../networks/bitcoin" } k256 = "0.13" ethereum-serai = { path = "../../networks/ethereum" } -monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } -monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "f19b0f57fe7cbbd643b51091c63de29afb0976e4" } +monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" } +monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" } messages = { package = "serai-processor-messages", path = "../../processor/messages" } From 5e60ea971845fd547c5679ab4ee75a41fa557753 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 06:39:39 -0400 Subject: [PATCH 085/116] Don't offset nonces yet negate to achieve an 
even Y coordinate Replaces an iterative loop with an immediate result, if action is necessary. --- Cargo.lock | 1 + crypto/frost/src/algorithm.rs | 2 ++ networks/bitcoin/Cargo.toml | 2 ++ networks/bitcoin/src/crypto.rs | 36 +++++++++++----------------- networks/bitcoin/src/tests/crypto.rs | 7 +++--- networks/bitcoin/src/wallet/mod.rs | 17 +++++++------ networks/bitcoin/tests/wallet.rs | 2 +- 7 files changed, 33 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 92273d03..c10e41fd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1075,6 +1075,7 @@ dependencies = [ "serde_json", "simple-request", "std-shims", + "subtle", "thiserror 1.0.64", "tokio", "zeroize", diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index b595e03b..7e4d6167 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -135,6 +135,8 @@ pub trait Hram: Send + Sync + Clone { } /// Schnorr signature algorithm ((R, s) where s = r + cx). +/// +/// `verify`, `verify_share` must be called after `sign_share` is called. #[derive(Clone)] pub struct Schnorr> { transcript: T, diff --git a/networks/bitcoin/Cargo.toml b/networks/bitcoin/Cargo.toml index 7ab21eae..338237b2 100644 --- a/networks/bitcoin/Cargo.toml +++ b/networks/bitcoin/Cargo.toml @@ -20,6 +20,7 @@ std-shims = { version = "0.1.1", path = "../../common/std-shims", default-featur thiserror = { version = "1", default-features = false, optional = true } +subtle = { version = "2", default-features = false } zeroize = { version = "^1.5", default-features = false } rand_core = { version = "0.6", default-features = false } @@ -46,6 +47,7 @@ std = [ "thiserror", + "subtle/std", "zeroize/std", "rand_core/std", diff --git a/networks/bitcoin/src/crypto.rs b/networks/bitcoin/src/crypto.rs index 11510909..12aa2c1e 100644 --- a/networks/bitcoin/src/crypto.rs +++ b/networks/bitcoin/src/crypto.rs @@ -1,3 +1,5 @@ +use subtle::{Choice, ConstantTimeEq, ConditionallySelectable}; + use k256::{ elliptic_curve::sec1::{Tag, ToEncodedPoint}, ProjectivePoint, @@ -17,17 +19,9 @@ pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey { XOnlyPublicKey::from_slice(&x(key)).expect("x_only was passed a point which was infinity or odd") } -/// Make a point even by adding the generator until it is even. -/// -/// Returns the even point and the amount of additions required. -#[cfg(any(feature = "std", feature = "hazmat"))] -pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) { - let mut c = 0; - while key.to_encoded_point(true).tag() == Tag::CompressedOddY { - key += ProjectivePoint::GENERATOR; - c += 1; - } - (key, c) +/// Return if a point must be negated to have an even Y coordinate and be eligible for use. 
+pub(crate) fn needs_negation(key: &ProjectivePoint) -> Choice { + u8::from(key.to_encoded_point(true).tag()).ct_eq(&u8::from(Tag::CompressedOddY)) } #[cfg(feature = "std")] @@ -60,25 +54,27 @@ mod frost_crypto { #[allow(non_snake_case)] impl HramTrait for Hram { fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar { - // Convert the nonce to be even - let (R, _) = make_even(*R); - const TAG_HASH: Sha256 = Sha256::const_hash(b"BIP0340/challenge"); let mut data = Sha256::engine(); data.input(TAG_HASH.as_ref()); data.input(TAG_HASH.as_ref()); - data.input(&x(&R)); + data.input(&x(R)); data.input(&x(A)); data.input(m); - Scalar::reduce(U256::from_be_slice(Sha256::from_engine(data).as_ref())) + let c = Scalar::reduce(U256::from_be_slice(Sha256::from_engine(data).as_ref())); + // If the nonce was odd, sign `r - cx` instead of `r + cx`, allowing us to negate `s` at the + // end to sign as `-r + cx` + <_>::conditional_select(&c, &-c, needs_negation(R)) } } /// BIP-340 Schnorr signature algorithm. /// - /// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic. + /// This must be used with a ThresholdKeys whose group key is even. If it is odd, this may panic. + /// + /// `verify`, `verify_share` must be called after `sign_share` is called. #[derive(Clone)] pub struct Schnorr(FrostSchnorr); impl Schnorr { @@ -141,11 +137,7 @@ mod frost_crypto { sum: Scalar, ) -> Option { self.0.verify(group_key, nonces, sum).map(|mut sig| { - // Make the R of the final signature even - let offset; - (sig.R, offset) = make_even(sig.R); - // s = r + cx. Since we added to the r, add to s - sig.s += Scalar::from(offset); + sig.s = <_>::conditional_select(&sum, &-sum, needs_negation(&sig.R)); // Convert to a Bitcoin signature by dropping the byte for the point's sign bit sig.serialize()[1 ..].try_into().unwrap() }) diff --git a/networks/bitcoin/src/tests/crypto.rs b/networks/bitcoin/src/tests/crypto.rs index 57a0eb3e..cd2ce298 100644 --- a/networks/bitcoin/src/tests/crypto.rs +++ b/networks/bitcoin/src/tests/crypto.rs @@ -2,7 +2,6 @@ use rand_core::OsRng; use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature}; -use k256::Scalar; use frost::{ curve::Secp256k1, Participant, @@ -11,7 +10,8 @@ use frost::{ use crate::{ bitcoin::hashes::{Hash as HashTrait, sha256::Hash}, - crypto::{x_only, make_even, Schnorr}, + crypto::{x_only, Schnorr}, + wallet::tweak_keys, }; #[test] @@ -20,8 +20,7 @@ fn test_algorithm() { const MESSAGE: &[u8] = b"Hello, World!"; for keys in keys.values_mut() { - let (_, offset) = make_even(keys.group_key()); - *keys = keys.offset(Scalar::from(offset)); + *keys = tweak_keys(keys.clone()); } let algo = Schnorr::new(); diff --git a/networks/bitcoin/src/wallet/mod.rs b/networks/bitcoin/src/wallet/mod.rs index 1a078958..7e985db0 100644 --- a/networks/bitcoin/src/wallet/mod.rs +++ b/networks/bitcoin/src/wallet/mod.rs @@ -26,7 +26,7 @@ use bitcoin::{hashes::Hash, consensus::encode::Decodable, TapTweakHash}; use crate::crypto::x_only; #[cfg(feature = "std")] -use crate::crypto::make_even; +use crate::crypto::needs_negation; #[cfg(feature = "std")] mod send; @@ -43,7 +43,7 @@ pub use send::*; /// existence of the unspendable script path may not provable, without an understanding of the /// algorithm used here. 
#[cfg(feature = "std")] -pub fn tweak_keys(keys: &ThresholdKeys) -> ThresholdKeys { +pub fn tweak_keys(keys: ThresholdKeys) -> ThresholdKeys { // Adds the unspendable script path per // https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki#cite_note-23 let keys = { @@ -64,11 +64,14 @@ pub fn tweak_keys(keys: &ThresholdKeys) -> ThresholdKeys { ))) }; - // This doesn't risk re-introducing a script path as you'd have to find a preimage for the tweak - // hash with whatever increment, or manipulate the key so that the tweak hash and increment - // equals the desired offset, yet manipulating the key would change the tweak hash - let (_, offset) = make_even(keys.group_key()); - keys.offset(Scalar::from(offset)) + let needs_negation = needs_negation(&keys.group_key()); + keys + .scale(<_ as subtle::ConditionallySelectable>::conditional_select( + &Scalar::ONE, + &-Scalar::ONE, + needs_negation, + )) + .expect("scaling keys by 1 or -1 yet interpreted as 0?") } /// Return the Taproot address payload for a public key. diff --git a/networks/bitcoin/tests/wallet.rs b/networks/bitcoin/tests/wallet.rs index 45371414..83344048 100644 --- a/networks/bitcoin/tests/wallet.rs +++ b/networks/bitcoin/tests/wallet.rs @@ -80,7 +80,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint) fn keys() -> (HashMap>, ProjectivePoint) { let mut keys = key_gen(&mut OsRng); for keys in keys.values_mut() { - *keys = tweak_keys(keys); + *keys = tweak_keys(keys.clone()); } let key = keys.values().next().unwrap().group_key(); (keys, key) From ceede14f5cf7d77e7d332760ed7bcf54c093eaed Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 07:19:40 -0400 Subject: [PATCH 086/116] Fix misc compilation errors --- Cargo.lock | 3 + coordinator/Cargo.toml | 1 + coordinator/src/tests/tributary/dkg.rs | 4 +- coordinator/src/tributary/signing_protocol.rs | 10 +--- networks/ethereum/src/tests/mod.rs | 2 +- processor/Cargo.toml | 1 + processor/src/key_gen.rs | 59 +++++++------------ processor/src/networks/bitcoin.rs | 2 +- processor/src/networks/ethereum.rs | 2 +- processor/src/networks/monero.rs | 2 +- processor/src/tests/batch_signer.rs | 2 +- processor/src/tests/cosigner.rs | 2 +- processor/src/tests/signer.rs | 2 +- processor/src/tests/wallet.rs | 2 +- substrate/client/Cargo.toml | 1 + .../client/tests/common/genesis_liquidity.rs | 4 +- .../client/tests/common/validator_sets.rs | 5 +- .../validator-sets/primitives/src/lib.rs | 2 +- tests/processor/src/tests/mod.rs | 2 + 19 files changed, 47 insertions(+), 61 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c10e41fd..3576aa75 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8050,6 +8050,7 @@ dependencies = [ "bitcoin", "blake2", "ciphersuite", + "dkg-musig", "dockertest", "frame-system", "frost-schnorrkel", @@ -8109,6 +8110,7 @@ dependencies = [ "blake2", "borsh", "ciphersuite", + "dkg-musig", "env_logger", "flexible-transcript", "frost-schnorrkel", @@ -8480,6 +8482,7 @@ version = "0.1.0" dependencies = [ "async-trait", "bitcoin-serai", + "blake2", "borsh", "ciphersuite", "const-hex", diff --git a/coordinator/Cargo.toml b/coordinator/Cargo.toml index ae4e2be7..1067fbb0 100644 --- a/coordinator/Cargo.toml +++ b/coordinator/Cargo.toml @@ -27,6 +27,7 @@ blake2 = { version = "0.10", default-features = false, features = ["std"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std", "recommended"] } ciphersuite = { path = "../crypto/ciphersuite", default-features = false, 
features = ["std"] } schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std"] } +dkg-musig = { path = "../crypto/dkg/musig", default-features = false, features = ["std"] } frost = { package = "modular-frost", path = "../crypto/frost" } frost-schnorrkel = { path = "../crypto/schnorrkel" } diff --git a/coordinator/src/tests/tributary/dkg.rs b/coordinator/src/tests/tributary/dkg.rs index adaa6643..7999d58a 100644 --- a/coordinator/src/tests/tributary/dkg.rs +++ b/coordinator/src/tests/tributary/dkg.rs @@ -361,8 +361,8 @@ async fn dkg_test() { assert!(signature.verify( &*serai_client::validator_sets::primitives::set_keys_message(&set, &[], &key_pair), &serai_client::Public( - frost::dkg::musig::musig_key::( - &serai_client::validator_sets::primitives::musig_context(set.into()), + dkg_musig::musig_key_vartime::( + serai_client::validator_sets::primitives::musig_context(set.into()), &self.spec.validators().into_iter().map(|(validator, _)| validator).collect::>() ) .unwrap() diff --git a/coordinator/src/tributary/signing_protocol.rs b/coordinator/src/tributary/signing_protocol.rs index 20dda48e..dbb61585 100644 --- a/coordinator/src/tributary/signing_protocol.rs +++ b/coordinator/src/tributary/signing_protocol.rs @@ -67,12 +67,8 @@ use ciphersuite::{ group::{ff::PrimeField, GroupEncoding}, Ciphersuite, Ristretto, }; -use frost::{ - FrostError, - dkg::{Participant, musig::musig}, - ThresholdKeys, - sign::*, -}; +use dkg_musig::musig; +use frost::{FrostError, dkg::Participant, ThresholdKeys, sign::*}; use frost_schnorrkel::Schnorrkel; use scale::Encode; @@ -119,7 +115,7 @@ impl SigningProtocol<'_, T, C> { let algorithm = Schnorrkel::new(b"substrate"); let keys: ThresholdKeys = - musig(&musig_context(self.spec.set().into()), self.key, participants) + musig(musig_context(self.spec.set().into()), self.key.clone(), participants) .expect("signing for a set we aren't in/validator present multiple times") .into(); diff --git a/networks/ethereum/src/tests/mod.rs b/networks/ethereum/src/tests/mod.rs index dcdbedce..cbe1bb44 100644 --- a/networks/ethereum/src/tests/mod.rs +++ b/networks/ethereum/src/tests/mod.rs @@ -37,7 +37,7 @@ pub fn key_gen() -> (HashMap>, PublicKey) group_key += ProjectivePoint::GENERATOR; } for keys in keys.values_mut() { - *keys = keys.offset(offset); + *keys = keys.clone().offset(offset); } let public_key = PublicKey::new(group_key).unwrap(); diff --git a/processor/Cargo.toml b/processor/Cargo.toml index c67f24f4..d0a650d0 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -34,6 +34,7 @@ borsh = { version = "1", default-features = false, features = ["std", "derive", serde_json = { version = "1", default-features = false, features = ["std"] } # Cryptography +blake2 = { version = "0.10", default-features = false, features = ["std"] } ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std"] } diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index 894fec78..8d3310a1 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -10,7 +10,7 @@ use ciphersuite::group::GroupEncoding; use dkg_pedpop::*; use frost::{ curve::{Ciphersuite, Ristretto}, - dkg::{DkgError, Participant, ThresholdParams, ThresholdCore, ThresholdKeys}, + dkg::{Participant, ThresholdParams, ThresholdKeys}, }; use log::info; @@ -54,8 +54,8 @@ impl 
GeneratedKeysDb { let mut substrate_keys = vec![]; let mut network_keys = vec![]; while !keys_ref.is_empty() { - substrate_keys.push(ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap())); - let mut these_network_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap()); + substrate_keys.push(ThresholdKeys::read(&mut keys_ref).unwrap()); + let mut these_network_keys = ThresholdKeys::read(&mut keys_ref).unwrap(); N::tweak_keys(&mut these_network_keys); network_keys.push(these_network_keys); } @@ -65,7 +65,7 @@ impl GeneratedKeysDb { fn save_keys( txn: &mut impl DbTxn, id: &KeyGenId, - substrate_keys: &[ThresholdCore], + substrate_keys: &[ThresholdKeys], network_keys: &[ThresholdKeys], ) { let mut keys = Zeroizing::new(vec![]); @@ -181,15 +181,19 @@ impl KeyGen { ) -> ProcessorMessage { const SUBSTRATE_KEY_CONTEXT: &str = "substrate"; const NETWORK_KEY_CONTEXT: &str = "network"; - let context = |id: &KeyGenId, key| { + let context = |id: &KeyGenId, key| -> [u8; 32] { // TODO2: Also embed the chain ID/genesis block - format!( - "Serai Key Gen. Session: {:?}, Network: {:?}, Attempt: {}, Key: {}", - id.session, - N::NETWORK, - id.attempt, - key, + ::digest( + format!( + "Serai Key Gen. Session: {:?}, Network: {:?}, Attempt: {}, Key: {}", + id.session, + N::NETWORK, + id.attempt, + key, + ) + .as_bytes(), ) + .into() }; let rng = |label, id: KeyGenId| { @@ -246,19 +250,10 @@ impl KeyGen { match machine.generate_secret_shares(rng, commitments) { Ok(res) => Ok(res), Err(e) => match e { - DkgError::ZeroParameter(_, _) | - DkgError::InvalidThreshold(_, _) | - DkgError::InvalidParticipant(_, _) | - DkgError::InvalidSigningSet | - DkgError::InvalidShare { .. } => unreachable!("{e:?}"), - DkgError::InvalidParticipantQuantity(_, _) | - DkgError::DuplicatedParticipant(_) | - DkgError::MissingParticipant(_) => { - panic!("coordinator sent invalid DKG commitments: {e:?}") - } - DkgError::InvalidCommitments(i) => { + PedPoPError::InvalidCommitments(i) => { Err(ProcessorMessage::InvalidCommitments { id, faulty: i })? } + _ => panic!("unknown error: {e:?}"), }, } } @@ -396,7 +391,7 @@ impl KeyGen { m: usize, machine: KeyMachine, shares_ref: &mut HashMap, - ) -> Result, ProcessorMessage> { + ) -> Result, ProcessorMessage> { let params = ThresholdParams::new( params.t(), params.n(), @@ -421,17 +416,7 @@ impl KeyGen { (match machine.calculate_share(rng, shares) { Ok(res) => res, Err(e) => match e { - DkgError::ZeroParameter(_, _) | - DkgError::InvalidThreshold(_, _) | - DkgError::InvalidParticipant(_, _) | - DkgError::InvalidSigningSet | - DkgError::InvalidCommitments(_) => unreachable!("{e:?}"), - DkgError::InvalidParticipantQuantity(_, _) | - DkgError::DuplicatedParticipant(_) | - DkgError::MissingParticipant(_) => { - panic!("coordinator sent invalid DKG shares: {e:?}") - } - DkgError::InvalidShare { participant, blame } => { + PedPoPError::InvalidShare { participant, blame } => { Err(ProcessorMessage::InvalidShare { id, accuser: params.i(), @@ -439,6 +424,7 @@ impl KeyGen { blame: Some(blame.map(|blame| blame.serialize())).flatten(), })? 
} + _ => panic!("unknown error: {e:?}"), }, }) .complete(), @@ -468,7 +454,7 @@ impl KeyGen { Ok(keys) => keys, Err(msg) => return msg, }; - let these_network_keys = + let mut these_network_keys = match handle_machine(&mut rng, id, params, m, machines.1, &mut shares_ref) { Ok(keys) => keys, Err(msg) => return msg, @@ -487,7 +473,6 @@ impl KeyGen { } } - let mut these_network_keys = ThresholdKeys::new(these_network_keys); N::tweak_keys(&mut these_network_keys); substrate_keys.push(these_substrate_keys); @@ -556,7 +541,6 @@ impl KeyGen { blame.clone().and_then(|blame| EncryptionKeyProof::read(&mut blame.as_slice()).ok()); let substrate_blame = AdditionalBlameMachine::new( - &mut rand_core::OsRng, context(&id, SUBSTRATE_KEY_CONTEXT), params.n(), substrate_commitment_msgs, @@ -564,7 +548,6 @@ impl KeyGen { .unwrap() .blame(accuser, accused, substrate_share, substrate_blame); let network_blame = AdditionalBlameMachine::new( - &mut rand_core::OsRng, context(&id, NETWORK_KEY_CONTEXT), params.n(), network_commitment_msgs, diff --git a/processor/src/networks/bitcoin.rs b/processor/src/networks/bitcoin.rs index 5702f5ed..a423f9d2 100644 --- a/processor/src/networks/bitcoin.rs +++ b/processor/src/networks/bitcoin.rs @@ -648,7 +648,7 @@ impl Network for Bitcoin { const MAX_OUTPUTS: usize = MAX_OUTPUTS; fn tweak_keys(keys: &mut ThresholdKeys) { - *keys = tweak_keys(keys); + *keys = tweak_keys(keys.clone()); // Also create a scanner to assert these keys, and all expected paths, are usable scanner(keys.group_key()); } diff --git a/processor/src/networks/ethereum.rs b/processor/src/networks/ethereum.rs index f4788849..7aba2071 100644 --- a/processor/src/networks/ethereum.rs +++ b/processor/src/networks/ethereum.rs @@ -408,7 +408,7 @@ impl Network for Ethereum { fn tweak_keys(keys: &mut ThresholdKeys) { while PublicKey::new(keys.group_key()).is_none() { - *keys = keys.offset(::F::ONE); + *keys = keys.clone().offset(::F::ONE); } } diff --git a/processor/src/networks/monero.rs b/processor/src/networks/monero.rs index 6813a76f..09962721 100644 --- a/processor/src/networks/monero.rs +++ b/processor/src/networks/monero.rs @@ -666,7 +666,7 @@ impl Network for Monero { keys: ThresholdKeys, transaction: SignableTransaction, ) -> Result { - match transaction.0.clone().multisig(&keys) { + match transaction.0.clone().multisig(keys) { Ok(machine) => Ok(machine), Err(e) => panic!("failed to create a multisig machine for TX: {e}"), } diff --git a/processor/src/tests/batch_signer.rs b/processor/src/tests/batch_signer.rs index 8da67ef1..ab3863e9 100644 --- a/processor/src/tests/batch_signer.rs +++ b/processor/src/tests/batch_signer.rs @@ -6,7 +6,7 @@ use ciphersuite::group::GroupEncoding; use frost::{ curve::Ristretto, Participant, - dkg::tests::{key_gen, clone_without}, + tests::{key_gen, clone_without}, }; use sp_application_crypto::{RuntimePublic, sr25519::Public}; diff --git a/processor/src/tests/cosigner.rs b/processor/src/tests/cosigner.rs index a66161bf..57845cff 100644 --- a/processor/src/tests/cosigner.rs +++ b/processor/src/tests/cosigner.rs @@ -6,7 +6,7 @@ use ciphersuite::group::GroupEncoding; use frost::{ curve::Ristretto, Participant, - dkg::tests::{key_gen, clone_without}, + tests::{key_gen, clone_without}, }; use sp_application_crypto::{RuntimePublic, sr25519::Public}; diff --git a/processor/src/tests/signer.rs b/processor/src/tests/signer.rs index 26b26b35..41053c02 100644 --- a/processor/src/tests/signer.rs +++ b/processor/src/tests/signer.rs @@ -6,7 +6,7 @@ use rand_core::{RngCore, OsRng}; use 
ciphersuite::group::GroupEncoding; use frost::{ Participant, ThresholdKeys, - dkg::tests::{key_gen, clone_without}, + tests::{key_gen, clone_without}, }; use serai_db::{DbTxn, Db, MemDb}; diff --git a/processor/src/tests/wallet.rs b/processor/src/tests/wallet.rs index 74d7ccc0..b46e0548 100644 --- a/processor/src/tests/wallet.rs +++ b/processor/src/tests/wallet.rs @@ -4,7 +4,7 @@ use std::collections::HashMap; use rand_core::OsRng; use ciphersuite::group::GroupEncoding; -use frost::{Participant, dkg::tests::key_gen}; +use frost::{Participant, tests::key_gen}; use tokio::time::timeout; diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml index 5685cc30..9fe5c1ce 100644 --- a/substrate/client/Cargo.toml +++ b/substrate/client/Cargo.toml @@ -48,6 +48,7 @@ hex = "0.4" blake2 = "0.10" ciphersuite = { path = "../../crypto/ciphersuite", features = ["ristretto"] } +dkg-musig = { path = "../../crypto/dkg/musig" } frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] } schnorrkel = { path = "../../crypto/schnorrkel", package = "frost-schnorrkel" } diff --git a/substrate/client/tests/common/genesis_liquidity.rs b/substrate/client/tests/common/genesis_liquidity.rs index 55824d36..a4b96b19 100644 --- a/substrate/client/tests/common/genesis_liquidity.rs +++ b/substrate/client/tests/common/genesis_liquidity.rs @@ -4,7 +4,7 @@ use rand_core::{RngCore, OsRng}; use zeroize::Zeroizing; use ciphersuite::{Ciphersuite, Ristretto}; -use frost::dkg::musig::musig; +use dkg_musig::musig; use schnorrkel::Schnorrkel; use sp_core::{sr25519::Signature, Pair as PairTrait}; @@ -99,7 +99,7 @@ async fn set_values(serai: &Serai, values: &Values) { assert_eq!(Ristretto::generator() * secret_key, public_key); let threshold_keys = - musig::(&musig_context(set), &Zeroizing::new(secret_key), &[public_key]).unwrap(); + musig::(musig_context(set), Zeroizing::new(secret_key), &[public_key]).unwrap(); let sig = frost::tests::sign_without_caching( &mut OsRng, diff --git a/substrate/client/tests/common/validator_sets.rs b/substrate/client/tests/common/validator_sets.rs index 20f7e951..1ce1e105 100644 --- a/substrate/client/tests/common/validator_sets.rs +++ b/substrate/client/tests/common/validator_sets.rs @@ -10,7 +10,7 @@ use sp_core::{ }; use ciphersuite::{Ciphersuite, Ristretto}; -use frost::dkg::musig::musig; +use dkg_musig::musig; use schnorrkel::Schnorrkel; use serai_client::{ @@ -46,8 +46,7 @@ pub async fn set_keys( assert_eq!(Ristretto::generator() * secret_key, pub_keys[i]); threshold_keys.push( - musig::(&musig_context(set.into()), &Zeroizing::new(secret_key), &pub_keys) - .unwrap(), + musig::(musig_context(set.into()), Zeroizing::new(secret_key), &pub_keys).unwrap(), ); } diff --git a/substrate/validator-sets/primitives/src/lib.rs b/substrate/validator-sets/primitives/src/lib.rs index 581491f0..9ff9f18b 100644 --- a/substrate/validator-sets/primitives/src/lib.rs +++ b/substrate/validator-sets/primitives/src/lib.rs @@ -112,7 +112,7 @@ pub fn musig_context(set: ValidatorSet) -> [u8; 32] { const DST: &[u8] = b"ValidatorSets-musig_key"; context[.. DST.len()].copy_from_slice(DST); let set = set.encode(); - context[DST.len() .. (DST.len() + set.len())].copy_from_slice(set.len()); + context[DST.len() .. 
(DST.len() + set.len())].copy_from_slice(&set); context } diff --git a/tests/processor/src/tests/mod.rs b/tests/processor/src/tests/mod.rs index 42cfef69..e2b6fcdc 100644 --- a/tests/processor/src/tests/mod.rs +++ b/tests/processor/src/tests/mod.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use ciphersuite::{Ciphersuite, Ristretto}; use dockertest::DockerTest; From 95c30720d26421b837f2522f1c10bc341c33f8c0 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 18 Aug 2025 13:02:35 -0400 Subject: [PATCH 087/116] Update how x coordinates are handled in bitcoin-serai --- networks/bitcoin/src/crypto.rs | 24 +++++++++++++++--------- networks/bitcoin/src/wallet/mod.rs | 6 +++--- networks/bitcoin/src/wallet/send.rs | 2 +- 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/networks/bitcoin/src/crypto.rs b/networks/bitcoin/src/crypto.rs index 12aa2c1e..36f85f29 100644 --- a/networks/bitcoin/src/crypto.rs +++ b/networks/bitcoin/src/crypto.rs @@ -7,15 +7,18 @@ use k256::{ use bitcoin::key::XOnlyPublicKey; -/// Get the x coordinate of a non-infinity, even point. Panics on invalid input. -pub fn x(key: &ProjectivePoint) -> [u8; 32] { +/// Get the x coordinate of a non-infinity point. +/// +/// Panics on invalid input. +fn x(key: &ProjectivePoint) -> [u8; 32] { let encoded = key.to_encoded_point(true); - assert_eq!(encoded.tag(), Tag::CompressedEvenY, "x coordinate of odd key"); (*encoded.x().expect("point at infinity")).into() } -/// Convert a non-infinity even point to a XOnlyPublicKey. Panics on invalid input. -pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey { +/// Convert a non-infinity point to a XOnlyPublicKey (dropping its sign). +/// +/// Panics on invalid input. +pub(crate) fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey { XOnlyPublicKey::from_slice(&x(key)).expect("x_only was passed a point which was infinity or odd") } @@ -46,9 +49,9 @@ mod frost_crypto { /// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm. /// - /// If passed an odd nonce, it will have the generator added until it is even. + /// If passed an odd nonce, the challenge will be negated. /// - /// If the key is odd, this will panic. + /// If either `R` or `A` is the point at infinity, this will panic. #[derive(Clone, Copy, Debug)] pub struct Hram; #[allow(non_snake_case)] @@ -72,9 +75,12 @@ mod frost_crypto { /// BIP-340 Schnorr signature algorithm. /// - /// This must be used with a ThresholdKeys whose group key is even. If it is odd, this may panic. + /// This may panic if called with nonces/a group key which are the point at infinity (which have + /// a negligible probability for a well-reasoned caller, even with malicious participants + /// present). /// - /// `verify`, `verify_share` must be called after `sign_share` is called. + /// `verify`, `verify_share` MUST be called after `sign_share` is called. Otherwise, this library + /// MAY panic. #[derive(Clone)] pub struct Schnorr(FrostSchnorr); impl Schnorr { diff --git a/networks/bitcoin/src/wallet/mod.rs b/networks/bitcoin/src/wallet/mod.rs index 7e985db0..1dc385df 100644 --- a/networks/bitcoin/src/wallet/mod.rs +++ b/networks/bitcoin/src/wallet/mod.rs @@ -39,9 +39,9 @@ pub use send::*; /// from being spent via a script. To have keys which have spendable script paths, further offsets /// from this position must be used. /// -/// After adding an unspendable script path, the key is incremented until its even. 
This means the -/// existence of the unspendable script path may not provable, without an understanding of the -/// algorithm used here. +/// After adding an unspendable script path, the key is negated if odd. +/// +/// This has a negligible probability of returning keys whose group key is the point at infinity. #[cfg(feature = "std")] pub fn tweak_keys(keys: ThresholdKeys) -> ThresholdKeys { // Adds the unspendable script path per diff --git a/networks/bitcoin/src/wallet/send.rs b/networks/bitcoin/src/wallet/send.rs index 276f536e..52824280 100644 --- a/networks/bitcoin/src/wallet/send.rs +++ b/networks/bitcoin/src/wallet/send.rs @@ -288,7 +288,7 @@ impl SignableTransaction { /// A FROST signing machine to produce a Bitcoin transaction. /// /// This does not support caching its preprocess. When sign is called, the message must be empty. -/// This will panic if either `cache` is called or the message isn't empty. +/// This will panic if either `cache` or `from_cache` is called, or the message isn't empty. pub struct TransactionMachine { tx: SignableTransaction, sigs: Vec>, From 2c4de3bab4b939a7f60107f13ba95d31ccddb007 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 12:51:16 -0400 Subject: [PATCH 088/116] Bump version of ff-group-tests --- Cargo.lock | 2 +- crypto/ff-group-tests/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3576aa75..c953da7a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2634,7 +2634,7 @@ dependencies = [ [[package]] name = "ff-group-tests" -version = "0.13.1" +version = "0.13.2" dependencies = [ "bls12_381", "ff", diff --git a/crypto/ff-group-tests/Cargo.toml b/crypto/ff-group-tests/Cargo.toml index aa328fa1..a9b784ca 100644 --- a/crypto/ff-group-tests/Cargo.toml +++ b/crypto/ff-group-tests/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ff-group-tests" -version = "0.13.1" +version = "0.13.2" description = "A collection of sanity tests for implementors of ff/group APIs" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ff-group-tests" From 961f46bc04f29464d7cc7bced9cfec76887093d2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 13:17:39 -0400 Subject: [PATCH 089/116] Add `const fn` to create a dalek-ff-group FieldElement --- crypto/dalek-ff-group/src/field.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index 10ca67d9..cedf98b5 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -216,6 +216,13 @@ impl PrimeFieldBits for FieldElement { } impl FieldElement { + /// Create a FieldElement from a `crypto_bigint::U256`. + /// + /// This will reduce the `U256` by the modulus, into a member of the field. + pub const fn from_u256(u256: &U256) -> Self { + FieldElement(Residue::new(u256)) + } + /// Interpret the value as a little-endian integer, square it, and reduce it into a FieldElement. pub fn from_square(value: [u8; 32]) -> FieldElement { let value = U256::from_le_bytes(value); From f2c13a0040d6b6c2de24af2af9bca1080109ef87 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 13:36:01 -0400 Subject: [PATCH 090/116] Expose Once within std-shims, bump spin to 0.10 This is technically a semver break due to bumping spin to 0.10, with the types from spin being directly exposed.
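As a rough illustration of that exposure (a hypothetical downstream snippet, not part of this patch; the `std_shims` path and names are assumed):

  use std_shims::sync::OnceLock;

  // On no-std builds this alias is spin's `Once`, so spin's API surface and its breaking
  // releases become directly observable to downstream crates.
  static CELL: OnceLock<u32> = OnceLock::new();

  fn cached() -> u32 {
    *CELL.get().unwrap_or(&0)
  }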
Long-term, we should not directly expose spin but instead have our own types which are thin wrappers around spin (clearly defining our API and allowing upgrading internals without breaking semver). --- Cargo.lock | 8 +++++++- common/std-shims/Cargo.toml | 2 +- common/std-shims/README.md | 7 ++++++- common/std-shims/src/sync.rs | 4 ++-- 4 files changed, 16 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c953da7a..98f41495 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9613,6 +9613,12 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +[[package]] +name = "spin" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591" + [[package]] name = "spki" version = "0.7.3" @@ -9689,7 +9695,7 @@ name = "std-shims" version = "0.1.2" dependencies = [ "hashbrown 0.14.5", - "spin 0.9.8", + "spin 0.10.0", ] [[package]] diff --git a/common/std-shims/Cargo.toml b/common/std-shims/Cargo.toml index ef746a64..5900ca6a 100644 --- a/common/std-shims/Cargo.toml +++ b/common/std-shims/Cargo.toml @@ -17,7 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] -spin = { version = "0.9", default-features = false, features = ["use_ticket_mutex", "lazy"] } +spin = { version = "0.10", default-features = false, features = ["use_ticket_mutex", "once", "lazy"] } hashbrown = { version = "0.14", default-features = false, features = ["ahash", "inline-more"] } [features] diff --git a/common/std-shims/README.md b/common/std-shims/README.md index 88f8eadd..b5bc121d 100644 --- a/common/std-shims/README.md +++ b/common/std-shims/README.md @@ -3,4 +3,9 @@ A crate which passes through to std when the default `std` feature is enabled, yet provides a series of shims when it isn't. -`HashSet` and `HashMap` are provided via `hashbrown`. +No guarantee of one-to-one parity is provided. The shims provided aim to be sufficient for the +average case. + +`HashSet` and `HashMap` are provided via `hashbrown`. Synchronization primitives are provided via +`spin` (avoiding a requirement on `critical-section`). +types are not guaranteed to be diff --git a/common/std-shims/src/sync.rs b/common/std-shims/src/sync.rs index 8193bcfb..14d32721 100644 --- a/common/std-shims/src/sync.rs +++ b/common/std-shims/src/sync.rs @@ -26,6 +26,6 @@ mod mutex_shim { pub use mutex_shim::{ShimMutex as Mutex, MutexGuard}; #[cfg(feature = "std")] -pub use std::sync::LazyLock; +pub use std::sync::{OnceLock, LazyLock}; #[cfg(not(feature = "std"))] -pub use spin::Lazy as LazyLock; +pub use spin::{Once as OnceLock, Lazy as LazyLock}; From cfd1cb3a3771e0a04c7db8e7884663da2e107ffa Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 13:48:54 -0400 Subject: [PATCH 091/116] Add FieldElement::wide_reduce to dalek-ff-group --- crypto/dalek-ff-group/src/field.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index cedf98b5..e0ef473a 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -223,6 +223,13 @@ impl FieldElement { FieldElement(Residue::new(u256)) } + /// Create a `FieldElement` from the reduction of a 512-bit number. + /// + /// The bytes are interpreted in little-endian format. 
+ pub fn wide_reduce(value: [u8; 64]) -> Self { + FieldElement(reduce(U512::from_le_bytes(value))) + } + /// Interpret the value as a little-endian integer, square it, and reduce it into a FieldElement. pub fn from_square(value: [u8; 32]) -> FieldElement { let value = U256::from_le_bytes(value); From ca85f9ba0ca6816f420948f8e8f89db63c2605de Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 15:24:33 -0400 Subject: [PATCH 092/116] Remove the poorly-designed reduce_512 API Unused and unpublished. This was only added in the FCMP++ branch as a quick fix for performance reasons. Finding a better API is still a tricky question, but this API is _bad_. --- crypto/ciphersuite/src/dalek.rs | 6 ------ crypto/ciphersuite/src/ed448.rs | 6 ------ crypto/ciphersuite/src/kp256.rs | 16 ---------------- crypto/ciphersuite/src/lib.rs | 6 ------ crypto/dkg/promote/src/tests.rs | 4 ---- 5 files changed, 38 deletions(-) diff --git a/crypto/ciphersuite/src/dalek.rs b/crypto/ciphersuite/src/dalek.rs index a04195b2..bd9c70c1 100644 --- a/crypto/ciphersuite/src/dalek.rs +++ b/crypto/ciphersuite/src/dalek.rs @@ -28,12 +28,6 @@ macro_rules! dalek_curve { $Point::generator() } - fn reduce_512(mut scalar: [u8; 64]) -> Self::F { - let res = Scalar::from_bytes_mod_order_wide(&scalar); - scalar.zeroize(); - res - } - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { Scalar::from_hash(Sha512::new_with_prefix(&[dst, data].concat())) } diff --git a/crypto/ciphersuite/src/ed448.rs b/crypto/ciphersuite/src/ed448.rs index 0b19ffa5..8a927251 100644 --- a/crypto/ciphersuite/src/ed448.rs +++ b/crypto/ciphersuite/src/ed448.rs @@ -66,12 +66,6 @@ impl Ciphersuite for Ed448 { Point::generator() } - fn reduce_512(mut scalar: [u8; 64]) -> Self::F { - let res = Self::hash_to_F(b"Ciphersuite-reduce_512", &scalar); - scalar.zeroize(); - res - } - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_ref().try_into().unwrap()) } diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index a1f64ae4..ceb8ee84 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -31,22 +31,6 @@ macro_rules! kp_curve { $lib::ProjectivePoint::GENERATOR } - fn reduce_512(scalar: [u8; 64]) -> Self::F { - let mut modulus = [0; 64]; - modulus[32 ..].copy_from_slice(&(Self::F::ZERO - Self::F::ONE).to_bytes()); - let modulus = U512::from_be_slice(&modulus).checked_add(&U512::ONE).unwrap(); - - let mut wide = - U512::from_be_bytes(scalar).rem(&NonZero::new(modulus).unwrap()).to_be_bytes(); - - let mut array = *GenericArray::from_slice(&wide[32 ..]); - let res = $lib::Scalar::from_repr(array).unwrap(); - - wide.zeroize(); - array.zeroize(); - res - } - fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { // While one of these two libraries does support directly hashing to the Scalar field, the // other doesn't. While that's probably an oversight, this is a universally working method diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index 6519a413..fd0c9194 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -62,12 +62,6 @@ pub trait Ciphersuite: // While group does provide this in its API, privacy coins may want to use a custom basepoint fn generator() -> Self::G; - /// Reduce 512 bits into a uniform scalar. - /// - /// If 512 bits is insufficient to perform a reduction into a uniform scalar, the ciphersuite - /// will perform a hash to sample the necessary bits. 
- fn reduce_512(scalar: [u8; 64]) -> Self::F; - /// Hash the provided domain-separation tag and message to a scalar. Ciphersuites MAY naively /// prefix the tag to the message, enabling transpotion between the two. Accordingly, this /// function should NOT be used in any scheme where one tag is a valid substring of another diff --git a/crypto/dkg/promote/src/tests.rs b/crypto/dkg/promote/src/tests.rs index b46dfbbf..a748f61d 100644 --- a/crypto/dkg/promote/src/tests.rs +++ b/crypto/dkg/promote/src/tests.rs @@ -29,10 +29,6 @@ impl Ciphersuite for AltGenerator { C::G::generator() * ::hash_to_F(b"DKG Promotion Test", b"generator") } - fn reduce_512(scalar: [u8; 64]) -> Self::F { - ::reduce_512(scalar) - } - fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F { ::hash_to_F(dst, data) } From f32e0609f133da442906be4079588417d5b8d58f Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 15:25:40 -0400 Subject: [PATCH 093/116] Add warning to dalek-ff-group --- crypto/dalek-ff-group/src/lib.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 0e96bfa6..b24b1e56 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -359,7 +359,12 @@ macro_rules! dalek_group { $BASEPOINT_POINT: ident, $BASEPOINT_TABLE: ident ) => { - /// Wrapper around the dalek Point type. For Ed25519, this is restricted to the prime subgroup. + /// Wrapper around the dalek Point type. + /// + /// All operations will be restricted to a prime-order subgroup (equivalent to the group itself + /// in the case of Ristretto). The exposure of the internal element does allow bypassing this + /// however, which may lead to undefined/computationally-unsafe behavior, and is entirely at + /// the user's risk. #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct $Point(pub $DPoint); deref_borrow!($Point, $DPoint); From 1b37dd2951b0ae3c4ad42049ae940e25e672eacf Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 16:12:57 -0400 Subject: [PATCH 094/116] Shim std::sync::LazyLock for Rust < 1.80 Allows downgrading some crypto crates' MSRV to 1.79 as well. 
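A minimal usage sketch of the intended drop-in behavior (illustrative only; the static's name and initializer are hypothetical, and the crate is assumed to be imported as `std_shims`):

  use std_shims::sync::LazyLock;

  // Initialized once, on first access, whether `LazyLock` resolves to `std::sync::LazyLock`
  // (Rust >= 1.80) or to the shim added below (Rust < 1.80).
  static GREETING: LazyLock<String> = LazyLock::new(|| String::from("lazily initialized"));

  fn greeting_len() -> usize {
    GREETING.len()
  }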
--- Cargo.lock | 1 + common/std-shims/Cargo.toml | 5 +++-- common/std-shims/src/sync.rs | 38 +++++++++++++++++++++++++++++++++-- crypto/ciphersuite/Cargo.toml | 2 +- crypto/dkg/Cargo.toml | 2 +- crypto/dkg/dealer/Cargo.toml | 2 +- crypto/dkg/musig/Cargo.toml | 2 +- crypto/multiexp/Cargo.toml | 2 +- crypto/schnorr/Cargo.toml | 2 +- 9 files changed, 46 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 98f41495..85dc0610 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9695,6 +9695,7 @@ name = "std-shims" version = "0.1.2" dependencies = [ "hashbrown 0.14.5", + "rustversion", "spin 0.10.0", ] diff --git a/common/std-shims/Cargo.toml b/common/std-shims/Cargo.toml index 5900ca6a..ae56004e 100644 --- a/common/std-shims/Cargo.toml +++ b/common/std-shims/Cargo.toml @@ -1,13 +1,13 @@ [package] name = "std-shims" -version = "0.1.2" +version = "0.1.3" description = "A series of std shims to make alloc more feasible" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/common/std-shims" authors = ["Luke Parker "] keywords = ["nostd", "no_std", "alloc", "io"] edition = "2021" -rust-version = "1.80" +rust-version = "1.70" [package.metadata.docs.rs] all-features = true @@ -17,6 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] +rustversion = { version = "1", default-features = false } spin = { version = "0.10", default-features = false, features = ["use_ticket_mutex", "once", "lazy"] } hashbrown = { version = "0.14", default-features = false, features = ["ahash", "inline-more"] } diff --git a/common/std-shims/src/sync.rs b/common/std-shims/src/sync.rs index 14d32721..949bf57e 100644 --- a/common/std-shims/src/sync.rs +++ b/common/std-shims/src/sync.rs @@ -26,6 +26,40 @@ mod mutex_shim { pub use mutex_shim::{ShimMutex as Mutex, MutexGuard}; #[cfg(feature = "std")] -pub use std::sync::{OnceLock, LazyLock}; +pub use std::sync::OnceLock; #[cfg(not(feature = "std"))] -pub use spin::{Once as OnceLock, Lazy as LazyLock}; +pub use spin::Once as OnceLock; + +#[rustversion::before(1.80)] +mod before_1_80_lazylock { + use core::ops::Deref; + use super::{Mutex, OnceLock}; + + /// Shim for `std::sync::LazyLock`. + pub struct LazyLock T> { + f: Mutex>, + once: OnceLock, + } + impl T> LazyLock { + /// Shim for `std::sync::LazyLock::new`. + pub const fn new(f: F) -> Self { + Self { f: Mutex::new(Some(f)), once: OnceLock::new() } + } + /// Shim for `std::sync::LazyLock::get_or_init`. 
+ pub fn get(&self) -> &T { + // Since this initializer will only be called once, the value in the Mutex will be `Some` + self.once.get_or_init(|| (self.f.lock().take().unwrap())()) + } + } + impl T> Deref for LazyLock { + type Target = T; + fn deref(&self) -> &T { + self.get() + } + } +} +#[rustversion::before(1.80)] +pub use before_1_80_lazylock::LazyLock; + +#[rustversion::since(1.80)] +pub use std::sync::LazyLock; diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index b666dbaa..5fe4550c 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite authors = ["Luke Parker "] keywords = ["ciphersuite", "ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 1ac689b8..57baaebb 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/dealer/Cargo.toml b/crypto/dkg/dealer/Cargo.toml index 9bc2d5d5..ee008ab9 100644 --- a/crypto/dkg/dealer/Cargo.toml +++ b/crypto/dkg/dealer/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/dealer" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/musig/Cargo.toml b/crypto/dkg/musig/Cargo.toml index 42e508a1..1dfde36b 100644 --- a/crypto/dkg/musig/Cargo.toml +++ b/crypto/dkg/musig/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/musig" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true diff --git a/crypto/multiexp/Cargo.toml b/crypto/multiexp/Cargo.toml index 36efbfe2..228b85ab 100644 --- a/crypto/multiexp/Cargo.toml +++ b/crypto/multiexp/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/multiexp" authors = ["Luke Parker "] keywords = ["multiexp", "ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 06a9710e..2ea04f5b 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr" authors = ["Luke Parker "] keywords = ["schnorr", "ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.79" [package.metadata.docs.rs] all-features = true From f6d4d1b0845f39ff17a45126aaae75cebd9d58dc Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 16:22:54 -0400 Subject: [PATCH 095/116] Remove unused import, fix dirty Cargo.lock --- Cargo.lock | 2 +- crypto/ciphersuite/src/kp256.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 85dc0610..88ef9312 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9692,7 +9692,7 @@ dependencies 
= [ [[package]] name = "std-shims" -version = "0.1.2" +version = "0.1.3" dependencies = [ "hashbrown 0.14.5", "rustversion", diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index ceb8ee84..37fdb2e4 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -6,7 +6,7 @@ use group::ff::PrimeField; use elliptic_curve::{ generic_array::GenericArray, - bigint::{NonZero, CheckedAdd, Encoding, U384, U512}, + bigint::{NonZero, CheckedAdd, Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}, }; From 1e0240123df7b47de5c19720b5a9b34fdfa30d20 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 17:40:19 -0400 Subject: [PATCH 096/116] Shim LazyLock when before 1.70 --- common/std-shims/Cargo.toml | 2 +- common/std-shims/src/sync.rs | 118 +++++++++++++++++++++++++-------- crypto/ciphersuite/Cargo.toml | 2 +- crypto/dkg/Cargo.toml | 2 +- crypto/dkg/dealer/Cargo.toml | 2 +- crypto/dkg/recovery/Cargo.toml | 2 +- 6 files changed, 94 insertions(+), 34 deletions(-) diff --git a/common/std-shims/Cargo.toml b/common/std-shims/Cargo.toml index ae56004e..0525f515 100644 --- a/common/std-shims/Cargo.toml +++ b/common/std-shims/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/common/std-shims" authors = ["Luke Parker "] keywords = ["nostd", "no_std", "alloc", "io"] edition = "2021" -rust-version = "1.70" +rust-version = "1.64" [package.metadata.docs.rs] all-features = true diff --git a/common/std-shims/src/sync.rs b/common/std-shims/src/sync.rs index 949bf57e..65313b5c 100644 --- a/common/std-shims/src/sync.rs +++ b/common/std-shims/src/sync.rs @@ -25,41 +25,101 @@ mod mutex_shim { } pub use mutex_shim::{ShimMutex as Mutex, MutexGuard}; -#[cfg(feature = "std")] -pub use std::sync::OnceLock; #[cfg(not(feature = "std"))] pub use spin::Once as OnceLock; +#[cfg(feature = "std")] +mod std_oncelock { + #[rustversion::before(1.70)] + mod before_1_70_oncelock { + use core::cell::Cell; + use std::sync::RwLock; -#[rustversion::before(1.80)] -mod before_1_80_lazylock { - use core::ops::Deref; - use super::{Mutex, OnceLock}; + /// Shim for `std::sync::OnceLock`. + pub struct OnceLock { + value: Cell<*mut T>, + init: RwLock, + } + // We use the `RwLock` (which is `Sync`) to control access to the `!Sync` `RefCell` + unsafe impl Sync for OnceLock {} - /// Shim for `std::sync::LazyLock`. - pub struct LazyLock T> { - f: Mutex>, - once: OnceLock, - } - impl T> LazyLock { - /// Shim for `std::sync::LazyLock::new`. - pub const fn new(f: F) -> Self { - Self { f: Mutex::new(Some(f)), once: OnceLock::new() } + impl OnceLock { + /// Shim for `std::sync::OnceLock::new`. + pub const fn new() -> Self { + Self { value: Cell::new(core::ptr::null_mut()), init: RwLock::new(false) } + } + /// Shim for `std::sync::OnceLock::get_or_init`. + pub fn get_or_init(&'a self, f: F) -> &'a T + where + F: FnOnce() -> T, + { + let initialized = *self.init.read().unwrap(); + if !initialized { + // Obtain an exclusive reference + let mut initialized = self.init.write().unwrap(); + // If this still isn't initialized (by someone who first obtained an exlusive reference) + if !*initialized { + // Set the value and mark it initialized + self.value.set(Box::into_raw(Box::new(f()))); + *initialized = true; + } + } + // SAFETY: We always initialize the value before this and it's only written to once + unsafe { &*self.value.get() } + } } - /// Shim for `std::sync::LazyLock::get_or_init`. 
- pub fn get(&self) -> &T { - // Since this initializer will only be called once, the value in the Mutex will be `Some` - self.once.get_or_init(|| (self.f.lock().take().unwrap())()) - } - } - impl T> Deref for LazyLock { - type Target = T; - fn deref(&self) -> &T { - self.get() + + // SAFETY: `OnceLock` doesn't implement `Clone` so this doesn't risk dropping the `Box` + // multiple times + impl Drop for OnceLock { + fn drop(&mut self) { + unsafe { drop(Box::from_raw(self.value.get())) } + } } } + #[rustversion::before(1.70)] + pub use before_1_70_oncelock::OnceLock; + #[rustversion::since(1.70)] + pub use std::sync::OnceLock; } -#[rustversion::before(1.80)] -pub use before_1_80_lazylock::LazyLock; +#[cfg(feature = "std")] +pub use std_oncelock::OnceLock; -#[rustversion::since(1.80)] -pub use std::sync::LazyLock; +#[cfg(not(feature = "std"))] +pub use spin::Lazy as LazyLock; +#[cfg(feature = "std")] +mod std_lazylock { + #[rustversion::before(1.80)] + mod before_1_80_lazylock { + use core::ops::Deref; + use crate::sync::{Mutex, OnceLock}; + + /// Shim for `std::sync::LazyLock`. + pub struct LazyLock T> { + f: Mutex>, + once: OnceLock, + } + impl T> LazyLock { + /// Shim for `std::sync::LazyLock::new`. + pub const fn new(f: F) -> Self { + Self { f: Mutex::new(Some(f)), once: OnceLock::new() } + } + /// Shim for `std::sync::LazyLock::get`. + pub fn get(&self) -> &T { + // Since this initializer will only be called once, the value in the Mutex will be `Some` + self.once.get_or_init(|| (self.f.lock().take().unwrap())()) + } + } + impl T> Deref for LazyLock { + type Target = T; + fn deref(&self) -> &T { + self.get() + } + } + } + #[rustversion::before(1.80)] + pub use before_1_80_lazylock::LazyLock; + #[rustversion::since(1.80)] + pub use std::sync::LazyLock; +} +#[cfg(feature = "std")] +pub use std_lazylock::LazyLock; diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 5fe4550c..9fcf60a6 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite authors = ["Luke Parker "] keywords = ["ciphersuite", "ff", "group"] edition = "2021" -rust-version = "1.79" +rust-version = "1.74" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 57baaebb..d3529b7f 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.79" +rust-version = "1.74" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/dealer/Cargo.toml b/crypto/dkg/dealer/Cargo.toml index ee008ab9..2790e7d6 100644 --- a/crypto/dkg/dealer/Cargo.toml +++ b/crypto/dkg/dealer/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/dealer" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.79" +rust-version = "1.74" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml index 17d7b0c3..68aefc4c 100644 --- a/crypto/dkg/recovery/Cargo.toml +++ b/crypto/dkg/recovery/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recover authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", 
"ff", "group"] edition = "2021" -rust-version = "1.80" +rust-version = "1.74" [package.metadata.docs.rs] all-features = true From da3a85efe5a03572ebcfafa4ef4cfb3e66a8b6e7 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 17:50:04 -0400 Subject: [PATCH 097/116] Only drop OnceLock value if initialized --- common/std-shims/src/sync.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/common/std-shims/src/sync.rs b/common/std-shims/src/sync.rs index 65313b5c..6fe8e80d 100644 --- a/common/std-shims/src/sync.rs +++ b/common/std-shims/src/sync.rs @@ -48,7 +48,7 @@ mod std_oncelock { Self { value: Cell::new(core::ptr::null_mut()), init: RwLock::new(false) } } /// Shim for `std::sync::OnceLock::get_or_init`. - pub fn get_or_init(&'a self, f: F) -> &'a T + pub fn get_or_init(&self, f: F) -> &T where F: FnOnce() -> T, { @@ -72,7 +72,9 @@ mod std_oncelock { // multiple times impl Drop for OnceLock { fn drop(&mut self) { - unsafe { drop(Box::from_raw(self.value.get())) } + if *self.init.read().unwrap() { + unsafe { drop(Box::from_raw(self.value.get())) } + } } } } From 432daae1d1c5646a83c5a86d7c16a18e63aa474b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 18:04:29 -0400 Subject: [PATCH 098/116] Polyfill extension traits for div_ceil and io::Error::other --- common/std-shims/src/lib.rs | 61 +++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/common/std-shims/src/lib.rs b/common/std-shims/src/lib.rs index bccda3a0..2c2e1090 100644 --- a/common/std-shims/src/lib.rs +++ b/common/std-shims/src/lib.rs @@ -11,3 +11,64 @@ pub mod io; pub use alloc::vec; pub use alloc::str; pub use alloc::string; + +pub mod prelude { + #[rustversion::before(1.73)] + #[doc(hidden)] + pub trait StdShimsDivCeil { + fn div_ceil(self, rhs: Self) -> Self; + } + #[rustversion::before(1.73)] + mod impl_divceil { + use super::StdShimsDivCeil; + impl StdShimsDivCeil for u8 { + fn div_ceil(self, rhs: Self) -> Self { + (self + (rhs - 1)) / rhs + } + } + impl StdShimsDivCeil for u16 { + fn div_ceil(self, rhs: Self) -> Self { + (self + (rhs - 1)) / rhs + } + } + impl StdShimsDivCeil for u32 { + fn div_ceil(self, rhs: Self) -> Self { + (self + (rhs - 1)) / rhs + } + } + impl StdShimsDivCeil for u64 { + fn div_ceil(self, rhs: Self) -> Self { + (self + (rhs - 1)) / rhs + } + } + impl StdShimsDivCeil for u128 { + fn div_ceil(self, rhs: Self) -> Self { + (self + (rhs - 1)) / rhs + } + } + impl StdShimsDivCeil for usize { + fn div_ceil(self, rhs: Self) -> Self { + (self + (rhs - 1)) / rhs + } + } + } + + #[cfg(feature = "std")] + #[rustversion::before(1.74)] + #[doc(hidden)] + pub trait StdShimsIoErrorOther { + fn other(error: E) -> Self + where + E: Into>; + } + #[cfg(feature = "std")] + #[rustversion::before(1.74)] + impl StdShimsIoErrorOther for std::io::Error { + fn other(error: E) -> Self + where + E: Into>, + { + std::io::Error::new(std::io::ErrorKind::Other, error) + } + } +} From e5ccfac19e7c04032cd88ae6e76bbd55cf4e7c9c Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 18:10:33 -0400 Subject: [PATCH 099/116] Replace bespoke LazyLock/OnceLock with spin re-exports Presumably notably slower on platforms with std, yet only when compiled with old versions of Rust for which the option is this or no support anyways. 
--- common/std-shims/src/sync.rs | 99 +++--------------------------------- 1 file changed, 8 insertions(+), 91 deletions(-) diff --git a/common/std-shims/src/sync.rs b/common/std-shims/src/sync.rs index 6fe8e80d..0945a125 100644 --- a/common/std-shims/src/sync.rs +++ b/common/std-shims/src/sync.rs @@ -27,101 +27,18 @@ pub use mutex_shim::{ShimMutex as Mutex, MutexGuard}; #[cfg(not(feature = "std"))] pub use spin::Once as OnceLock; +#[rustversion::before(1.70)] #[cfg(feature = "std")] -mod std_oncelock { - #[rustversion::before(1.70)] - mod before_1_70_oncelock { - use core::cell::Cell; - use std::sync::RwLock; - - /// Shim for `std::sync::OnceLock`. - pub struct OnceLock { - value: Cell<*mut T>, - init: RwLock, - } - // We use the `RwLock` (which is `Sync`) to control access to the `!Sync` `RefCell` - unsafe impl Sync for OnceLock {} - - impl OnceLock { - /// Shim for `std::sync::OnceLock::new`. - pub const fn new() -> Self { - Self { value: Cell::new(core::ptr::null_mut()), init: RwLock::new(false) } - } - /// Shim for `std::sync::OnceLock::get_or_init`. - pub fn get_or_init(&self, f: F) -> &T - where - F: FnOnce() -> T, - { - let initialized = *self.init.read().unwrap(); - if !initialized { - // Obtain an exclusive reference - let mut initialized = self.init.write().unwrap(); - // If this still isn't initialized (by someone who first obtained an exlusive reference) - if !*initialized { - // Set the value and mark it initialized - self.value.set(Box::into_raw(Box::new(f()))); - *initialized = true; - } - } - // SAFETY: We always initialize the value before this and it's only written to once - unsafe { &*self.value.get() } - } - } - - // SAFETY: `OnceLock` doesn't implement `Clone` so this doesn't risk dropping the `Box` - // multiple times - impl Drop for OnceLock { - fn drop(&mut self) { - if *self.init.read().unwrap() { - unsafe { drop(Box::from_raw(self.value.get())) } - } - } - } - } - #[rustversion::before(1.70)] - pub use before_1_70_oncelock::OnceLock; - #[rustversion::since(1.70)] - pub use std::sync::OnceLock; -} +pub use spin::Once as OnceLock; +#[rustversion::since(1.70)] #[cfg(feature = "std")] -pub use std_oncelock::OnceLock; +pub use std::sync::OnceLock; #[cfg(not(feature = "std"))] pub use spin::Lazy as LazyLock; +#[rustversion::before(1.80)] #[cfg(feature = "std")] -mod std_lazylock { - #[rustversion::before(1.80)] - mod before_1_80_lazylock { - use core::ops::Deref; - use crate::sync::{Mutex, OnceLock}; - - /// Shim for `std::sync::LazyLock`. - pub struct LazyLock T> { - f: Mutex>, - once: OnceLock, - } - impl T> LazyLock { - /// Shim for `std::sync::LazyLock::new`. - pub const fn new(f: F) -> Self { - Self { f: Mutex::new(Some(f)), once: OnceLock::new() } - } - /// Shim for `std::sync::LazyLock::get`. 
- pub fn get(&self) -> &T { - // Since this initializer will only be called once, the value in the Mutex will be `Some` - self.once.get_or_init(|| (self.f.lock().take().unwrap())()) - } - } - impl T> Deref for LazyLock { - type Target = T; - fn deref(&self) -> &T { - self.get() - } - } - } - #[rustversion::before(1.80)] - pub use before_1_80_lazylock::LazyLock; - #[rustversion::since(1.80)] - pub use std::sync::LazyLock; -} +pub use spin::Lazy as LazyLock; +#[rustversion::since(1.80)] #[cfg(feature = "std")] -pub use std_lazylock::LazyLock; +pub use std::sync::LazyLock; From ddbc32de4d4b622899a0cbf7ef460ac75ecc980d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 18:14:55 -0400 Subject: [PATCH 100/116] Update ciphersuite/dkg MSRVs --- Cargo.toml | 1 + crypto/ciphersuite/Cargo.toml | 2 +- crypto/ciphersuite/src/lib.rs | 7 +++++-- crypto/dkg/Cargo.toml | 2 +- crypto/dkg/dealer/Cargo.toml | 2 +- crypto/dkg/recovery/Cargo.toml | 2 +- crypto/dkg/src/lib.rs | 2 ++ 7 files changed, 12 insertions(+), 6 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index db9f078f..9cc9db61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -160,6 +160,7 @@ directories-next = { path = "patches/directories-next" } [workspace.lints.clippy] unwrap_or_default = "allow" manual_is_multiple_of = "allow" +incompatible_msrv = "allow" # Manually verified with a GitHub workflow borrow_as_ptr = "deny" cast_lossless = "deny" cast_possible_truncation = "deny" diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 9fcf60a6..9542aaa4 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite authors = ["Luke Parker "] keywords = ["ciphersuite", "ff", "group"] edition = "2021" -rust-version = "1.74" +rust-version = "1.73" [package.metadata.docs.rs] all-features = true diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index fd0c9194..02c30aed 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -3,8 +3,11 @@ #![cfg_attr(not(feature = "std"), no_std)] use core::fmt::Debug; -#[cfg(any(feature = "alloc", feature = "std"))] -use std_shims::io::{self, Read}; +#[allow(unused_imports)] +use std_shims::{ + prelude::*, + io::{self, Read}, +}; use rand_core::{RngCore, CryptoRng}; diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index d3529b7f..c9f2642b 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.74" +rust-version = "1.73" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/dealer/Cargo.toml b/crypto/dkg/dealer/Cargo.toml index 2790e7d6..78b37167 100644 --- a/crypto/dkg/dealer/Cargo.toml +++ b/crypto/dkg/dealer/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/dealer" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.74" +rust-version = "1.73" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml index 68aefc4c..9c85d701 100644 --- a/crypto/dkg/recovery/Cargo.toml +++ b/crypto/dkg/recovery/Cargo.toml @@ -7,7 +7,7 @@ repository = 
"https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recover authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.74" +rust-version = "1.73" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 064a6a10..bb1aa560 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -6,6 +6,8 @@ use core::{ ops::Deref, fmt::{self, Debug}, }; +#[allow(unused_imports)] +use std_shims::prelude::*; use std_shims::{sync::Arc, vec, vec::Vec, collections::HashMap, io}; use zeroize::{Zeroize, Zeroizing}; From c8ef044acb836812d7b5f0069dd2239b6fb41be9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 21:01:14 -0400 Subject: [PATCH 101/116] Version bump std-shims --- Cargo.lock | 2 +- common/std-shims/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 88ef9312..6cef2eab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9692,7 +9692,7 @@ dependencies = [ [[package]] name = "std-shims" -version = "0.1.3" +version = "0.1.4" dependencies = [ "hashbrown 0.14.5", "rustversion", diff --git a/common/std-shims/Cargo.toml b/common/std-shims/Cargo.toml index 0525f515..da46f6fd 100644 --- a/common/std-shims/Cargo.toml +++ b/common/std-shims/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "std-shims" -version = "0.1.3" +version = "0.1.4" description = "A series of std shims to make alloc more feasible" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/common/std-shims" From d407e35cee87dfa975e494b3b7dd9ebdce8a746c Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 21:42:25 -0400 Subject: [PATCH 102/116] Fix Ciphersuite feature flagging --- crypto/ciphersuite/Cargo.toml | 2 +- crypto/ciphersuite/src/lib.rs | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 9542aaa4..e2af42aa 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -24,7 +24,7 @@ rand_core = { version = "0.6", default-features = false } zeroize = { version = "^1.5", default-features = false, features = ["derive"] } subtle = { version = "^2.4", default-features = false } -digest = { version = "0.10", default-features = false } +digest = { version = "0.10", default-features = false, features = ["core-api"] } transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false } sha2 = { version = "0.10", default-features = false, optional = true } sha3 = { version = "0.10", default-features = false, optional = true } diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index 02c30aed..0e19b4b4 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -3,11 +3,11 @@ #![cfg_attr(not(feature = "std"), no_std)] use core::fmt::Debug; +#[cfg(any(feature = "alloc", feature = "std"))] #[allow(unused_imports)] -use std_shims::{ - prelude::*, - io::{self, Read}, -}; +use std_shims::prelude::*; +#[cfg(any(feature = "alloc", feature = "std"))] +use std_shims::io::{self, Read}; use rand_core::{RngCore, CryptoRng}; From 75964cf6da129a78bbcabf0c10f9638cb5498fe0 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 21:45:59 -0400 Subject: [PATCH 103/116] Place Schnorr signature aggregation behind a feature flag --- Cargo.lock | 2 +- coordinator/Cargo.toml | 2 +- crypto/schnorr/Cargo.toml | 3 ++- 
crypto/schnorr/src/lib.rs | 1 + crypto/schnorr/src/tests/mod.rs | 9 +++++---- 5 files changed, 10 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6cef2eab..91e51d24 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7845,7 +7845,7 @@ dependencies = [ [[package]] name = "schnorr-signatures" -version = "0.5.1" +version = "0.5.2" dependencies = [ "ciphersuite", "dalek-ff-group", diff --git a/coordinator/Cargo.toml b/coordinator/Cargo.toml index 1067fbb0..aa1c489d 100644 --- a/coordinator/Cargo.toml +++ b/coordinator/Cargo.toml @@ -26,7 +26,7 @@ blake2 = { version = "0.10", default-features = false, features = ["std"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std", "recommended"] } ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] } -schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std"] } +schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std", "aggregate"] } dkg-musig = { path = "../crypto/dkg/musig", default-features = false, features = ["std"] } frost = { package = "modular-frost", path = "../crypto/frost" } frost-schnorrkel = { path = "../crypto/schnorrkel" } diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 2ea04f5b..8cc11325 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "schnorr-signatures" -version = "0.5.1" +version = "0.5.2" description = "Minimal Schnorr signatures crate hosting common code" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr" @@ -39,5 +39,6 @@ dalek-ff-group = { path = "../dalek-ff-group" } ciphersuite = { path = "../ciphersuite", features = ["ed25519"] } [features] +aggregate = [] std = ["std-shims/std", "rand_core/std", "zeroize/std", "transcript/std", "ciphersuite/std", "multiexp/std"] default = ["std"] diff --git a/crypto/schnorr/src/lib.rs b/crypto/schnorr/src/lib.rs index ecca87f7..e5b9f3c2 100644 --- a/crypto/schnorr/src/lib.rs +++ b/crypto/schnorr/src/lib.rs @@ -25,6 +25,7 @@ use ciphersuite::{ use multiexp::{multiexp_vartime, BatchVerifier}; /// Half-aggregation from . +#[cfg(feature = "aggregate")] pub mod aggregate; #[cfg(test)] diff --git a/crypto/schnorr/src/tests/mod.rs b/crypto/schnorr/src/tests/mod.rs index 47bd9bc3..97d569db 100644 --- a/crypto/schnorr/src/tests/mod.rs +++ b/crypto/schnorr/src/tests/mod.rs @@ -9,10 +9,9 @@ use ciphersuite::{ }; use multiexp::BatchVerifier; -use crate::{ - SchnorrSignature, - aggregate::{SchnorrAggregator, SchnorrAggregate}, -}; +use crate::SchnorrSignature; +#[cfg(feature = "aggregate")] +use crate::aggregate::{SchnorrAggregator, SchnorrAggregate}; mod rfc8032; @@ -77,6 +76,7 @@ pub(crate) fn batch_verify() { } } +#[cfg(feature = "aggregate")] pub(crate) fn aggregate() { const DST: &[u8] = b"Schnorr Aggregator Test"; @@ -117,5 +117,6 @@ fn test() { sign::(); verify::(); batch_verify::(); + #[cfg(feature = "aggregate")] aggregate::(); } From 8a1b56a9282efda6e8f1ca92d4b92da7af0e27ef Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 21:50:58 -0400 Subject: [PATCH 104/116] Make the transcript dependency optional for schnorr-signatures It's only required when aggregating. 
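For clarity, a sketch of the intended consumer split, assuming the dependency is imported under
the `schnorr` rename used elsewhere in this workspace: verification-only users take the default
features and no longer pull in flexible-transcript, while aggregating users (such as the
coordinator) enable the `aggregate` feature and regain the half-aggregation module.

  // Hypothetical consumer built with `features = ["aggregate"]` on the schnorr-signatures
  // dependency; without that feature, only the plain signature types are available and
  // flexible-transcript stays out of the dependency tree.
  use schnorr::{
    SchnorrSignature,
    aggregate::{SchnorrAggregator, SchnorrAggregate},
  };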
--- crypto/schnorr/Cargo.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 8cc11325..96abb069 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -23,7 +23,7 @@ rand_core = { version = "0.6", default-features = false } zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } -transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false } +transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false, optional = true } ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } multiexp = { path = "../multiexp", version = "0.4", default-features = false, features = ["batch"] } @@ -39,6 +39,6 @@ dalek-ff-group = { path = "../dalek-ff-group" } ciphersuite = { path = "../ciphersuite", features = ["ed25519"] } [features] -aggregate = [] -std = ["std-shims/std", "rand_core/std", "zeroize/std", "transcript/std", "ciphersuite/std", "multiexp/std"] +aggregate = ["transcript"] +std = ["std-shims/std", "rand_core/std", "zeroize/std", "transcript?/std", "ciphersuite/std", "multiexp/std"] default = ["std"] From 17c1d5cd6b9a0327808168010367074ee9f71a15 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 22:28:59 -0400 Subject: [PATCH 105/116] Tweak multiexp to Zeroize points when invoked in constant time, not just scalars --- Cargo.lock | 2 +- crypto/multiexp/Cargo.toml | 2 +- crypto/multiexp/src/batch.rs | 6 +++--- crypto/multiexp/src/lib.rs | 6 +++++- crypto/multiexp/src/pippenger.rs | 3 ++- crypto/multiexp/src/straus.rs | 5 +++-- crypto/multiexp/src/tests/batch.rs | 2 +- crypto/multiexp/src/tests/mod.rs | 4 ++-- 8 files changed, 18 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 91e51d24..3f95bf34 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5119,7 +5119,7 @@ dependencies = [ [[package]] name = "multiexp" -version = "0.4.0" +version = "0.4.1" dependencies = [ "dalek-ff-group", "ff", diff --git a/crypto/multiexp/Cargo.toml b/crypto/multiexp/Cargo.toml index 228b85ab..3f5f7f21 100644 --- a/crypto/multiexp/Cargo.toml +++ b/crypto/multiexp/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "multiexp" -version = "0.4.0" +version = "0.4.1" description = "Multiexponentiation algorithms for ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/multiexp" diff --git a/crypto/multiexp/src/batch.rs b/crypto/multiexp/src/batch.rs index 8016047d..ea8044dd 100644 --- a/crypto/multiexp/src/batch.rs +++ b/crypto/multiexp/src/batch.rs @@ -12,7 +12,7 @@ use crate::{multiexp, multiexp_vartime}; // Flatten the contained statements to a single Vec. // Wrapped in Zeroizing in case any of the included statements contain private values. #[allow(clippy::type_complexity)] -fn flat + Zeroize>( +fn flat>( slice: &[(Id, Vec<(G::Scalar, G)>)], ) -> Zeroizing> { Zeroizing::new(slice.iter().flat_map(|pairs| pairs.1.iter()).copied().collect::>()) @@ -21,11 +21,11 @@ fn flat + Zeroize /// A batch verifier intended to verify a series of statements are each equivalent to zero. #[allow(clippy::type_complexity)] #[derive(Clone, Zeroize)] -pub struct BatchVerifier + Zeroize>( +pub struct BatchVerifier>( Zeroizing)>>, ); -impl + Zeroize> +impl> BatchVerifier { /// Create a new batch verifier, expected to verify the following amount of statements. 
diff --git a/crypto/multiexp/src/lib.rs b/crypto/multiexp/src/lib.rs index 604d0fd6..8b16aa91 100644 --- a/crypto/multiexp/src/lib.rs +++ b/crypto/multiexp/src/lib.rs @@ -5,6 +5,8 @@ #[cfg(not(feature = "std"))] #[macro_use] extern crate alloc; +#[allow(unused_imports)] +use std_shims::prelude::*; use std_shims::vec::Vec; use zeroize::Zeroize; @@ -175,7 +177,9 @@ fn algorithm(len: usize) -> Algorithm { /// Performs a multiexponentiation, automatically selecting the optimal algorithm based on the /// amount of pairs. -pub fn multiexp>(pairs: &[(G::Scalar, G)]) -> G { +pub fn multiexp>( + pairs: &[(G::Scalar, G)], +) -> G { match algorithm(pairs.len()) { Algorithm::Null => Group::identity(), Algorithm::Single => pairs[0].1 * pairs[0].0, diff --git a/crypto/multiexp/src/pippenger.rs b/crypto/multiexp/src/pippenger.rs index 3660b7b2..76b161ba 100644 --- a/crypto/multiexp/src/pippenger.rs +++ b/crypto/multiexp/src/pippenger.rs @@ -7,7 +7,7 @@ use crate::prep_bits; // Pippenger's algorithm for multiexponentiation, as published in the SIAM Journal on Computing // DOI: 10.1137/0209022 -pub(crate) fn pippenger>( +pub(crate) fn pippenger>( pairs: &[(G::Scalar, G)], window: u8, ) -> G { @@ -25,6 +25,7 @@ pub(crate) fn pippenger>( for p in 0 .. bits.len() { buckets[usize::from(bits[p][n])] += pairs[p].1; } + buckets.zeroize(); let mut intermediate_sum = G::identity(); for b in (1 .. buckets.len()).rev() { diff --git a/crypto/multiexp/src/straus.rs b/crypto/multiexp/src/straus.rs index f576c973..638b2827 100644 --- a/crypto/multiexp/src/straus.rs +++ b/crypto/multiexp/src/straus.rs @@ -24,12 +24,12 @@ fn prep_tables(pairs: &[(G::Scalar, G)], window: u8) -> Vec> { // Straus's algorithm for multiexponentiation, as published in The American Mathematical Monthly // DOI: 10.2307/2310929 -pub(crate) fn straus>( +pub(crate) fn straus>( pairs: &[(G::Scalar, G)], window: u8, ) -> G { let mut groupings = prep_bits(pairs, window); - let tables = prep_tables(pairs, window); + let mut tables = prep_tables(pairs, window); let mut res = G::identity(); for b in (0 .. groupings[0].len()).rev() { @@ -45,6 +45,7 @@ pub(crate) fn straus>( } groupings.zeroize(); + tables.zeroize(); res } diff --git a/crypto/multiexp/src/tests/batch.rs b/crypto/multiexp/src/tests/batch.rs index 2e78a5dc..09c04c74 100644 --- a/crypto/multiexp/src/tests/batch.rs +++ b/crypto/multiexp/src/tests/batch.rs @@ -9,7 +9,7 @@ use group::Group; use crate::BatchVerifier; -pub(crate) fn test_batch + Zeroize>() { +pub(crate) fn test_batch>() { let valid = |batch: BatchVerifier<_, G>| { assert!(batch.verify()); assert!(batch.verify_vartime()); diff --git a/crypto/multiexp/src/tests/mod.rs b/crypto/multiexp/src/tests/mod.rs index 3050c96e..9d5e8503 100644 --- a/crypto/multiexp/src/tests/mod.rs +++ b/crypto/multiexp/src/tests/mod.rs @@ -18,7 +18,7 @@ mod batch; use batch::test_batch; #[allow(dead_code)] -fn benchmark_internal>(straus_bool: bool) { +fn benchmark_internal>(straus_bool: bool) { let runs: usize = 20; let mut start = 0; @@ -83,7 +83,7 @@ fn benchmark_internal>(straus_bool: b } } -fn test_multiexp>() { +fn test_multiexp>() { let test = |pairs: &[_], sum| { // These should automatically determine the best algorithm assert_eq!(multiexp(pairs), sum); From 900a6612d79ae6e884b1b2a13926ff3ce3484d9a Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 19 Aug 2025 23:40:01 -0400 Subject: [PATCH 106/116] Use std-shims to reduce flexible-transcript MSRV to 1.66 flexible-transcript already had a shim to support <1.66. 
This was irrelevant since flexible-transcript had a MSRV of 1.73. Due to how clunky it was, it has been removed despite theoretically enabling an even lower MSRV. --- Cargo.lock | 4 ++-- crypto/ciphersuite/Cargo.toml | 2 +- crypto/dkg/Cargo.toml | 2 +- crypto/dkg/dealer/Cargo.toml | 2 +- crypto/dkg/recovery/Cargo.toml | 2 +- crypto/transcript/Cargo.toml | 9 ++++----- crypto/transcript/src/lib.rs | 26 +++----------------------- crypto/transcript/src/tests.rs | 2 ++ 8 files changed, 15 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3f95bf34..5ca04615 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2698,13 +2698,13 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flexible-transcript" -version = "0.3.3" +version = "0.3.4" dependencies = [ "blake2", "digest 0.10.7", "merlin", - "rustversion", "sha2", + "std-shims", "subtle", "zeroize", ] diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index e2af42aa..ee2b103c 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite authors = ["Luke Parker "] keywords = ["ciphersuite", "ff", "group"] edition = "2021" -rust-version = "1.73" +rust-version = "1.66" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index c9f2642b..4802034d 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.73" +rust-version = "1.66" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/dealer/Cargo.toml b/crypto/dkg/dealer/Cargo.toml index 78b37167..0b1b81bf 100644 --- a/crypto/dkg/dealer/Cargo.toml +++ b/crypto/dkg/dealer/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/dealer" authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.73" +rust-version = "1.66" [package.metadata.docs.rs] all-features = true diff --git a/crypto/dkg/recovery/Cargo.toml b/crypto/dkg/recovery/Cargo.toml index 9c85d701..db09125b 100644 --- a/crypto/dkg/recovery/Cargo.toml +++ b/crypto/dkg/recovery/Cargo.toml @@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recover authors = ["Luke Parker "] keywords = ["dkg", "multisig", "threshold", "ff", "group"] edition = "2021" -rust-version = "1.73" +rust-version = "1.66" [package.metadata.docs.rs] all-features = true diff --git a/crypto/transcript/Cargo.toml b/crypto/transcript/Cargo.toml index 39d84bb9..b15c009e 100644 --- a/crypto/transcript/Cargo.toml +++ b/crypto/transcript/Cargo.toml @@ -1,13 +1,13 @@ [package] name = "flexible-transcript" -version = "0.3.3" +version = "0.3.4" description = "A simple transcript trait definition, along with viable options" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/transcript" authors = ["Luke Parker "] keywords = ["transcript"] edition = "2021" -rust-version = "1.73" +rust-version = "1.66" [package.metadata.docs.rs] all-features = true @@ -17,7 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"] workspace = true [dependencies] -rustversion = "1" +std-shims = { path = "../../common/std-shims", version = "0.1.4", default-features = 
false } subtle = { version = "^2.4", default-features = false } zeroize = { version = "^1.5", default-features = false } @@ -32,8 +32,7 @@ sha2 = { version = "0.10", default-features = false } blake2 = { version = "0.10", default-features = false } [features] -std = ["subtle/std", "zeroize/std", "digest/std", "blake2?/std", "merlin?/std"] +std = ["std-shims/std", "subtle/std", "zeroize/std", "digest/std", "blake2?/std", "merlin?/std"] recommended = ["blake2"] -merlin = ["dep:merlin"] tests = [] default = ["std"] diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index 3956f51d..988ab039 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -2,6 +2,9 @@ #![doc = include_str!("../README.md")] #![no_std] +#[allow(unused_imports)] +use std_shims::prelude::*; + use zeroize::Zeroize; use digest::{ @@ -159,35 +162,12 @@ where // These writes may be optimized out if they're never read // Attempt to get them marked as read - #[rustversion::since(1.66)] fn mark_read(transcript: &DigestTranscript) { // Just get a challenge from the state let mut challenge = core::hint::black_box(transcript.0.clone().finalize()); challenge.as_mut().zeroize(); } - #[rustversion::before(1.66)] - fn mark_read(transcript: &mut DigestTranscript) { - // Get a challenge - let challenge = transcript.0.clone().finalize(); - - // Attempt to use subtle's, non-exposed black_box function, by creating a Choice from this - // challenge - - let mut read = 0; - for byte in challenge.as_ref() { - read ^= byte; - } - challenge.as_mut().zeroize(); - - // Since this Choice isn't further read, its creation may be optimized out, including its - // internal black_box - // This remains our best attempt - let mut choice = bool::from(subtle::Choice::from(read >> 7)); - read.zeroize(); - choice.zeroize(); - } - mark_read(self) } } diff --git a/crypto/transcript/src/tests.rs b/crypto/transcript/src/tests.rs index ce5a0a1c..2308d5a5 100644 --- a/crypto/transcript/src/tests.rs +++ b/crypto/transcript/src/tests.rs @@ -1,6 +1,8 @@ use crate::Transcript; /// Test the sanity of a transcript. +/// +/// This will panic if sanity checks fail. 
pub fn test_transcript>() { // Ensure distinct names cause distinct challenges { From 2bc2ca6906291faf72e2c8f89ca0d287049e1e33 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 20 Aug 2025 00:06:07 -0400 Subject: [PATCH 107/116] Implement FromUniformBytes<64> for dalek_ff_group::Scalar --- Cargo.lock | 4 ++-- crypto/ciphersuite/Cargo.toml | 2 +- crypto/dalek-ff-group/Cargo.toml | 2 +- crypto/dalek-ff-group/src/lib.rs | 8 +++++++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5ca04615..4352ad49 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1527,7 +1527,7 @@ dependencies = [ [[package]] name = "ciphersuite" -version = "0.4.1" +version = "0.4.2" dependencies = [ "dalek-ff-group", "digest 0.10.7", @@ -1980,7 +1980,7 @@ dependencies = [ [[package]] name = "dalek-ff-group" -version = "0.4.2" +version = "0.4.3" dependencies = [ "crypto-bigint", "curve25519-dalek", diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index ee2b103c..3562289b 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ciphersuite" -version = "0.4.1" +version = "0.4.2" description = "Ciphersuites built around ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite" diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index 24b28fcc..0869d11e 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dalek-ff-group" -version = "0.4.2" +version = "0.4.3" description = "ff/group bindings around curve25519-dalek" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-group" diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index b24b1e56..d56f117b 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -30,7 +30,7 @@ use dalek::{ pub use constants::{ED25519_BASEPOINT_TABLE, RISTRETTO_BASEPOINT_TABLE}; use group::{ - ff::{Field, PrimeField, FieldBits, PrimeFieldBits}, + ff::{Field, PrimeField, FieldBits, PrimeFieldBits, FromUniformBytes}, Group, GroupEncoding, prime::PrimeGroup, }; @@ -322,6 +322,12 @@ impl PrimeFieldBits for Scalar { } } +impl FromUniformBytes<64> for Scalar { + fn from_uniform_bytes(bytes: &[u8; 64]) -> Self { + Self::from_bytes_mod_order_wide(bytes) + } +} + impl Sum for Scalar { fn sum>(iter: I) -> Scalar { Self(DScalar::sum(iter)) From 38bda1d586bff0040baeb84109ee59cf67f4a56b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 20 Aug 2025 00:23:39 -0400 Subject: [PATCH 108/116] dalek_ff_group::FieldElement: FromUniformBytes<64> --- crypto/dalek-ff-group/src/field.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index e0ef473a..3d49d63f 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -17,7 +17,7 @@ use crypto_bigint::{ impl_modulus, }; -use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; +use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits, FromUniformBytes}; use crate::{u8_from_bool, constant_time, math_op, math}; @@ -311,6 +311,12 @@ impl FieldElement { } } +impl FromUniformBytes<64> for FieldElement { + fn from_uniform_bytes(bytes: &[u8; 64]) -> Self { + Self::wide_reduce(*bytes) + } +} + impl Sum for FieldElement { fn sum>(iter: I) -> FieldElement { let mut res = 
FieldElement::ZERO; From 677a2e5749f4581d2f7cb84783758ab4d4207a9f Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 20 Aug 2025 00:35:56 -0400 Subject: [PATCH 109/116] Fix zeroization timeline in multiexp, cargo machete --- Cargo.lock | 1 - crypto/multiexp/Cargo.toml | 2 +- crypto/multiexp/src/pippenger.rs | 3 ++- crypto/transcript/Cargo.toml | 3 +-- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4352ad49..667eb608 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2705,7 +2705,6 @@ dependencies = [ "merlin", "sha2", "std-shims", - "subtle", "zeroize", ] diff --git a/crypto/multiexp/Cargo.toml b/crypto/multiexp/Cargo.toml index 3f5f7f21..fcf32f85 100644 --- a/crypto/multiexp/Cargo.toml +++ b/crypto/multiexp/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "multiexp" -version = "0.4.1" +version = "0.4.2" description = "Multiexponentiation algorithms for ff/group" license = "MIT" repository = "https://github.com/serai-dex/serai/tree/develop/crypto/multiexp" diff --git a/crypto/multiexp/src/pippenger.rs b/crypto/multiexp/src/pippenger.rs index 76b161ba..faf9edc2 100644 --- a/crypto/multiexp/src/pippenger.rs +++ b/crypto/multiexp/src/pippenger.rs @@ -25,13 +25,14 @@ pub(crate) fn pippenger>( for p in 0 .. bits.len() { buckets[usize::from(bits[p][n])] += pairs[p].1; } - buckets.zeroize(); let mut intermediate_sum = G::identity(); for b in (1 .. buckets.len()).rev() { intermediate_sum += buckets[b]; res += intermediate_sum; } + + buckets.zeroize(); } bits.zeroize(); diff --git a/crypto/transcript/Cargo.toml b/crypto/transcript/Cargo.toml index b15c009e..dc8b340d 100644 --- a/crypto/transcript/Cargo.toml +++ b/crypto/transcript/Cargo.toml @@ -19,7 +19,6 @@ workspace = true [dependencies] std-shims = { path = "../../common/std-shims", version = "0.1.4", default-features = false } -subtle = { version = "^2.4", default-features = false } zeroize = { version = "^1.5", default-features = false } digest = { version = "0.10", default-features = false, features = ["core-api"] } @@ -32,7 +31,7 @@ sha2 = { version = "0.10", default-features = false } blake2 = { version = "0.10", default-features = false } [features] -std = ["std-shims/std", "subtle/std", "zeroize/std", "digest/std", "blake2?/std", "merlin?/std"] +std = ["std-shims/std", "zeroize/std", "digest/std", "blake2?/std", "merlin?/std"] recommended = ["blake2"] tests = [] default = ["std"] From 8be03a8fc2bfce25335bc7e54cc799b348f8229e Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 20 Aug 2025 01:15:56 -0400 Subject: [PATCH 110/116] Fix dirty lockfile --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 667eb608..9e549614 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5118,7 +5118,7 @@ dependencies = [ [[package]] name = "multiexp" -version = "0.4.1" +version = "0.4.2" dependencies = [ "dalek-ff-group", "ff", From b63ef32864c238e6d14ed8e68779e8abc9753124 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 20 Aug 2025 04:50:37 -0400 Subject: [PATCH 111/116] Smash Ciphersuite definitions into their own crates Uses dalek-ff-group for Ed25519 and Ristretto. Uses minimal-ed448 for Ed448. Adds ciphersuite-kp256 for Secp256k1 and P-256. 
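The downstream change is mechanical: the `Ciphersuite` trait stays in `ciphersuite`, while each
concrete suite is imported from the crate implementing it, as the diffs below show for Ristretto.
(Secp256k1/P-256 now come from the new `ciphersuite-kp256` crate and Ed448 from `minimal-ed448`.)

  // Before: concrete suites were re-exported by the ciphersuite crate itself.
  //   use ciphersuite::{Ciphersuite, Ristretto};
  // After: the trait comes from ciphersuite, the suite from its backing crate.
  use dalek_ff_group::Ristretto;
  use ciphersuite::Ciphersuite;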
--- .github/workflows/crypto-tests.yml | 1 + Cargo.lock | 42 ++++++++++++-- Cargo.toml | 1 + common/std-shims/src/sync.rs | 9 --- coordinator/Cargo.toml | 1 + coordinator/src/main.rs | 3 +- coordinator/src/substrate/cosign.rs | 3 +- coordinator/src/substrate/mod.rs | 3 +- coordinator/src/tests/tributary/chain.rs | 3 +- coordinator/src/tests/tributary/dkg.rs | 3 +- coordinator/src/tests/tributary/mod.rs | 3 +- coordinator/src/tests/tributary/sync.rs | 3 +- coordinator/src/tributary/db.rs | 3 +- coordinator/src/tributary/handle.rs | 3 +- coordinator/src/tributary/mod.rs | 3 +- coordinator/src/tributary/scanner.rs | 3 +- coordinator/src/tributary/signing_protocol.rs | 3 +- coordinator/src/tributary/spec.rs | 3 +- coordinator/src/tributary/transaction.rs | 3 +- coordinator/tributary/Cargo.toml | 3 +- coordinator/tributary/src/blockchain.rs | 3 +- coordinator/tributary/src/lib.rs | 3 +- coordinator/tributary/src/mempool.rs | 3 +- coordinator/tributary/src/tendermint/mod.rs | 3 +- coordinator/tributary/src/tendermint/tx.rs | 3 +- coordinator/tributary/src/tests/block.rs | 4 +- coordinator/tributary/src/tests/blockchain.rs | 3 +- coordinator/tributary/src/tests/mempool.rs | 3 +- .../tributary/src/tests/transaction/mod.rs | 3 +- .../tributary/src/tests/transaction/signed.rs | 3 +- .../src/tests/transaction/tendermint.rs | 3 +- coordinator/tributary/src/transaction.rs | 3 +- crypto/ciphersuite/Cargo.toml | 29 ---------- crypto/ciphersuite/README.md | 6 ++ crypto/ciphersuite/kp256/Cargo.toml | 55 +++++++++++++++++++ crypto/ciphersuite/kp256/LICENSE | 21 +++++++ crypto/ciphersuite/kp256/README.md | 3 + .../{src/kp256.rs => kp256/src/lib.rs} | 13 ++--- crypto/ciphersuite/src/lib.md | 2 +- crypto/ciphersuite/src/lib.rs | 19 ------- crypto/dalek-ff-group/Cargo.toml | 6 +- .../src/ciphersuite.rs} | 12 +--- crypto/dalek-ff-group/src/lib.rs | 3 + crypto/dkg/Cargo.toml | 3 - crypto/dkg/musig/Cargo.toml | 2 +- crypto/dkg/musig/src/tests.rs | 3 +- crypto/dkg/pedpop/Cargo.toml | 2 +- crypto/dkg/pedpop/src/tests.rs | 3 +- crypto/dkg/promote/Cargo.toml | 2 +- crypto/dkg/promote/src/tests.rs | 3 +- crypto/ed448/Cargo.toml | 6 +- .../src/ed448.rs => ed448/src/ciphersuite.rs} | 14 +++-- crypto/ed448/src/lib.rs | 3 + crypto/frost/Cargo.toml | 13 +++-- crypto/frost/src/curve/dalek.rs | 2 +- crypto/frost/src/curve/ed448.rs | 5 +- crypto/frost/src/curve/kp256.rs | 2 +- crypto/schnorr/Cargo.toml | 2 +- crypto/schnorr/src/tests/mod.rs | 3 +- crypto/schnorr/src/tests/rfc8032.rs | 4 +- crypto/schnorrkel/Cargo.toml | 3 +- crypto/schnorrkel/src/lib.rs | 7 ++- message-queue/Cargo.toml | 3 +- message-queue/src/client.rs | 3 +- message-queue/src/main.rs | 3 +- message-queue/src/messages.rs | 3 +- orchestration/Cargo.toml | 3 +- orchestration/src/coordinator.rs | 3 +- orchestration/src/main.rs | 3 +- orchestration/src/message_queue.rs | 3 +- orchestration/src/processor.rs | 3 +- orchestration/src/serai.rs | 3 +- processor/Cargo.toml | 9 +-- processor/src/networks/ethereum.rs | 3 +- processor/src/tests/literal/mod.rs | 3 +- substrate/client/Cargo.toml | 6 +- substrate/client/src/networks/monero.rs | 3 +- .../client/tests/common/genesis_liquidity.rs | 3 +- .../client/tests/common/validator_sets.rs | 3 +- .../validator-sets/primitives/Cargo.toml | 3 +- .../validator-sets/primitives/src/lib.rs | 3 +- tests/coordinator/Cargo.toml | 4 +- tests/coordinator/src/lib.rs | 3 +- tests/coordinator/src/tests/batch.rs | 5 +- tests/coordinator/src/tests/key_gen.rs | 4 +- tests/coordinator/src/tests/rotation.rs | 2 +- 
tests/coordinator/src/tests/sign.rs | 2 +- tests/message-queue/Cargo.toml | 3 +- tests/message-queue/src/lib.rs | 3 +- tests/no-std/Cargo.toml | 7 ++- tests/no-std/src/lib.rs | 1 + tests/processor/Cargo.toml | 4 +- tests/processor/src/lib.rs | 3 +- tests/processor/src/networks.rs | 9 +-- tests/processor/src/tests/mod.rs | 3 +- 95 files changed, 322 insertions(+), 184 deletions(-) create mode 100644 crypto/ciphersuite/kp256/Cargo.toml create mode 100644 crypto/ciphersuite/kp256/LICENSE create mode 100644 crypto/ciphersuite/kp256/README.md rename crypto/ciphersuite/{src/kp256.rs => kp256/src/lib.rs} (96%) rename crypto/{ciphersuite/src/dalek.rs => dalek-ff-group/src/ciphersuite.rs} (90%) rename crypto/{ciphersuite/src/ed448.rs => ed448/src/ciphersuite.rs} (91%) diff --git a/.github/workflows/crypto-tests.yml b/.github/workflows/crypto-tests.yml index cf3f00b4..b6af2664 100644 --- a/.github/workflows/crypto-tests.yml +++ b/.github/workflows/crypto-tests.yml @@ -32,6 +32,7 @@ jobs: -p dalek-ff-group \ -p minimal-ed448 \ -p ciphersuite \ + -p ciphersuite-kp256 \ -p multiexp \ -p schnorr-signatures \ -p dleq \ diff --git a/Cargo.lock b/Cargo.lock index 9e549614..0ab8cab5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1529,22 +1529,30 @@ dependencies = [ name = "ciphersuite" version = "0.4.2" dependencies = [ - "dalek-ff-group", "digest 0.10.7", - "elliptic-curve", "ff", "ff-group-tests", "flexible-transcript", "group", "hex", + "rand_core", + "std-shims", + "subtle", + "zeroize", +] + +[[package]] +name = "ciphersuite-kp256" +version = "0.4.0" +dependencies = [ + "ciphersuite", + "elliptic-curve", + "ff-group-tests", + "hex", "k256", - "minimal-ed448", "p256", "rand_core", "sha2", - "sha3", - "std-shims", - "subtle", "zeroize", ] @@ -1982,14 +1990,17 @@ dependencies = [ name = "dalek-ff-group" version = "0.4.3" dependencies = [ + "ciphersuite", "crypto-bigint", "curve25519-dalek", "digest 0.10.7", "ff", "ff-group-tests", "group", + "hex", "rand_core", "rustversion", + "sha2", "subtle", "zeroize", ] @@ -2237,6 +2248,7 @@ name = "dkg-musig" version = "0.6.0" dependencies = [ "ciphersuite", + "dalek-ff-group", "dkg", "dkg-recovery", "multiexp", @@ -2252,6 +2264,7 @@ version = "0.6.0" dependencies = [ "chacha20", "ciphersuite", + "dalek-ff-group", "dkg", "dleq", "flexible-transcript", @@ -2267,6 +2280,7 @@ name = "dkg-promote" version = "0.6.1" dependencies = [ "ciphersuite", + "dalek-ff-group", "dkg", "dkg-recovery", "dleq", @@ -2918,6 +2932,7 @@ name = "frost-schnorrkel" version = "0.2.0" dependencies = [ "ciphersuite", + "dalek-ff-group", "flexible-transcript", "group", "modular-frost", @@ -4814,6 +4829,7 @@ dependencies = [ name = "minimal-ed448" version = "0.4.1" dependencies = [ + "ciphersuite", "crypto-bigint", "ff", "ff-group-tests", @@ -4822,6 +4838,7 @@ dependencies = [ "hex", "rand_core", "rustversion", + "sha3", "subtle", "zeroize", ] @@ -4885,6 +4902,7 @@ name = "modular-frost" version = "0.10.1" dependencies = [ "ciphersuite", + "ciphersuite-kp256", "dalek-ff-group", "digest 0.10.7", "dkg", @@ -8049,6 +8067,7 @@ dependencies = [ "bitcoin", "blake2", "ciphersuite", + "dalek-ff-group", "dkg-musig", "dockertest", "frame-system", @@ -8109,6 +8128,7 @@ dependencies = [ "blake2", "borsh", "ciphersuite", + "dalek-ff-group", "dkg-musig", "env_logger", "flexible-transcript", @@ -8142,6 +8162,8 @@ dependencies = [ "blake2", "borsh", "ciphersuite", + "ciphersuite-kp256", + "dalek-ff-group", "dkg", "dockertest", "hex", @@ -8349,6 +8371,7 @@ version = "0.1.0" dependencies = [ "borsh", "ciphersuite", 
+ "dalek-ff-group", "env_logger", "flexible-transcript", "hex", @@ -8369,6 +8392,7 @@ name = "serai-message-queue-tests" version = "0.1.0" dependencies = [ "ciphersuite", + "dalek-ff-group", "dockertest", "hex", "rand_core", @@ -8385,6 +8409,7 @@ version = "0.1.0" dependencies = [ "bitcoin-serai", "ciphersuite", + "ciphersuite-kp256", "dalek-ff-group", "dkg", "dkg-dealer", @@ -8448,6 +8473,7 @@ name = "serai-orchestrator" version = "0.0.1" dependencies = [ "ciphersuite", + "dalek-ff-group", "flexible-transcript", "hex", "home", @@ -8536,7 +8562,9 @@ dependencies = [ "bitcoin-serai", "borsh", "ciphersuite", + "ciphersuite-kp256", "curve25519-dalek", + "dalek-ff-group", "dkg", "dockertest", "ethereum-serai", @@ -8671,6 +8699,7 @@ version = "0.1.0" dependencies = [ "borsh", "ciphersuite", + "dalek-ff-group", "dkg-musig", "parity-scale-codec", "scale-info", @@ -10419,6 +10448,7 @@ dependencies = [ "async-trait", "blake2", "ciphersuite", + "dalek-ff-group", "flexible-transcript", "futures-channel", "futures-util", diff --git a/Cargo.toml b/Cargo.toml index 9cc9db61..87838f91 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ members = [ "crypto/dalek-ff-group", "crypto/ed448", "crypto/ciphersuite", + "crypto/ciphersuite/kp256", "crypto/multiexp", diff --git a/common/std-shims/src/sync.rs b/common/std-shims/src/sync.rs index 0945a125..b25bfc61 100644 --- a/common/std-shims/src/sync.rs +++ b/common/std-shims/src/sync.rs @@ -25,15 +25,6 @@ mod mutex_shim { } pub use mutex_shim::{ShimMutex as Mutex, MutexGuard}; -#[cfg(not(feature = "std"))] -pub use spin::Once as OnceLock; -#[rustversion::before(1.70)] -#[cfg(feature = "std")] -pub use spin::Once as OnceLock; -#[rustversion::since(1.70)] -#[cfg(feature = "std")] -pub use std::sync::OnceLock; - #[cfg(not(feature = "std"))] pub use spin::Lazy as LazyLock; #[rustversion::before(1.80)] diff --git a/coordinator/Cargo.toml b/coordinator/Cargo.toml index aa1c489d..6f2b3c24 100644 --- a/coordinator/Cargo.toml +++ b/coordinator/Cargo.toml @@ -25,6 +25,7 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] } blake2 = { version = "0.10", default-features = false, features = ["std"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std", "recommended"] } +dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"] } ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] } schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std", "aggregate"] } dkg-musig = { path = "../crypto/dkg/musig", default-features = false, features = ["std"] } diff --git a/coordinator/src/main.rs b/coordinator/src/main.rs index adcc49ef..58a3b6d0 100644 --- a/coordinator/src/main.rs +++ b/coordinator/src/main.rs @@ -8,12 +8,13 @@ use std::{ use zeroize::{Zeroize, Zeroizing}; use rand_core::OsRng; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ ff::{Field, PrimeField}, GroupEncoding, }, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr::SchnorrSignature; use frost::Participant; diff --git a/coordinator/src/substrate/cosign.rs b/coordinator/src/substrate/cosign.rs index 644ddf13..403729e3 100644 --- a/coordinator/src/substrate/cosign.rs +++ b/coordinator/src/substrate/cosign.rs @@ -14,7 +14,8 @@ use zeroize::Zeroizing; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use 
borsh::{BorshSerialize, BorshDeserialize}; diff --git a/coordinator/src/substrate/mod.rs b/coordinator/src/substrate/mod.rs index a10806a3..2e295c76 100644 --- a/coordinator/src/substrate/mod.rs +++ b/coordinator/src/substrate/mod.rs @@ -6,7 +6,8 @@ use std::{ use zeroize::Zeroizing; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use serai_client::{ coins::CoinsEvent, diff --git a/coordinator/src/tests/tributary/chain.rs b/coordinator/src/tests/tributary/chain.rs index 62feb78b..38314b46 100644 --- a/coordinator/src/tests/tributary/chain.rs +++ b/coordinator/src/tests/tributary/chain.rs @@ -7,9 +7,10 @@ use zeroize::Zeroizing; use rand_core::{RngCore, CryptoRng, OsRng}; use futures_util::{task::Poll, poll}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::Field, GroupEncoding}, - Ciphersuite, Ristretto, + Ciphersuite, }; use sp_application_crypto::sr25519; diff --git a/coordinator/src/tests/tributary/dkg.rs b/coordinator/src/tests/tributary/dkg.rs index 7999d58a..9bd606c7 100644 --- a/coordinator/src/tests/tributary/dkg.rs +++ b/coordinator/src/tests/tributary/dkg.rs @@ -4,7 +4,8 @@ use std::collections::HashMap; use zeroize::Zeroizing; use rand_core::{RngCore, OsRng}; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use frost::Participant; use sp_runtime::traits::Verify; diff --git a/coordinator/src/tests/tributary/mod.rs b/coordinator/src/tests/tributary/mod.rs index 1016248d..fab2be6f 100644 --- a/coordinator/src/tests/tributary/mod.rs +++ b/coordinator/src/tests/tributary/mod.rs @@ -2,7 +2,8 @@ use core::fmt::Debug; use rand_core::{RngCore, OsRng}; -use ciphersuite::{group::Group, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::Group, Ciphersuite}; use scale::{Encode, Decode}; use serai_client::{ diff --git a/coordinator/src/tests/tributary/sync.rs b/coordinator/src/tests/tributary/sync.rs index 18f60864..e68cdfc1 100644 --- a/coordinator/src/tests/tributary/sync.rs +++ b/coordinator/src/tests/tributary/sync.rs @@ -3,7 +3,8 @@ use std::{sync::Arc, collections::HashSet}; use rand_core::OsRng; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use tokio::{ sync::{mpsc, broadcast}, diff --git a/coordinator/src/tributary/db.rs b/coordinator/src/tributary/db.rs index fe39b7de..6b3e660d 100644 --- a/coordinator/src/tributary/db.rs +++ b/coordinator/src/tributary/db.rs @@ -3,7 +3,8 @@ use std::collections::HashMap; use scale::Encode; use borsh::{BorshSerialize, BorshDeserialize}; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use frost::Participant; use serai_client::validator_sets::primitives::{KeyPair, ExternalValidatorSet}; diff --git a/coordinator/src/tributary/handle.rs b/coordinator/src/tributary/handle.rs index fbce7dd9..bb29101a 100644 --- a/coordinator/src/tributary/handle.rs +++ b/coordinator/src/tributary/handle.rs @@ -4,7 +4,8 @@ use std::collections::HashMap; use zeroize::Zeroizing; use rand_core::OsRng; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use frost::dkg::Participant; use scale::{Encode, Decode}; diff --git 
a/coordinator/src/tributary/mod.rs b/coordinator/src/tributary/mod.rs index 27bb6396..4ac8bb97 100644 --- a/coordinator/src/tributary/mod.rs +++ b/coordinator/src/tributary/mod.rs @@ -1,4 +1,5 @@ -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use serai_client::validator_sets::primitives::ExternalValidatorSet; diff --git a/coordinator/src/tributary/scanner.rs b/coordinator/src/tributary/scanner.rs index 8e1f4842..f090dccf 100644 --- a/coordinator/src/tributary/scanner.rs +++ b/coordinator/src/tributary/scanner.rs @@ -3,7 +3,8 @@ use std::{sync::Arc, collections::HashSet}; use zeroize::Zeroizing; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use tokio::sync::broadcast; diff --git a/coordinator/src/tributary/signing_protocol.rs b/coordinator/src/tributary/signing_protocol.rs index dbb61585..dc2f58a2 100644 --- a/coordinator/src/tributary/signing_protocol.rs +++ b/coordinator/src/tributary/signing_protocol.rs @@ -63,9 +63,10 @@ use rand_core::OsRng; use blake2::{Digest, Blake2s256}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::PrimeField, GroupEncoding}, - Ciphersuite, Ristretto, + Ciphersuite, }; use dkg_musig::musig; use frost::{FrostError, dkg::Participant, ThresholdKeys, sign::*}; diff --git a/coordinator/src/tributary/spec.rs b/coordinator/src/tributary/spec.rs index 345584b6..fb2732bf 100644 --- a/coordinator/src/tributary/spec.rs +++ b/coordinator/src/tributary/spec.rs @@ -3,7 +3,8 @@ use std::{io, collections::HashMap}; use transcript::{Transcript, RecommendedTranscript}; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use frost::Participant; use scale::Encode; diff --git a/coordinator/src/tributary/transaction.rs b/coordinator/src/tributary/transaction.rs index 8d8bdd4c..4d878571 100644 --- a/coordinator/src/tributary/transaction.rs +++ b/coordinator/src/tributary/transaction.rs @@ -7,9 +7,10 @@ use rand_core::{RngCore, CryptoRng}; use blake2::{Digest, Blake2s256}; use transcript::{Transcript, RecommendedTranscript}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::Field, GroupEncoding}, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr::SchnorrSignature; use frost::Participant; diff --git a/coordinator/tributary/Cargo.toml b/coordinator/tributary/Cargo.toml index b6a5a251..d4001df3 100644 --- a/coordinator/tributary/Cargo.toml +++ b/coordinator/tributary/Cargo.toml @@ -27,7 +27,8 @@ rand_chacha = { version = "0.3", default-features = false, features = ["std"] } blake2 = { version = "0.10", default-features = false, features = ["std"] } transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["std", "recommended"] } -ciphersuite = { package = "ciphersuite", path = "../../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] } +dalek-ff-group = { path = "../../crypto/dalek-ff-group" } +ciphersuite = { package = "ciphersuite", path = "../../crypto/ciphersuite", default-features = false, features = ["std"] } schnorr = { package = "schnorr-signatures", path = "../../crypto/schnorr", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] } diff --git a/coordinator/tributary/src/blockchain.rs 
b/coordinator/tributary/src/blockchain.rs index 1664860b..7cb6f69f 100644 --- a/coordinator/tributary/src/blockchain.rs +++ b/coordinator/tributary/src/blockchain.rs @@ -1,6 +1,7 @@ use std::collections::{VecDeque, HashSet}; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use serai_db::{Get, DbTxn, Db}; diff --git a/coordinator/tributary/src/lib.rs b/coordinator/tributary/src/lib.rs index 0ea74bfe..8a1ff54a 100644 --- a/coordinator/tributary/src/lib.rs +++ b/coordinator/tributary/src/lib.rs @@ -5,7 +5,8 @@ use async_trait::async_trait; use zeroize::Zeroizing; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use scale::Decode; use futures_channel::mpsc::UnboundedReceiver; diff --git a/coordinator/tributary/src/mempool.rs b/coordinator/tributary/src/mempool.rs index 7558bae0..f87958be 100644 --- a/coordinator/tributary/src/mempool.rs +++ b/coordinator/tributary/src/mempool.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use serai_db::{DbTxn, Db}; diff --git a/coordinator/tributary/src/tendermint/mod.rs b/coordinator/tributary/src/tendermint/mod.rs index 0ce6232c..07112f9f 100644 --- a/coordinator/tributary/src/tendermint/mod.rs +++ b/coordinator/tributary/src/tendermint/mod.rs @@ -11,12 +11,13 @@ use rand_chacha::ChaCha12Rng; use transcript::{Transcript, RecommendedTranscript}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ GroupEncoding, ff::{Field, PrimeField}, }, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr::{ SchnorrSignature, diff --git a/coordinator/tributary/src/tendermint/tx.rs b/coordinator/tributary/src/tendermint/tx.rs index 8af40708..9ce838fb 100644 --- a/coordinator/tributary/src/tendermint/tx.rs +++ b/coordinator/tributary/src/tendermint/tx.rs @@ -4,7 +4,8 @@ use scale::{Encode, Decode, IoReader}; use blake2::{Digest, Blake2s256}; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use crate::{ transaction::{Transaction, TransactionKind, TransactionError}, diff --git a/coordinator/tributary/src/tests/block.rs b/coordinator/tributary/src/tests/block.rs index c5bf19c6..41f1ce65 100644 --- a/coordinator/tributary/src/tests/block.rs +++ b/coordinator/tributary/src/tests/block.rs @@ -1,9 +1,11 @@ use std::{sync::Arc, io, collections::HashMap, fmt::Debug}; use blake2::{Digest, Blake2s256}; + +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::Field, Group}, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr::SchnorrSignature; diff --git a/coordinator/tributary/src/tests/blockchain.rs b/coordinator/tributary/src/tests/blockchain.rs index 6103a62f..f77ac681 100644 --- a/coordinator/tributary/src/tests/blockchain.rs +++ b/coordinator/tributary/src/tests/blockchain.rs @@ -10,7 +10,8 @@ use rand::rngs::OsRng; use blake2::{Digest, Blake2s256}; -use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::Field, Ciphersuite}; use serai_db::{DbTxn, Db, MemDb}; diff --git a/coordinator/tributary/src/tests/mempool.rs b/coordinator/tributary/src/tests/mempool.rs index 66148cf3..77a68dac 100644 --- a/coordinator/tributary/src/tests/mempool.rs +++ b/coordinator/tributary/src/tests/mempool.rs @@ -3,7 +3,8 @@ use std::{sync::Arc, collections::HashMap}; use zeroize::Zeroizing; 
use rand::{RngCore, rngs::OsRng}; -use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::Field, Ciphersuite}; use tendermint::ext::Commit; diff --git a/coordinator/tributary/src/tests/transaction/mod.rs b/coordinator/tributary/src/tests/transaction/mod.rs index 1f85947a..9784ad65 100644 --- a/coordinator/tributary/src/tests/transaction/mod.rs +++ b/coordinator/tributary/src/tests/transaction/mod.rs @@ -6,9 +6,10 @@ use rand::{RngCore, CryptoRng, rngs::OsRng}; use blake2::{Digest, Blake2s256}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::Field, Group}, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr::SchnorrSignature; diff --git a/coordinator/tributary/src/tests/transaction/signed.rs b/coordinator/tributary/src/tests/transaction/signed.rs index fee290db..aa1e250e 100644 --- a/coordinator/tributary/src/tests/transaction/signed.rs +++ b/coordinator/tributary/src/tests/transaction/signed.rs @@ -2,7 +2,8 @@ use rand::rngs::OsRng; use blake2::{Digest, Blake2s256}; -use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::Field, Ciphersuite}; use crate::{ ReadWrite, diff --git a/coordinator/tributary/src/tests/transaction/tendermint.rs b/coordinator/tributary/src/tests/transaction/tendermint.rs index 62d55b9b..ca7decc0 100644 --- a/coordinator/tributary/src/tests/transaction/tendermint.rs +++ b/coordinator/tributary/src/tests/transaction/tendermint.rs @@ -3,7 +3,8 @@ use std::sync::Arc; use zeroize::Zeroizing; use rand::{RngCore, rngs::OsRng}; -use ciphersuite::{Ristretto, Ciphersuite, group::ff::Field}; +use dalek_ff_group::Ristretto; +use ciphersuite::{Ciphersuite, group::ff::Field}; use scale::Encode; diff --git a/coordinator/tributary/src/transaction.rs b/coordinator/tributary/src/transaction.rs index 8e9342d7..a4fc616f 100644 --- a/coordinator/tributary/src/transaction.rs +++ b/coordinator/tributary/src/transaction.rs @@ -6,9 +6,10 @@ use thiserror::Error; use blake2::{Digest, Blake2b512}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{Group, GroupEncoding}, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr::SchnorrSignature; diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 3562289b..f86155aa 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -26,20 +26,10 @@ subtle = { version = "^2.4", default-features = false } digest = { version = "0.10", default-features = false, features = ["core-api"] } transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false } -sha2 = { version = "0.10", default-features = false, optional = true } -sha3 = { version = "0.10", default-features = false, optional = true } ff = { version = "0.13", default-features = false, features = ["bits"] } group = { version = "0.13", default-features = false } -dalek-ff-group = { path = "../dalek-ff-group", version = "0.4", default-features = false, optional = true } - -elliptic-curve = { version = "0.13", default-features = false, features = ["hash2curve"], optional = true } -p256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"], optional = true } -k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"], optional = true } - -minimal-ed448 = { path = "../ed448", version = "0.4", default-features = false, optional = true } - 
[dev-dependencies] hex = { version = "0.4", default-features = false, features = ["std"] } @@ -59,27 +49,8 @@ std = [ "digest/std", "transcript/std", - "sha2?/std", - "sha3?/std", "ff/std", - - "dalek-ff-group?/std", - - "elliptic-curve?/std", - "p256?/std", - "k256?/std", - "minimal-ed448?/std", ] -dalek = ["sha2", "dalek-ff-group"] -ed25519 = ["dalek"] -ristretto = ["dalek"] - -kp256 = ["sha2", "elliptic-curve"] -p256 = ["kp256", "dep:p256"] -secp256k1 = ["kp256", "k256"] - -ed448 = ["sha3", "minimal-ed448"] - default = ["std"] diff --git a/crypto/ciphersuite/README.md b/crypto/ciphersuite/README.md index bec62c18..45eefb52 100644 --- a/crypto/ciphersuite/README.md +++ b/crypto/ciphersuite/README.md @@ -21,6 +21,8 @@ Their `hash_to_F` is the [IETF's hash to curve](https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-16.html), yet applied to their scalar field. +Please see the [`ciphersuite-kp256`](https://docs.rs/ciphersuite-kp256) crate for more info. + ### Ed25519/Ristretto Ed25519/Ristretto are offered via @@ -33,6 +35,8 @@ the draft [RFC-RISTRETTO](https://www.ietf.org/archive/id/draft-irtf-cfrg-ristretto255-decaf448-05.html). The domain-separation tag is naively prefixed to the message. +Please see the [`dalek-ff-group`](https://docs.rs/dalek-ff-group) crate for more info. + ### Ed448 Ed448 is offered via [minimal-ed448](https://crates.io/crates/minimal-ed448), an @@ -42,3 +46,5 @@ to its prime-order subgroup. Its `hash_to_F` is the wide reduction of SHAKE256, with a 114-byte output, as used in [RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). The domain-separation tag is naively prefixed to the message. + +Please see the [`minimal-ed448`](https://docs.rs/minimal-ed448) crate for more info. diff --git a/crypto/ciphersuite/kp256/Cargo.toml b/crypto/ciphersuite/kp256/Cargo.toml new file mode 100644 index 00000000..22c3df85 --- /dev/null +++ b/crypto/ciphersuite/kp256/Cargo.toml @@ -0,0 +1,55 @@ +[package] +name = "ciphersuite-kp256" +version = "0.4.0" +description = "Ciphersuites built around ff/group" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite/kp256" +authors = ["Luke Parker "] +keywords = ["ciphersuite", "ff", "group"] +edition = "2021" +rust-version = "1.66" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +rand_core = { version = "0.6", default-features = false } + +zeroize = { version = "^1.5", default-features = false, features = ["derive"] } + +sha2 = { version = "0.10", default-features = false } + +elliptic-curve = { version = "0.13", default-features = false, features = ["hash2curve"] } +p256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"] } +k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"] } + +ciphersuite = { path = "../", version = "0.4", default-features = false } + +[dev-dependencies] +hex = { version = "0.4", default-features = false, features = ["std"] } + +rand_core = { version = "0.6", default-features = false, features = ["std"] } + +ff-group-tests = { version = "0.13", path = "../../ff-group-tests" } + +[features] +alloc = ["ciphersuite/alloc"] +std = [ + "rand_core/std", + + "zeroize/std", + + "sha2/std", + + "elliptic-curve/std", + "p256/std", + "k256/std", + + "ciphersuite/std", +] + +default = ["std"] diff --git a/crypto/ciphersuite/kp256/LICENSE b/crypto/ciphersuite/kp256/LICENSE new file mode 100644 index 
00000000..be67c32f --- /dev/null +++ b/crypto/ciphersuite/kp256/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021-2023 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crypto/ciphersuite/kp256/README.md b/crypto/ciphersuite/kp256/README.md new file mode 100644 index 00000000..47ca2b44 --- /dev/null +++ b/crypto/ciphersuite/kp256/README.md @@ -0,0 +1,3 @@ +# Ciphersuite {k, p}256 + +SECP256k1 and P-256 Ciphersuites around k256 and p256. diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/kp256/src/lib.rs similarity index 96% rename from crypto/ciphersuite/src/kp256.rs rename to crypto/ciphersuite/kp256/src/lib.rs index 37fdb2e4..bfe480a7 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/kp256/src/lib.rs @@ -1,16 +1,17 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + use zeroize::Zeroize; use sha2::Sha256; -use group::ff::PrimeField; - use elliptic_curve::{ generic_array::GenericArray, bigint::{NonZero, CheckedAdd, Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}, }; -use crate::Ciphersuite; +use ciphersuite::{group::ff::PrimeField, Ciphersuite}; macro_rules! kp_curve { ( @@ -107,12 +108,9 @@ fn test_oversize_dst() { /// Ciphersuite for Secp256k1. /// /// hash_to_F is implemented via the IETF draft for hash to curve's hash_to_field (v16). -#[cfg(feature = "secp256k1")] #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct Secp256k1; -#[cfg(feature = "secp256k1")] kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1"); -#[cfg(feature = "secp256k1")] #[test] fn test_secp256k1() { ff_group_tests::group::test_prime_group_bits::<_, k256::ProjectivePoint>(&mut rand_core::OsRng); @@ -145,12 +143,9 @@ fn test_secp256k1() { /// Ciphersuite for P-256. /// /// hash_to_F is implemented via the IETF draft for hash to curve's hash_to_field (v16). -#[cfg(feature = "p256")] #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct P256; -#[cfg(feature = "p256")] kp_curve!("p256", p256, P256, b"P-256"); -#[cfg(feature = "p256")] #[test] fn test_p256() { ff_group_tests::group::test_prime_group_bits::<_, p256::ProjectivePoint>(&mut rand_core::OsRng); diff --git a/crypto/ciphersuite/src/lib.md b/crypto/ciphersuite/src/lib.md index 9c6a5b4a..f00acb68 100644 --- a/crypto/ciphersuite/src/lib.md +++ b/crypto/ciphersuite/src/lib.md @@ -2,7 +2,7 @@ Ciphersuites for elliptic curves premised on ff/group. 
-This library, except for the not recommended Ed448 ciphersuite, was +This library was [audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06). diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index 0e19b4b4..93f39c66 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -26,25 +26,6 @@ use group::{ #[cfg(any(feature = "alloc", feature = "std"))] use group::GroupEncoding; -#[cfg(feature = "dalek")] -mod dalek; -#[cfg(feature = "ristretto")] -pub use dalek::Ristretto; -#[cfg(feature = "ed25519")] -pub use dalek::Ed25519; - -#[cfg(feature = "kp256")] -mod kp256; -#[cfg(feature = "secp256k1")] -pub use kp256::Secp256k1; -#[cfg(feature = "p256")] -pub use kp256::P256; - -#[cfg(feature = "ed448")] -mod ed448; -#[cfg(feature = "ed448")] -pub use ed448::*; - /// Unified trait defining a ciphersuite around an elliptic curve. pub trait Ciphersuite: 'static + Send + Sync + Clone + Copy + PartialEq + Eq + Debug + Zeroize diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index 0869d11e..e8e918b1 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -25,18 +25,22 @@ subtle = { version = "^2.4", default-features = false } rand_core = { version = "0.6", default-features = false } digest = { version = "0.10", default-features = false } +sha2 = { version = "0.10", default-features = false } ff = { version = "0.13", default-features = false, features = ["bits"] } group = { version = "0.13", default-features = false } +ciphersuite = { path = "../ciphersuite", default-features = false } crypto-bigint = { version = "0.5", default-features = false, features = ["zeroize"] } curve25519-dalek = { version = ">= 4.0, < 4.2", default-features = false, features = ["alloc", "zeroize", "digest", "group", "precomputed-tables"] } [dev-dependencies] +hex = "0.4" rand_core = { version = "0.6", default-features = false, features = ["std"] } ff-group-tests = { path = "../ff-group-tests" } [features] -std = ["zeroize/std", "subtle/std", "rand_core/std", "digest/std"] +alloc = ["zeroize/alloc", "ciphersuite/alloc"] +std = ["alloc", "zeroize/std", "subtle/std", "rand_core/std", "digest/std", "sha2/std", "ciphersuite/std"] default = ["std"] diff --git a/crypto/ciphersuite/src/dalek.rs b/crypto/dalek-ff-group/src/ciphersuite.rs similarity index 90% rename from crypto/ciphersuite/src/dalek.rs rename to crypto/dalek-ff-group/src/ciphersuite.rs index bd9c70c1..4d585423 100644 --- a/crypto/ciphersuite/src/dalek.rs +++ b/crypto/dalek-ff-group/src/ciphersuite.rs @@ -3,9 +3,9 @@ use zeroize::Zeroize; use sha2::{Digest, Sha512}; use group::Group; -use dalek_ff_group::Scalar; +use crate::Scalar; -use crate::Ciphersuite; +use ciphersuite::Ciphersuite; macro_rules! dalek_curve { ( @@ -15,7 +15,7 @@ macro_rules! dalek_curve { $Point: ident, $ID: literal ) => { - use dalek_ff_group::$Point; + use crate::$Point; impl Ciphersuite for $Ciphersuite { type F = Scalar; @@ -40,12 +40,9 @@ macro_rules! dalek_curve { /// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition /// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as /// `dst: "abcdef", data: b""`. 
Please use carefully, not letting dsts be substrings of each other. -#[cfg(any(test, feature = "ristretto"))] #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct Ristretto; -#[cfg(any(test, feature = "ristretto"))] dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto"); -#[cfg(any(test, feature = "ristretto"))] #[test] fn test_ristretto() { ff_group_tests::group::test_prime_group_bits::<_, RistrettoPoint>(&mut rand_core::OsRng); @@ -71,12 +68,9 @@ fn test_ristretto() { /// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition /// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as /// `dst: "abcdef", data: b""`. Please use carefully, not letting dsts be substrings of each other. -#[cfg(feature = "ed25519")] #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct Ed25519; -#[cfg(feature = "ed25519")] dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519"); -#[cfg(feature = "ed25519")] #[test] fn test_ed25519() { ff_group_tests::group::test_prime_group_bits::<_, EdwardsPoint>(&mut rand_core::OsRng); diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index d56f117b..87fa0f57 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -38,6 +38,9 @@ use group::{ mod field; pub use field::FieldElement; +mod ciphersuite; +pub use crate::ciphersuite::{Ed25519, Ristretto}; + // Use black_box when possible #[rustversion::since(1.66)] mod black_box { diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 4802034d..80b89ca9 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -27,9 +27,6 @@ borsh = { version = "1", default-features = false, features = ["derive", "de_str ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] } -[dev-dependencies] -ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] } - [features] std = [ "thiserror/std", diff --git a/crypto/dkg/musig/Cargo.toml b/crypto/dkg/musig/Cargo.toml index 1dfde36b..bf56794c 100644 --- a/crypto/dkg/musig/Cargo.toml +++ b/crypto/dkg/musig/Cargo.toml @@ -31,7 +31,7 @@ dkg = { path = "../", version = "0.6", default-features = false } [dev-dependencies] rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } +dalek-ff-group = { path = "../../dalek-ff-group" } dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] } [features] diff --git a/crypto/dkg/musig/src/tests.rs b/crypto/dkg/musig/src/tests.rs index a89404a1..784960f5 100644 --- a/crypto/dkg/musig/src/tests.rs +++ b/crypto/dkg/musig/src/tests.rs @@ -3,7 +3,8 @@ use std::collections::HashMap; use zeroize::Zeroizing; use rand_core::OsRng; -use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::Field, Ciphersuite}; use dkg_recovery::recover_key; use crate::*; diff --git a/crypto/dkg/pedpop/Cargo.toml b/crypto/dkg/pedpop/Cargo.toml index 375c9629..6bd0f549 100644 --- a/crypto/dkg/pedpop/Cargo.toml +++ b/crypto/dkg/pedpop/Cargo.toml @@ -34,4 +34,4 @@ dkg = { path = "../", version = "0.6", default-features = false, features = ["st [dev-dependencies] rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -ciphersuite = { path = "../../ciphersuite", 
default-features = false, features = ["ristretto"] } +dalek-ff-group = { path = "../../dalek-ff-group", default-features = false } diff --git a/crypto/dkg/pedpop/src/tests.rs b/crypto/dkg/pedpop/src/tests.rs index 483b8b3b..dc463880 100644 --- a/crypto/dkg/pedpop/src/tests.rs +++ b/crypto/dkg/pedpop/src/tests.rs @@ -2,7 +2,8 @@ use std::collections::HashMap; use rand_core::{RngCore, CryptoRng, OsRng}; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use crate::*; diff --git a/crypto/dkg/promote/Cargo.toml b/crypto/dkg/promote/Cargo.toml index 5f2ff181..6875074f 100644 --- a/crypto/dkg/promote/Cargo.toml +++ b/crypto/dkg/promote/Cargo.toml @@ -30,5 +30,5 @@ dkg = { path = "../", version = "0.6.1", default-features = false, features = [" [dev-dependencies] zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] } rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } -ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["ristretto"] } +dalek-ff-group = { path = "../../dalek-ff-group" } dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] } diff --git a/crypto/dkg/promote/src/tests.rs b/crypto/dkg/promote/src/tests.rs index a748f61d..bb5b01a8 100644 --- a/crypto/dkg/promote/src/tests.rs +++ b/crypto/dkg/promote/src/tests.rs @@ -4,9 +4,10 @@ use std::collections::HashMap; use zeroize::{Zeroize, Zeroizing}; use rand_core::OsRng; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::Field, Group}, - Ciphersuite, Ristretto, + Ciphersuite, }; use dkg::*; diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml index 7deba509..a68ef9ff 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -24,8 +24,11 @@ rand_core = { version = "0.6", default-features = false } zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } subtle = { version = "^2.4", default-features = false } +sha3 = { version = "0.10", default-features = false } + ff = { version = "0.13", default-features = false, features = ["bits"] } group = { version = "0.13", default-features = false } +ciphersuite = { path = "../ciphersuite", default-features = false } generic-array = { version = "1", default-features = false } crypto-bigint = { version = "0.5", default-features = false, features = ["zeroize"] } @@ -38,5 +41,6 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] } ff-group-tests = { path = "../ff-group-tests" } [features] -std = ["rand_core/std", "zeroize/std", "subtle/std", "ff/std"] +alloc = ["zeroize/alloc", "ciphersuite/alloc"] +std = ["alloc", "rand_core/std", "zeroize/std", "subtle/std", "sha3/std", "ff/std", "ciphersuite/std"] default = ["std"] diff --git a/crypto/ciphersuite/src/ed448.rs b/crypto/ed448/src/ciphersuite.rs similarity index 91% rename from crypto/ciphersuite/src/ed448.rs rename to crypto/ed448/src/ciphersuite.rs index 8a927251..c677bad4 100644 --- a/crypto/ciphersuite/src/ed448.rs +++ b/crypto/ed448/src/ciphersuite.rs @@ -1,15 +1,17 @@ use zeroize::Zeroize; -use digest::{ - typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput, - ExtendableOutput, XofReader, HashMarker, Digest, +use sha3::{ + digest::{ + typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput, + ExtendableOutput, XofReader, HashMarker, Digest, + }, + Shake256, }; -use sha3::Shake256; use group::Group; -use 
minimal_ed448::{Scalar, Point}; +use crate::{Scalar, Point}; -use crate::Ciphersuite; +use ciphersuite::Ciphersuite; /// Shake256, fixed to a 114-byte output, as used by Ed448. #[derive(Clone, Default)] diff --git a/crypto/ed448/src/lib.rs b/crypto/ed448/src/lib.rs index f5c70613..2fbfeb60 100644 --- a/crypto/ed448/src/lib.rs +++ b/crypto/ed448/src/lib.rs @@ -14,3 +14,6 @@ pub use field::FieldElement; mod point; pub use point::Point; + +mod ciphersuite; +pub use crate::ciphersuite::Ed448; diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index a0d52366..bb5ba759 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -34,6 +34,7 @@ dalek-ff-group = { path = "../dalek-ff-group", version = "0.4", default-features minimal-ed448 = { path = "../ed448", version = "0.4", default-features = false, features = ["std"], optional = true } ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] } +ciphersuite-kp256 = { path = "../ciphersuite/kp256", version = "0.4", default-features = false, features = ["std"], optional = true } multiexp = { path = "../multiexp", version = "0.4", default-features = false, features = ["std", "batch"] } @@ -52,12 +53,12 @@ dkg-recovery = { path = "../dkg/recovery", default-features = false, features = dkg-dealer = { path = "../dkg/dealer", default-features = false, features = ["std"] } [features] -ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"] -ristretto = ["dalek-ff-group", "ciphersuite/ristretto"] +ed25519 = ["dalek-ff-group"] +ristretto = ["dalek-ff-group"] -secp256k1 = ["ciphersuite/secp256k1"] -p256 = ["ciphersuite/p256"] +secp256k1 = ["ciphersuite-kp256"] +p256 = ["ciphersuite-kp256"] -ed448 = ["minimal-ed448", "ciphersuite/ed448"] +ed448 = ["minimal-ed448"] -tests = ["hex", "rand_core/getrandom", "dkg-dealer" ,"dkg-recovery"] +tests = ["hex", "rand_core/getrandom", "dkg-dealer", "dkg-recovery"] diff --git a/crypto/frost/src/curve/dalek.rs b/crypto/frost/src/curve/dalek.rs index 094e2004..aa97adbd 100644 --- a/crypto/frost/src/curve/dalek.rs +++ b/crypto/frost/src/curve/dalek.rs @@ -16,7 +16,7 @@ macro_rules! dalek_curve { $CONTEXT: literal, $chal: literal ) => { - pub use ciphersuite::$Curve; + pub use dalek_ff_group::$Curve; impl Curve for $Curve { const CONTEXT: &'static [u8] = $CONTEXT; diff --git a/crypto/frost/src/curve/ed448.rs b/crypto/frost/src/curve/ed448.rs index 0a5e4483..4aa5e7ae 100644 --- a/crypto/frost/src/curve/ed448.rs +++ b/crypto/frost/src/curve/ed448.rs @@ -1,7 +1,8 @@ use digest::Digest; use minimal_ed448::{Scalar, Point}; -pub use ciphersuite::{group::GroupEncoding, Shake256_114, Ed448}; +pub use minimal_ed448::Ed448; +pub use ciphersuite::{group::GroupEncoding, Ciphersuite}; use crate::{curve::Curve, algorithm::Hram}; @@ -18,7 +19,7 @@ impl Ietf8032Ed448Hram { #[allow(non_snake_case)] pub(crate) fn hram(context: &[u8], R: &Point, A: &Point, m: &[u8]) -> Scalar { Scalar::wide_reduce( - Shake256_114::digest( + ::H::digest( [ &[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(), context, diff --git a/crypto/frost/src/curve/kp256.rs b/crypto/frost/src/curve/kp256.rs index 81ccc2e0..a3b3ecc9 100644 --- a/crypto/frost/src/curve/kp256.rs +++ b/crypto/frost/src/curve/kp256.rs @@ -11,7 +11,7 @@ macro_rules! 
kp_curve { $CONTEXT: literal ) => { - pub use ciphersuite::$Curve; + pub use ciphersuite_kp256::$Curve; impl Curve for $Curve { const CONTEXT: &'static [u8] = $CONTEXT; diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 96abb069..db5c171d 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -36,7 +36,7 @@ rand_core = { version = "0.6", features = ["std"] } sha2 = "0.10" dalek-ff-group = { path = "../dalek-ff-group" } -ciphersuite = { path = "../ciphersuite", features = ["ed25519"] } +ciphersuite = { path = "../ciphersuite" } [features] aggregate = ["transcript"] diff --git a/crypto/schnorr/src/tests/mod.rs b/crypto/schnorr/src/tests/mod.rs index 97d569db..79380f81 100644 --- a/crypto/schnorr/src/tests/mod.rs +++ b/crypto/schnorr/src/tests/mod.rs @@ -3,9 +3,10 @@ use core::ops::Deref; use zeroize::Zeroizing; use rand_core::OsRng; +use dalek_ff_group::Ed25519; use ciphersuite::{ group::{ff::Field, Group}, - Ciphersuite, Ed25519, + Ciphersuite, }; use multiexp::BatchVerifier; diff --git a/crypto/schnorr/src/tests/rfc8032.rs b/crypto/schnorr/src/tests/rfc8032.rs index 418f4c0e..63b4e7a1 100644 --- a/crypto/schnorr/src/tests/rfc8032.rs +++ b/crypto/schnorr/src/tests/rfc8032.rs @@ -5,8 +5,8 @@ use sha2::{Digest, Sha512}; -use dalek_ff_group::Scalar; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ed25519}; +use dalek_ff_group::{Scalar, Ed25519}; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use crate::SchnorrSignature; diff --git a/crypto/schnorrkel/Cargo.toml b/crypto/schnorrkel/Cargo.toml index 23d87f79..81271edc 100644 --- a/crypto/schnorrkel/Cargo.toml +++ b/crypto/schnorrkel/Cargo.toml @@ -24,7 +24,8 @@ transcript = { package = "flexible-transcript", path = "../transcript", version group = "0.13" -ciphersuite = { path = "../ciphersuite", version = "^0.4.1", features = ["std", "ristretto"] } +dalek-ff-group = { path = "../dalek-ff-group" } +ciphersuite = { path = "../ciphersuite", version = "^0.4.1", features = ["std"] } schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1" } frost = { path = "../frost", package = "modular-frost", version = "^0.10.0", features = ["ristretto"] } diff --git a/crypto/schnorrkel/src/lib.rs b/crypto/schnorrkel/src/lib.rs index bb46bc02..a5a0418b 100644 --- a/crypto/schnorrkel/src/lib.rs +++ b/crypto/schnorrkel/src/lib.rs @@ -9,8 +9,11 @@ use zeroize::Zeroizing; use transcript::{Transcript, MerlinTranscript}; -use group::{ff::PrimeField, GroupEncoding}; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{ + group::{ff::PrimeField, GroupEncoding}, + Ciphersuite, +}; use schnorr::SchnorrSignature; use ::frost::{ diff --git a/message-queue/Cargo.toml b/message-queue/Cargo.toml index 9eeaa5ce..fc65e59b 100644 --- a/message-queue/Cargo.toml +++ b/message-queue/Cargo.toml @@ -30,7 +30,8 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] } # Cryptography transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std", "recommended"] } -ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] } +dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"] } +ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] } schnorr-signatures = { path = "../crypto/schnorr", default-features = false, features = ["std"] } # Application diff --git 
a/message-queue/src/client.rs b/message-queue/src/client.rs index 3aaf5a24..1b1696d8 100644 --- a/message-queue/src/client.rs +++ b/message-queue/src/client.rs @@ -3,9 +3,10 @@ use core::ops::Deref; use zeroize::{Zeroize, Zeroizing}; use rand_core::OsRng; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::ff::{Field, PrimeField}, - Ciphersuite, Ristretto, + Ciphersuite, }; use schnorr_signatures::SchnorrSignature; diff --git a/message-queue/src/main.rs b/message-queue/src/main.rs index b1c6e85b..b857ccc2 100644 --- a/message-queue/src/main.rs +++ b/message-queue/src/main.rs @@ -3,7 +3,8 @@ pub(crate) use std::{ collections::HashMap, }; -pub(crate) use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +pub(crate) use ciphersuite::{group::GroupEncoding, Ciphersuite}; pub(crate) use schnorr_signatures::SchnorrSignature; pub(crate) use serai_primitives::ExternalNetworkId; diff --git a/message-queue/src/messages.rs b/message-queue/src/messages.rs index 13c3dee0..e7c5a046 100644 --- a/message-queue/src/messages.rs +++ b/message-queue/src/messages.rs @@ -1,5 +1,6 @@ use transcript::{Transcript, RecommendedTranscript}; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use borsh::{BorshSerialize, BorshDeserialize}; diff --git a/orchestration/Cargo.toml b/orchestration/Cargo.toml index fca38066..5c62683e 100644 --- a/orchestration/Cargo.toml +++ b/orchestration/Cargo.toml @@ -23,7 +23,8 @@ rand_core = { version = "0.6", default-features = false, features = ["std", "get rand_chacha = { version = "0.3", default-features = false, features = ["std"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std", "recommended"] } -ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] } +dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"] } +ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] } zalloc = { path = "../common/zalloc" } diff --git a/orchestration/src/coordinator.rs b/orchestration/src/coordinator.rs index 26058886..32489393 100644 --- a/orchestration/src/coordinator.rs +++ b/orchestration/src/coordinator.rs @@ -2,7 +2,8 @@ use std::path::Path; use zeroize::Zeroizing; -use ciphersuite::{group::ff::PrimeField, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::PrimeField, Ciphersuite}; use crate::{Network, Os, mimalloc, os, build_serai_service, write_dockerfile}; diff --git a/orchestration/src/main.rs b/orchestration/src/main.rs index 9f0bacad..09e046a3 100644 --- a/orchestration/src/main.rs +++ b/orchestration/src/main.rs @@ -18,12 +18,13 @@ use rand_chacha::ChaCha20Rng; use transcript::{Transcript, RecommendedTranscript}; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ ff::{Field, PrimeField}, GroupEncoding, }, - Ciphersuite, Ristretto, + Ciphersuite, }; mod mimalloc; diff --git a/orchestration/src/message_queue.rs b/orchestration/src/message_queue.rs index ea97a619..e5fdb7a0 100644 --- a/orchestration/src/message_queue.rs +++ b/orchestration/src/message_queue.rs @@ -1,6 +1,7 @@ use std::path::Path; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use crate::{Network, Os, mimalloc, os, 
build_serai_service, write_dockerfile}; diff --git a/orchestration/src/processor.rs b/orchestration/src/processor.rs index cefe6455..a16cd1fc 100644 --- a/orchestration/src/processor.rs +++ b/orchestration/src/processor.rs @@ -2,7 +2,8 @@ use std::path::Path; use zeroize::Zeroizing; -use ciphersuite::{group::ff::PrimeField, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::PrimeField, Ciphersuite}; use crate::{Network, Os, mimalloc, os, build_serai_service, write_dockerfile}; diff --git a/orchestration/src/serai.rs b/orchestration/src/serai.rs index e2f96f6a..e812242a 100644 --- a/orchestration/src/serai.rs +++ b/orchestration/src/serai.rs @@ -1,7 +1,8 @@ use std::path::Path; use zeroize::Zeroizing; -use ciphersuite::{group::ff::PrimeField, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::ff::PrimeField, Ciphersuite}; use crate::{Network, Os, mimalloc, os, build_serai_service, write_dockerfile}; diff --git a/processor/Cargo.toml b/processor/Cargo.toml index d0a650d0..674e1578 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -35,7 +35,8 @@ serde_json = { version = "1", default-features = false, features = ["std"] } # Cryptography blake2 = { version = "0.10", default-features = false, features = ["std"] } -ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] } +dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"] } +ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std"] } dkg-pedpop = { path = "../crypto/dkg/pedpop", default-features = false } @@ -50,10 +51,10 @@ secp256k1 = { version = "0.29", default-features = false, features = ["std", "gl bitcoin-serai = { path = "../networks/bitcoin", default-features = false, features = ["std"], optional = true } # Ethereum +ciphersuite-kp256 = { path = "../crypto/ciphersuite/kp256", default-features = false, features = ["std"], optional = true } ethereum-serai = { path = "../networks/ethereum", default-features = false, optional = true } # Monero -dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"], optional = true } monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", default-features = false, optional = true } monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true } @@ -86,9 +87,9 @@ serai-docker-tests = { path = "../tests/docker" } secp256k1 = ["k256", "frost/secp256k1"] bitcoin = ["dep:secp256k1", "secp256k1", "bitcoin-serai", "serai-client/bitcoin"] -ethereum = ["secp256k1", "ethereum-serai/tests"] +ethereum = ["secp256k1", "ciphersuite-kp256", "ethereum-serai/tests"] -ed25519 = ["dalek-ff-group", "frost/ed25519"] +ed25519 = ["frost/ed25519"] monero = ["ed25519", "monero-simple-request-rpc", "monero-wallet", "serai-client/monero"] binaries = ["env_logger", "serai-env", "message-queue"] diff --git a/processor/src/networks/ethereum.rs b/processor/src/networks/ethereum.rs index 7aba2071..baa5d699 100644 --- a/processor/src/networks/ethereum.rs +++ b/processor/src/networks/ethereum.rs @@ -7,7 +7,8 @@ use std::{ use 
async_trait::async_trait; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Secp256k1}; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; +use ciphersuite_kp256::Secp256k1; use frost::ThresholdKeys; use ethereum_serai::{ diff --git a/processor/src/tests/literal/mod.rs b/processor/src/tests/literal/mod.rs index 2e8160ec..0474f271 100644 --- a/processor/src/tests/literal/mod.rs +++ b/processor/src/tests/literal/mod.rs @@ -286,7 +286,8 @@ mod monero { mod ethereum { use super::*; - use ciphersuite::{Ciphersuite, Secp256k1}; + use ciphersuite::Ciphersuite; + use ciphersuite_kp256::Secp256k1; use serai_client::validator_sets::primitives::Session; diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml index 9fe5c1ce..1d273695 100644 --- a/substrate/client/Cargo.toml +++ b/substrate/client/Cargo.toml @@ -38,6 +38,7 @@ simple-request = { path = "../../common/request", version = "0.1", optional = tr bitcoin = { version = "0.32", optional = true } +dalek-ff-group = { path = "../../crypto/dalek-ff-group", optional = true } ciphersuite = { path = "../../crypto/ciphersuite", version = "0.4", optional = true } monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", version = "0.1.0", default-features = false, features = ["std"], optional = true } @@ -47,7 +48,8 @@ hex = "0.4" blake2 = "0.10" -ciphersuite = { path = "../../crypto/ciphersuite", features = ["ristretto"] } +dalek-ff-group = { path = "../../crypto/dalek-ff-group" } +ciphersuite = { path = "../../crypto/ciphersuite" } dkg-musig = { path = "../../crypto/dkg/musig" } frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] } schnorrkel = { path = "../../crypto/schnorrkel", package = "frost-schnorrkel" } @@ -63,7 +65,7 @@ borsh = ["serai-abi/borsh"] networks = [] bitcoin = ["networks", "dep:bitcoin"] -monero = ["networks", "ciphersuite/ed25519", "monero-wallet"] +monero = ["networks", "dalek-ff-group", "monero-wallet"] # Assumes the default usage is to use Serai as a DEX, which doesn't actually # require connecting to a Serai node diff --git a/substrate/client/src/networks/monero.rs b/substrate/client/src/networks/monero.rs index bd5e0a15..c838eebc 100644 --- a/substrate/client/src/networks/monero.rs +++ b/substrate/client/src/networks/monero.rs @@ -2,7 +2,8 @@ use core::{str::FromStr, fmt}; use scale::{Encode, Decode}; -use ciphersuite::{Ciphersuite, Ed25519}; +use dalek_ff_group::Ed25519; +use ciphersuite::Ciphersuite; use monero_wallet::address::{AddressError, Network, AddressType, MoneroAddress}; diff --git a/substrate/client/tests/common/genesis_liquidity.rs b/substrate/client/tests/common/genesis_liquidity.rs index a4b96b19..ad416a66 100644 --- a/substrate/client/tests/common/genesis_liquidity.rs +++ b/substrate/client/tests/common/genesis_liquidity.rs @@ -3,7 +3,8 @@ use std::collections::HashMap; use rand_core::{RngCore, OsRng}; use zeroize::Zeroizing; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use dkg_musig::musig; use schnorrkel::Schnorrkel; diff --git a/substrate/client/tests/common/validator_sets.rs b/substrate/client/tests/common/validator_sets.rs index 1ce1e105..609613d9 100644 --- a/substrate/client/tests/common/validator_sets.rs +++ b/substrate/client/tests/common/validator_sets.rs @@ -9,7 +9,8 @@ use sp_core::{ Pair as PairTrait, }; -use ciphersuite::{Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::Ciphersuite; use 
dkg_musig::musig; use schnorrkel::Schnorrkel; diff --git a/substrate/validator-sets/primitives/Cargo.toml b/substrate/validator-sets/primitives/Cargo.toml index 41883059..a3865a1e 100644 --- a/substrate/validator-sets/primitives/Cargo.toml +++ b/substrate/validator-sets/primitives/Cargo.toml @@ -18,7 +18,8 @@ workspace = true [dependencies] zeroize = { version = "^1.5", features = ["derive"], optional = true } -ciphersuite = { path = "../../../crypto/ciphersuite", version = "0.4", default-features = false, features = ["alloc", "ristretto"] } +dalek-ff-group = { path = "../../../crypto/dalek-ff-group", default-features = false, features = ["alloc"] } +ciphersuite = { path = "../../../crypto/ciphersuite", version = "0.4", default-features = false, features = ["alloc"] } dkg-musig = { path = "../../../crypto/dkg/musig", default-features = false } borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true } diff --git a/substrate/validator-sets/primitives/src/lib.rs b/substrate/validator-sets/primitives/src/lib.rs index 9ff9f18b..0f3722cb 100644 --- a/substrate/validator-sets/primitives/src/lib.rs +++ b/substrate/validator-sets/primitives/src/lib.rs @@ -3,7 +3,8 @@ #[cfg(feature = "std")] use zeroize::Zeroize; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto}; +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use scale::{Encode, Decode, MaxEncodedLen}; use scale_info::TypeInfo; diff --git a/tests/coordinator/Cargo.toml b/tests/coordinator/Cargo.toml index edc3c112..dced560b 100644 --- a/tests/coordinator/Cargo.toml +++ b/tests/coordinator/Cargo.toml @@ -24,7 +24,9 @@ zeroize = { version = "1", default-features = false } rand_core = { version = "0.6", default-features = false } blake2 = "0.10" -ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["ristretto", "secp256k1"] } +dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = false } +ciphersuite = { path = "../../crypto/ciphersuite", default-features = false } +ciphersuite-kp256 = { path = "../../crypto/ciphersuite/kp256", default-features = false } schnorrkel = "0.11" dkg = { path = "../../crypto/dkg", default-features = false } diff --git a/tests/coordinator/src/lib.rs b/tests/coordinator/src/lib.rs index a1efcf41..69da80da 100644 --- a/tests/coordinator/src/lib.rs +++ b/tests/coordinator/src/lib.rs @@ -14,9 +14,10 @@ use rand_core::{RngCore, OsRng}; use zeroize::Zeroizing; +use dalek_ff_group::Ristretto; use ciphersuite::{ group::{ff::PrimeField, GroupEncoding}, - Ciphersuite, Ristretto, + Ciphersuite, }; use serai_client::primitives::ExternalNetworkId; diff --git a/tests/coordinator/src/tests/batch.rs b/tests/coordinator/src/tests/batch.rs index 4fb5e858..7b36c18a 100644 --- a/tests/coordinator/src/tests/batch.rs +++ b/tests/coordinator/src/tests/batch.rs @@ -10,7 +10,10 @@ use blake2::{ digest::{consts::U32, Digest}, Blake2b, }; -use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto, Secp256k1}; + +use dalek_ff_group::Ristretto; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; +use ciphersuite_kp256::Secp256k1; use dkg::Participant; use scale::Encode; diff --git a/tests/coordinator/src/tests/key_gen.rs b/tests/coordinator/src/tests/key_gen.rs index 66aa9f5b..1fc1a634 100644 --- a/tests/coordinator/src/tests/key_gen.rs +++ b/tests/coordinator/src/tests/key_gen.rs @@ -6,10 +6,12 @@ use std::{ use zeroize::Zeroizing; use rand_core::OsRng; +use 
dalek_ff_group::Ristretto;
 use ciphersuite::{
   group::{ff::Field, GroupEncoding},
-  Ciphersuite, Ristretto, Secp256k1,
+  Ciphersuite,
 };
+use ciphersuite_kp256::Secp256k1;
 use dkg::ThresholdParams;
 use serai_client::{
diff --git a/tests/coordinator/src/tests/rotation.rs b/tests/coordinator/src/tests/rotation.rs
index c3659a9e..43d87640 100644
--- a/tests/coordinator/src/tests/rotation.rs
+++ b/tests/coordinator/src/tests/rotation.rs
@@ -1,6 +1,6 @@
 use tokio::time::{sleep, Duration};
-use ciphersuite::Secp256k1;
+use ciphersuite_kp256::Secp256k1;
 use serai_client::{
   primitives::{insecure_pair_from_name, NetworkId},
diff --git a/tests/coordinator/src/tests/sign.rs b/tests/coordinator/src/tests/sign.rs
index f6fdb6e6..50ffae41 100644
--- a/tests/coordinator/src/tests/sign.rs
+++ b/tests/coordinator/src/tests/sign.rs
@@ -5,7 +5,7 @@ use std::{
 use rand_core::{RngCore, OsRng};
-use ciphersuite::Secp256k1;
+use ciphersuite_kp256::Secp256k1;
 use dkg::Participant;
diff --git a/tests/message-queue/Cargo.toml b/tests/message-queue/Cargo.toml
index cd077e48..de4a63e7 100644
--- a/tests/message-queue/Cargo.toml
+++ b/tests/message-queue/Cargo.toml
@@ -22,7 +22,8 @@ hex = "0.4"
 zeroize = { version = "1", default-features = false }
 rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
-ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["ristretto"] }
+dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = false }
+ciphersuite = { path = "../../crypto/ciphersuite", default-features = false }
 serai-primitives = { path = "../../substrate/primitives" }
 serai-message-queue = { path = "../../message-queue" }
diff --git a/tests/message-queue/src/lib.rs b/tests/message-queue/src/lib.rs
index d59273d9..d83a66de 100644
--- a/tests/message-queue/src/lib.rs
+++ b/tests/message-queue/src/lib.rs
@@ -2,9 +2,10 @@ use std::collections::HashMap;
 use rand_core::OsRng;
+use dalek_ff_group::Ristretto;
 use ciphersuite::{
   group::{ff::Field, GroupEncoding},
-  Ciphersuite, Ristretto,
+  Ciphersuite,
 };
 use serai_primitives::{ExternalNetworkId, EXTERNAL_NETWORKS};
diff --git a/tests/no-std/Cargo.toml b/tests/no-std/Cargo.toml
index fa0649f5..c8e07f48 100644
--- a/tests/no-std/Cargo.toml
+++ b/tests/no-std/Cargo.toml
@@ -19,10 +19,11 @@ workspace = true
 [dependencies]
 flexible-transcript = { path = "../../crypto/transcript", default-features = false, features = ["recommended", "merlin"] }
-dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = false }
-minimal-ed448 = { path = "../../crypto/ed448", default-features = false }
+dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = false, features = ["alloc"] }
+minimal-ed448 = { path = "../../crypto/ed448", default-features = false, features = ["alloc"] }
-ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["alloc", "secp256k1", "p256", "ed25519", "ristretto", "ed448"] }
+ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["alloc"] }
+ciphersuite-kp256 = { path = "../../crypto/ciphersuite/kp256", default-features = false, features = ["alloc"] }
 multiexp = { path = "../../crypto/multiexp", default-features = false, features = ["batch"] }
diff --git a/tests/no-std/src/lib.rs b/tests/no-std/src/lib.rs
index fe0cff64..0ea98fe7 100644
--- a/tests/no-std/src/lib.rs
+++ b/tests/no-std/src/lib.rs
@@ -6,6 +6,7 @@ pub use dalek_ff_group;
 pub use minimal_ed448;
 pub use ciphersuite;
+pub use ciphersuite_kp256;
 pub use multiexp;
diff --git a/tests/processor/Cargo.toml b/tests/processor/Cargo.toml
index 918899a2..82073076 100644
--- a/tests/processor/Cargo.toml
+++ b/tests/processor/Cargo.toml
@@ -23,7 +23,9 @@ zeroize = { version = "1", default-features = false }
 rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
 curve25519-dalek = "4"
-ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["secp256k1", "ristretto"] }
+dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = false }
+ciphersuite = { path = "../../crypto/ciphersuite", default-features = false }
+ciphersuite-kp256 = { path = "../../crypto/ciphersuite/kp256", default-features = false }
 dkg = { path = "../../crypto/dkg", default-features = false }
 bitcoin-serai = { path = "../../networks/bitcoin" }
diff --git a/tests/processor/src/lib.rs b/tests/processor/src/lib.rs
index 108abeda..19821f83 100644
--- a/tests/processor/src/lib.rs
+++ b/tests/processor/src/lib.rs
@@ -5,7 +5,8 @@ use std::sync::{OnceLock, Mutex};
 use zeroize::Zeroizing;
 use rand_core::{RngCore, OsRng};
-use ciphersuite::{group::ff::PrimeField, Ciphersuite, Ristretto};
+use dalek_ff_group::Ristretto;
+use ciphersuite::{group::ff::PrimeField, Ciphersuite};
 use serai_client::primitives::ExternalNetworkId;
 use messages::{ProcessorMessage, CoordinatorMessage};
diff --git a/tests/processor/src/networks.rs b/tests/processor/src/networks.rs
index e6ef485c..686ac7b1 100644
--- a/tests/processor/src/networks.rs
+++ b/tests/processor/src/networks.rs
@@ -90,7 +90,7 @@ pub enum Wallet {
   },
   Ethereum {
     rpc_url: String,
-    key: ::F,
+    key: ::F,
     nonce: u64,
   },
   Monero {
@@ -149,7 +149,8 @@ impl Wallet {
       }
       ExternalNetworkId::Ethereum => {
-        use ciphersuite::{group::ff::Field, Secp256k1};
+        use ciphersuite::group::ff::Field;
+        use ciphersuite_kp256::Secp256k1;
         use ethereum_serai::alloy::{
           primitives::{U256, Address},
           simple_request_transport::SimpleRequest,
@@ -321,7 +322,7 @@ impl Wallet {
         ));
         let to_as_key = PublicKey::new(
-          ::read_G(&mut to.as_slice()).unwrap(),
+          ::read_G(&mut to.as_slice()).unwrap(),
         )
         .unwrap();
         let router_addr = {
@@ -502,7 +503,7 @@ impl Wallet {
          .unwrap()
       }
       Wallet::Ethereum { key, .. } => ExternalAddress::new(
-        ethereum_serai::crypto::address(&(ciphersuite::Secp256k1::generator() * key)).into(),
+        ethereum_serai::crypto::address(&(ciphersuite_kp256::Secp256k1::generator() * key)).into(),
       )
       .unwrap(),
       Wallet::Monero { view_pair, .. } => {
diff --git a/tests/processor/src/tests/mod.rs b/tests/processor/src/tests/mod.rs
index e2b6fcdc..6a0f76c6 100644
--- a/tests/processor/src/tests/mod.rs
+++ b/tests/processor/src/tests/mod.rs
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
-use ciphersuite::{Ciphersuite, Ristretto};
+use dalek_ff_group::Ristretto;
+use ciphersuite::Ciphersuite;
 use dockertest::DockerTest;

From 4122a0135f7c1725e8584499c288f0ab8540bdae Mon Sep 17 00:00:00 2001
From: Luke Parker
Date: Wed, 20 Aug 2025 05:20:47 -0400
Subject: [PATCH 112/116] Fix dirty Cargo.lock

---
 Cargo.lock | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Cargo.lock b/Cargo.lock
index 0ab8cab5..475b5ab0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -8510,6 +8510,7 @@ dependencies = [
  "blake2",
  "borsh",
  "ciphersuite",
+ "ciphersuite-kp256",
  "const-hex",
  "dalek-ff-group",
  "dkg-pedpop",

From 9841061b49914fe769f717e7332255c9a6d0bf65 Mon Sep 17 00:00:00 2001
From: Luke Parker
Date: Wed, 20 Aug 2025 06:38:25 -0400
Subject: [PATCH 113/116] Add missing feature in substrate/client

---
 substrate/client/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml
index 1d273695..7a3e2cdb 100644
--- a/substrate/client/Cargo.toml
+++ b/substrate/client/Cargo.toml
@@ -65,7 +65,7 @@ borsh = ["serai-abi/borsh"]
 networks = []
 bitcoin = ["networks", "dep:bitcoin"]
-monero = ["networks", "dalek-ff-group", "monero-wallet"]
+monero = ["networks", "dalek-ff-group", "ciphersuite", "monero-wallet"]
 # Assumes the default usage is to use Serai as a DEX, which doesn't actually
 # require connecting to a Serai node

From 758d422595f3d9c0ad50b23c03ad2187f1857166 Mon Sep 17 00:00:00 2001
From: Luke Parker
Date: Wed, 20 Aug 2025 08:14:00 -0400
Subject: [PATCH 114/116] Have ::zeroize yield a well-defined value

---
 Cargo.lock | 2 +-
 crypto/ed448/Cargo.toml | 2 +-
 crypto/ed448/src/point.rs | 14 +++++++++++++-
 3 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 475b5ab0..15afb24d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4827,7 +4827,7 @@ dependencies = [
 [[package]]
 name = "minimal-ed448"
-version = "0.4.1"
+version = "0.4.2"
 dependencies = [
  "ciphersuite",
  "crypto-bigint",
diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml
index a68ef9ff..bb9748a1 100644
--- a/crypto/ed448/Cargo.toml
+++ b/crypto/ed448/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "minimal-ed448"
-version = "0.4.1"
+version = "0.4.2"
 description = "Unaudited, inefficient implementation of Ed448 in Rust"
 license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ed448"
diff --git a/crypto/ed448/src/point.rs b/crypto/ed448/src/point.rs
index cd49023f..d9b47b5e 100644
--- a/crypto/ed448/src/point.rs
+++ b/crypto/ed448/src/point.rs
@@ -50,13 +50,25 @@ fn recover_x(y: FieldElement) -> CtOption {
 }
 /// Ed448 point.
-#[derive(Clone, Copy, Debug, Zeroize)]
+#[derive(Clone, Copy, Debug)]
 pub struct Point {
   x: FieldElement,
   y: FieldElement,
   z: FieldElement,
 }
+impl Zeroize for Point {
+  fn zeroize(&mut self) {
+    self.x.zeroize();
+    self.y.zeroize();
+    self.z.zeroize();
+    let identity = Self::identity();
+    self.x = identity.x;
+    self.y = identity.y;
+    self.z = identity.z;
+  }
+}
+
 const G: Point = Point { x: G_X, y: G_Y, z: FieldElement::ONE };
 impl ConstantTimeEq for Point {

From da3095ed158b6812867671d099b26cdf6db26af3 Mon Sep 17 00:00:00 2001
From: Luke Parker
Date: Fri, 22 Aug 2025 18:42:43 -0400
Subject: [PATCH 115/116] Remove `FieldElement::from_square`

The new `FieldElement::from_u256` is sufficient to load an unreduced value.
The caller can perform the square themselves, without us explicitly
supporting this special case.

Updates the monero-oxide version used to one which no longer uses
`FieldElement::from_square` (as their use is why it was added).
---
 Cargo.lock | 54 ++++++++++++++++++++----------
 crypto/dalek-ff-group/src/field.rs | 6 ----
 processor/Cargo.toml | 4 +--
 substrate/client/Cargo.toml | 2 +-
 tests/full-stack/Cargo.toml | 4 +--
 tests/processor/Cargo.toml | 4 +--
 6 files changed, 43 insertions(+), 31 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 15afb24d..821d48fe 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1889,6 +1889,7 @@ checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
 dependencies = [
  "generic-array 0.14.7",
  "rand_core",
+ "serdect",
  "subtle",
  "zeroize",
 ]
@@ -3493,7 +3494,7 @@ dependencies = [
  "httpdate",
  "itoa",
  "pin-project-lite",
- "socket2 0.5.7",
+ "socket2 0.4.10",
  "tokio",
  "tower-service",
  "tracing",
@@ -4040,7 +4041,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
 dependencies = [
  "cfg-if",
- "windows-targets 0.52.6",
+ "windows-targets 0.48.5",
 ]
 [[package]]
@@ -4924,20 +4925,27 @@ dependencies = [
 [[package]]
 name = "monero-address"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
+ "monero-base58",
  "monero-io",
- "monero-primitives",
- "std-shims",
  "thiserror 2.0.14",
  "zeroize",
 ]
+[[package]]
+name = "monero-base58"
+version = "0.1.0"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
+dependencies = [
+ "monero-primitives",
+]
+
 [[package]]
 name = "monero-borromean"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "monero-generators",
@@ -4950,7 +4958,7 @@ dependencies = [
 [[package]]
 name = "monero-bulletproofs"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "monero-generators",
@@ -4965,7 +4973,7 @@ dependencies = [
 [[package]]
 name = "monero-clsag"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "dalek-ff-group",
@@ -4986,8 +4994,9 @@ dependencies = [
 [[package]]
 name = "monero-generators"
 version = "0.4.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
+ "crypto-bigint",
  "curve25519-dalek",
  "dalek-ff-group",
  "group",
@@ -5000,7 +5009,7 @@ dependencies = [
 [[package]]
 name = "monero-io"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "std-shims",
@@ -5009,7 +5018,7 @@ dependencies = [
 [[package]]
 name = "monero-mlsag"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "monero-generators",
@@ -5023,7 +5032,7 @@ dependencies = [
 [[package]]
 name = "monero-oxide"
 version = "0.1.4-alpha"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "hex-literal",
@@ -5041,7 +5050,7 @@ dependencies = [
 [[package]]
 name = "monero-primitives"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "monero-generators",
@@ -5054,7 +5063,7 @@ dependencies = [
 [[package]]
 name = "monero-rpc"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "hex",
@@ -5070,7 +5079,7 @@ dependencies = [
 [[package]]
 name = "monero-simple-request-rpc"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "digest_auth",
  "hex",
@@ -5083,12 +5092,11 @@ dependencies = [
 [[package]]
 name = "monero-wallet"
 version = "0.1.0"
-source = "git+https://github.com/monero-oxide/monero-oxide?rev=a74f41c2270707e340a9cb57fcd97a762d04975b#a74f41c2270707e340a9cb57fcd97a762d04975b"
+source = "git+https://github.com/monero-oxide/monero-oxide?rev=32e6b5fe5ba9e1ea3e68da882550005122a11d22#32e6b5fe5ba9e1ea3e68da882550005122a11d22"
 dependencies = [
  "curve25519-dalek",
  "dalek-ff-group",
  "flexible-transcript",
- "group",
  "hex",
  "modular-frost",
  "monero-address",
@@ -8801,6 +8809,16 @@ dependencies = [
  "time",
 ]
+[[package]]
+name = "serdect"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177"
+dependencies = [
+ "base16ct",
+ "serde",
+]
+
 [[package]]
 name = "sha-1"
 version = "0.9.8"
@@ -11203,7 +11221,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.48.0",
 ]
 [[package]]
diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs
index 3d49d63f..18a372de 100644
--- a/crypto/dalek-ff-group/src/field.rs
+++ b/crypto/dalek-ff-group/src/field.rs
@@ -230,12 +230,6 @@ impl FieldElement {
     FieldElement(reduce(U512::from_le_bytes(value)))
   }
-  /// Interpret the value as a little-endian integer, square it, and reduce it into a FieldElement.
-  pub fn from_square(value: [u8; 32]) -> FieldElement {
-    let value = U256::from_le_bytes(value);
-    FieldElement(reduce(U512::from(value.mul_wide(&value))))
-  }
-
   /// Perform an exponentiation.
   pub fn pow(&self, other: FieldElement) -> FieldElement {
     let mut table = [FieldElement::ONE; 16];
diff --git a/processor/Cargo.toml b/processor/Cargo.toml
index 674e1578..d276fbe9 100644
--- a/processor/Cargo.toml
+++ b/processor/Cargo.toml
@@ -55,8 +55,8 @@ ciphersuite-kp256 = { path = "../crypto/ciphersuite/kp256", default-features = f
 ethereum-serai = { path = "../networks/ethereum", default-features = false, optional = true }
 # Monero
-monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", default-features = false, optional = true }
-monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true }
+monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22", default-features = false, optional = true }
+monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22", default-features = false, features = ["std", "multisig", "compile-time-generators"], optional = true }
 # Application
 log = { version = "0.4", default-features = false, features = ["std"] }
diff --git a/substrate/client/Cargo.toml b/substrate/client/Cargo.toml
index 7a3e2cdb..e3bfe7f9 100644
--- a/substrate/client/Cargo.toml
+++ b/substrate/client/Cargo.toml
@@ -40,7 +40,7 @@ bitcoin = { version = "0.32", optional = true }
 dalek-ff-group = { path = "../../crypto/dalek-ff-group", optional = true }
 ciphersuite = { path = "../../crypto/ciphersuite", version = "0.4", optional = true }
-monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b", version = "0.1.0", default-features = false, features = ["std"], optional = true }
+monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22", version = "0.1.0", default-features = false, features = ["std"], optional = true }
 [dev-dependencies]
 rand_core = "0.6"
diff --git a/tests/full-stack/Cargo.toml b/tests/full-stack/Cargo.toml
index aed9526a..dd5dd998 100644
--- a/tests/full-stack/Cargo.toml
+++ b/tests/full-stack/Cargo.toml
@@ -27,8 +27,8 @@ rand_core = { version = "0.6", default-features = false }
 curve25519-dalek = { version = "4", features = ["rand_core"] }
 bitcoin-serai = { path = "../../networks/bitcoin" }
-monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" }
-monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" }
+monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22" }
+monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22" }
 scale = { package = "parity-scale-codec", version = "3" }
 serde = "1"
diff --git a/tests/processor/Cargo.toml b/tests/processor/Cargo.toml
index 82073076..6d04c5c9 100644
--- a/tests/processor/Cargo.toml
+++ b/tests/processor/Cargo.toml
@@ -33,8 +33,8 @@ bitcoin-serai = { path = "../../networks/bitcoin" }
 k256 = "0.13"
 ethereum-serai = { path = "../../networks/ethereum" }
-monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" }
-monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "a74f41c2270707e340a9cb57fcd97a762d04975b" }
+monero-simple-request-rpc = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22" }
+monero-wallet = { git = "https://github.com/monero-oxide/monero-oxide", rev = "32e6b5fe5ba9e1ea3e68da882550005122a11d22" }
 messages = { package = "serai-processor-messages", path = "../../processor/messages" }

From a7c77f8b5f7d2e7a22e1d5e761f923a13552e0b9 Mon Sep 17 00:00:00 2001
From: Luke Parker
Date: Sat, 23 Aug 2025 05:17:22 -0400
Subject: [PATCH 116/116] repr(transparent) on dalek_ff_group::FieldElement

---
 Cargo.lock | 2 +-
 crypto/dalek-ff-group/Cargo.toml | 2 +-
 crypto/dalek-ff-group/src/field.rs | 1 +
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 821d48fe..4e3bec66 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1989,7 +1989,7 @@ dependencies = [
 [[package]]
 name = "dalek-ff-group"
-version = "0.4.3"
+version = "0.4.4"
 dependencies = [
  "ciphersuite",
  "crypto-bigint",
diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml
index e8e918b1..e67924d8 100644
--- a/crypto/dalek-ff-group/Cargo.toml
+++ b/crypto/dalek-ff-group/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "dalek-ff-group"
-version = "0.4.3"
+version = "0.4.4"
 description = "ff/group bindings around curve25519-dalek"
 license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-group"
diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs
index 18a372de..c21496f7 100644
--- a/crypto/dalek-ff-group/src/field.rs
+++ b/crypto/dalek-ff-group/src/field.rs
@@ -36,6 +36,7 @@ type ResidueType = Residue;
 /// A constant-time implementation of the Ed25519 field.
 #[derive(Clone, Copy, PartialEq, Eq, Default, Debug, Zeroize)]
+#[repr(transparent)]
 pub struct FieldElement(ResidueType);
 // Square root of -1.
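
Note on the migration implied by PATCH 115: the commit message states callers of the removed `FieldElement::from_square` should load the bytes with the new `FieldElement::from_u256` and perform the square themselves. The following is a minimal, hypothetical sketch of that replacement. It is not part of the series; the helper name is invented, the calls mirror those visible in the removed code above, and the exact signature of `from_u256` is assumed rather than shown here.

  // Hypothetical sketch, assuming `FieldElement::from_u256` takes a `U256` and reduces it,
  // per the PATCH 115 commit message. `U256::from_le_bytes` mirrors the removed code.
  use crypto_bigint::U256;
  use dalek_ff_group::FieldElement;

  fn square_of_unreduced_le_bytes(bytes: [u8; 32]) -> FieldElement {
    // Previously: FieldElement::from_square(bytes).
    // Reducing first and then squaring in the field yields the same element,
    // since (x mod p)^2 mod p == x^2 mod p.
    let x = FieldElement::from_u256(U256::from_le_bytes(bytes));
    x * x
  }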