Utilize zeroize (#76)

* Apply Zeroize to nonces used in Bulletproofs

Also makes bit decomposition constant time for a given number of
outputs (see the sketch below).
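
As a rough illustration of the pattern (a sketch only: it uses subtle with
curve25519_dalek's Scalar standing in for the ScalarVector/dalek_ff_group types
in the actual code, and `amounts` is a hypothetical input), each bit becomes a
Choice so no branch depends on secret data:

```rust
use subtle::{Choice, ConditionallySelectable};
use curve25519_dalek::scalar::Scalar;

const N: usize = 64; // bits per amount

// Decompose each amount into N bits without branching on the bit values.
fn bit_decompose(amounts: &[u64]) -> (Vec<Scalar>, Vec<Scalar>) {
  let mut a_l = vec![Scalar::zero(); amounts.len() * N];
  let mut a_r = vec![Scalar::zero(); amounts.len() * N];
  for (j, amount) in amounts.iter().enumerate() {
    for i in 0 .. N {
      // conditional_select picks the scalar arithmetically, without a
      // data-dependent branch
      let bit = Choice::from(((amount >> i) & 1) as u8);
      a_l[(j * N) + i] = Scalar::conditional_select(&Scalar::zero(), &Scalar::one(), bit);
      a_r[(j * N) + i] = Scalar::conditional_select(&-Scalar::one(), &Scalar::zero(), bit);
    }
  }
  (a_l, a_r)
}
```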

* Fix nonce reuse for single-signer CLSAG
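
The nonce was previously sampled once, before the loop over inputs, and then
reused for every input. A minimal sketch of the corrected shape (with a
hypothetical `sign_input` helper, not the actual CLSAG signing code) samples a
fresh nonce per input and wipes it immediately after use:

```rust
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::scalar::Scalar;

fn sign_inputs<R: RngCore + CryptoRng>(rng: &mut R, inputs: &mut [(Scalar, [u8; 32])]) {
  for (key, msg) in inputs.iter_mut() {
    // A fresh nonce for every input; reusing one across inputs leaks the key
    let mut rand_source = [0; 64];
    rng.fill_bytes(&mut rand_source);
    let mut nonce = Scalar::from_bytes_mod_order_wide(&rand_source);

    sign_input(&nonce, key, msg);

    // Wipe secrets as soon as they're no longer needed
    rand_source.zeroize();
    nonce.zeroize();
    key.zeroize();
  }
}

// Stand-in for the per-input sign_core + s-value computation
fn sign_input(_nonce: &Scalar, _key: &Scalar, _msg: &[u8; 32]) {}
```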

* Attach Zeroize to most structures in Monero, and ZeroizeOnDrop to anything with private data

* Zeroize private keys and nonces

* Merge prepare_outputs and prepare_transactions

* Ensure CLSAG is constant time
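
Concretely, the branch on the secret ring index when capturing c1 is replaced
with a constant-time selection. A reduced sketch of that pattern (standalone,
with the challenge loop elided to an iterator):

```rust
use subtle::{Choice, ConstantTimeEq, CtOption};
use curve25519_dalek::scalar::Scalar;

// Capture the challenge at ring position 0 without branching on the position.
fn select_c1(challenges: impl Iterator<Item = (usize, Scalar)>) -> CtOption<Scalar> {
  let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
  for (i, c) in challenges {
    // or_else only fills c1 the first time the (constant-time) comparison is set
    c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
  }
  c1
}
```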

* Pass by borrow where needed, bug fixes

The past few commits have been one in-progress chunk which I've broken up 
to read as well as possible.

* Add Zeroize to FROST structs

They still need to zeroize internally, yet that's the next step. This isn't 
quite as aggressive as Monero, partially due to the limitations of HashMaps, 
partially due to less concern about metadata, yet it does still delete a 
few smaller items of metadata (group key, context string...). A sketch of 
the HashMap limitation follows.
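
A map never hands out mutable references to its keys, so only the values (and
any scalar fields) can be wiped in place. A rough sketch with hypothetical
field names:

```rust
use std::collections::HashMap;
use zeroize::Zeroize;
use curve25519_dalek::scalar::Scalar;

// Hypothetical stand-in for a FROST keys struct
struct Keys {
  secret_share: Scalar,
  // Serialized verification shares, keyed by participant index
  verification_shares: HashMap<u16, Vec<u8>>,
}

impl Zeroize for Keys {
  fn zeroize(&mut self) {
    self.secret_share.zeroize();
    // Values can be zeroized in place; the keys cannot, so clearing the map
    // (dropping the entries) is the best that can be done for them
    for share in self.verification_shares.values_mut() {
      share.zeroize();
    }
    self.verification_shares.clear();
  }
}
```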

* Remove Zeroize from most Monero multisig structs

These structs largely didn't have private data themselves, just fields with 
private data, yet those fields implement ZeroizeOnDrop, making them already 
covered. While there are still traces of the transaction left in RAM, fully 
purging those was never the intent.

* Use Zeroize within dleq

bitvec doesn't offer Zeroize, so manual zeroing has been implemented; a sketch of the idea follows.
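
A minimal sketch of the manual approach (not the exact dleq code): clear every
bit in place, then drop the length.

```rust
use bitvec::vec::BitVec;
use zeroize::Zeroize;

// Hypothetical wrapper around the proof's bit decomposition
struct Bits(BitVec);

impl Zeroize for Bits {
  fn zeroize(&mut self) {
    // Overwrite every bit, then clear; BitVec itself offers no zeroize
    for i in 0 .. self.0.len() {
      self.0.set(i, false);
    }
    self.0.clear();
  }
}
```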

* Use Zeroize for random_nonce

It isn't perfect, due to the inability to zeroize the digest, and due to 
kp256 requiring a few transformations. It does the best it can though.

This does move the per-curve random_nonce to a provided one, which is allowed 
as of https://github.com/cfrg/draft-irtf-cfrg-frost/pull/231; a rough sketch follows.
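
Roughly, the provided random_nonce hashes the secret share together with fresh
entropy, so a weak RNG alone can't reveal the nonce. The following is only a
generic sketch of that shape, using sha2 and dalek's Scalar::from_hash as
stand-ins for the per-curve hashing, and it shows the stated limitation: the
digest's internal state can't be zeroized.

```rust
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use sha2::{Digest, Sha512};
use curve25519_dalek::scalar::Scalar;

fn random_nonce<R: RngCore + CryptoRng>(secret: &Scalar, rng: &mut R) -> Scalar {
  let mut entropy = [0u8; 64];
  rng.fill_bytes(&mut entropy);

  // The digest copies the secret into internal state which can't be wiped
  let mut digest = Sha512::new();
  let mut secret_bytes = secret.to_bytes();
  digest.update(&secret_bytes);
  digest.update(&entropy);
  let res = Scalar::from_hash(digest);

  // Wipe the buffers which are under our control
  secret_bytes.zeroize();
  entropy.zeroize();
  res
}
```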

* Use Zeroize on FROST keygen/signing

* Zeroize constant time multiexp.

* Correct when FROST keygen zeroizes

* Move the FROST keys Arc into FrostKeys

Reduces the number of instances in memory.
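
The rough shape of the change (field names hypothetical): the generated keys
live once behind an Arc, and FrostKeys holds that Arc plus its offset, so
cloning FrostKeys no longer duplicates the secret material.

```rust
use std::sync::Arc;
use curve25519_dalek::scalar::Scalar;

// Hypothetical immutable output of key generation
struct FrostCore {
  secret_share: Scalar,
}

// Cloning FrostKeys clones the Arc, not FrostCore, so only one copy of the
// secret share backs every instance
#[derive(Clone)]
struct FrostKeys {
  core: Arc<FrostCore>,
  // Additive offset applied on top of the core keys
  offset: Option<Scalar>,
}
```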

* Manually implement Debug for FrostCore to not leak the secret share
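
A sketch of the idea (field names hypothetical): the manual impl prints only
public material and deliberately elides the secret share, so Debug output can
never contain it.

```rust
use core::fmt;
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};

struct FrostCore {
  group_key: EdwardsPoint,
  secret_share: Scalar,
}

impl fmt::Debug for FrostCore {
  fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
    // secret_share is intentionally omitted
    fmt.debug_struct("FrostCore").field("group_key", &self.group_key).finish_non_exhaustive()
  }
}
```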

* Misc bug fixes

* clippy + multiexp test bug fixes

* Correct FROST key gen share summation

It leaked our own share for ourselves.

* Fix cross-group DLEq tests

Luke Parker, 2022-08-03 03:25:18 -05:00 (committed via GitHub)
commit 797be71eb3, parent a30568ff57
56 changed files with 698 additions and 425 deletions

View File

@@ -16,6 +16,7 @@ rand_chacha = { version = "0.3", optional = true }
rand = "0.8"
rand_distr = "0.4"
zeroize = { version = "1.3", features = ["zeroize_derive"] }
subtle = "2.4"
tiny-keccak = { version = "2", features = ["keccak"] }

View File

@@ -3,6 +3,8 @@ use std::io::Read;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use group::{Group, GroupEncoding};
@@ -29,7 +31,7 @@ fn transcript() -> RecommendedTranscript {
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
rng: &mut R,
H: EdwardsPoint,
x: Scalar,
mut x: Scalar,
) -> Vec<u8> {
let mut res = Vec::with_capacity(64);
DLEqProof::prove(
@@ -45,6 +47,7 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
)
.serialize(&mut res)
.unwrap();
x.zeroize();
res
}

View File

@@ -1,6 +1,8 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use tiny_keccak::{Hasher, Keccak};
use curve25519_dalek::{
@@ -25,7 +27,7 @@ pub mod wallet;
#[cfg(test)]
mod tests;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
Unsupported,
@@ -61,7 +63,7 @@ lazy_static! {
}
#[allow(non_snake_case)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Commitment {
pub mask: Scalar,
pub amount: u64,

View File

@@ -4,6 +4,8 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use subtle::{Choice, ConditionallySelectable};
use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;
use group::{ff::Field, Group};
@@ -99,11 +101,12 @@ pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, Scalar
for j in 0 .. M {
for i in (0 .. N).rev() {
if (j < sv.len()) && ((sv[j][i / 8] & (1u8 << (i % 8))) != 0) {
aL.0[(j * N) + i] = Scalar::one();
} else {
aR.0[(j * N) + i] = -Scalar::one();
let mut bit = Choice::from(0);
if j < sv.len() {
bit = Choice::from((sv[j][i / 8] >> (i % 8)) & 1);
}
aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::zero(), &Scalar::one(), bit);
aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::one(), &Scalar::zero(), bit);
}
}

View File

@@ -2,6 +2,8 @@
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use multiexp::BatchVerifier;
@@ -73,7 +75,7 @@ impl Bulletproofs {
}
#[must_use]
pub fn batch_verify<ID: Copy, R: RngCore + CryptoRng>(
pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,

View File

@@ -1,6 +1,8 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
use group::{ff::Field, Group};
@@ -47,11 +49,12 @@ impl OriginalStruct {
let (aL, aR) = bit_decompose(commitments);
let (mut cache, _) = hash_commitments(commitments.iter().map(Commitment::calculate));
let (alpha, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);
let (sL, sR) =
ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();
let (rho, S) = alpha_rho(&mut *rng, &GENERATORS, &sL, &sR);
let (mut alpha, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);
let (mut rho, S) = alpha_rho(&mut *rng, &GENERATORS, &sL, &sR);
let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
let mut cache = hash_to_scalar(&y.to_bytes());
@@ -72,23 +75,33 @@ impl OriginalStruct {
let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
let r1 = yMN * sR;
let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
let t2 = inner_product(&l1, &r1);
let (T1, T2, x, mut taux) = {
let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
let t2 = inner_product(&l1, &r1);
let tau1 = Scalar::random(&mut *rng);
let tau2 = Scalar::random(rng);
let mut tau1 = Scalar::random(&mut *rng);
let mut tau2 = Scalar::random(rng);
let T1 = prove_multiexp(&[(t1, *H), (tau1, EdwardsPoint::generator())]);
let T2 = prove_multiexp(&[(t2, *H), (tau2, EdwardsPoint::generator())]);
let T1 = prove_multiexp(&[(t1, *H), (tau1, EdwardsPoint::generator())]);
let T2 = prove_multiexp(&[(t2, *H), (tau2, EdwardsPoint::generator())]);
let x =
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
let x =
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
let taux = (tau2 * (x * x)) + (tau1 * x);
tau1.zeroize();
tau2.zeroize();
(T1, T2, x, taux)
};
let mu = (x * rho) + alpha;
alpha.zeroize();
rho.zeroize();
let mut taux = (tau2 * (x * x)) + (tau1 * x);
for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
taux += zpow[i + 2] * gamma;
}
let mu = (x * rho) + alpha;
let l = &l0 + &(l1 * x);
let r = &r0 + &(r1 * x);
@@ -155,7 +168,7 @@ impl OriginalStruct {
}
#[must_use]
fn verify_core<ID: Copy, R: RngCore + CryptoRng>(
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
@@ -284,7 +297,7 @@ impl OriginalStruct {
}
#[must_use]
pub(crate) fn batch_verify<ID: Copy, R: RngCore + CryptoRng>(
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,

View File

@@ -1,6 +1,8 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
use group::ff::Field;
@@ -109,7 +111,7 @@ impl PlusStruct {
let cL = weighted_inner_product(&aL, &bR, y);
let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);
let (dL, dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));
let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));
let (G_L, G_R) = G_proof.split_at(aL.len());
let (H_L, H_R) = H_proof.split_at(aL.len());
@@ -134,12 +136,15 @@ impl PlusStruct {
b = (bL * winv) + (bR * w);
alpha1 += (dL * (w * w)) + (dR * (winv * winv));
dL.zeroize();
dR.zeroize();
}
let r = Scalar::random(&mut *rng);
let s = Scalar::random(&mut *rng);
let d = Scalar::random(&mut *rng);
let eta = Scalar::random(rng);
let mut r = Scalar::random(&mut *rng);
let mut s = Scalar::random(&mut *rng);
let mut d = Scalar::random(&mut *rng);
let mut eta = Scalar::random(rng);
let A1 = prove_multiexp(&[
(r, G_proof[0]),
@@ -151,8 +156,13 @@ impl PlusStruct {
let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);
let r1 = (a[0] * e) + r;
r.zeroize();
let s1 = (b[0] * e) + s;
s.zeroize();
let d1 = ((d * e) + eta) + (alpha1 * (e * e));
d.zeroize();
eta.zeroize();
alpha1.zeroize();
PlusStruct {
A: *A,
@@ -167,7 +177,7 @@ impl PlusStruct {
}
#[must_use]
fn verify_core<ID: Copy, R: RngCore + CryptoRng>(
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
@@ -293,7 +303,7 @@ impl PlusStruct {
}
#[must_use]
pub(crate) fn batch_verify<ID: Copy, R: RngCore + CryptoRng>(
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,

View File

@@ -1,11 +1,13 @@
use core::ops::{Add, Sub, Mul, Index};
use zeroize::{Zeroize, ZeroizeOnDrop};
use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};
use multiexp::multiexp;
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
macro_rules! math_op {
($Op: ident, $op: ident, $f: expr) => {

View File

@@ -4,6 +4,9 @@ use lazy_static::lazy_static;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use subtle::{ConstantTimeEq, Choice, CtOption};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
@@ -45,7 +48,7 @@ pub enum ClsagError {
InvalidC1,
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagInput {
// The actual commitment for the true spend
pub commitment: Commitment,
@@ -161,11 +164,12 @@ fn core(
}
// Perform the core loop
let mut c1 = None;
let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
for i in (start .. end).map(|i| i % n) {
if i == 0 {
c1 = Some(c);
}
// This will only execute once and shouldn't need to be constant time. Making it constant time
// removes the risk of branch prediction creating timing differences depending on ring index
// however
c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
let c_p = mu_P * c;
let c_c = mu_C * c;
@@ -224,14 +228,10 @@ impl Clsag {
// Single signer CLSAG
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
mut inputs: Vec<(Scalar, EdwardsPoint, ClsagInput)>,
sum_outputs: Scalar,
msg: [u8; 32],
) -> Vec<(Clsag, EdwardsPoint)> {
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let mut res = Vec::with_capacity(inputs.len());
let mut sum_pseudo_outs = Scalar::zero();
for i in 0 .. inputs.len() {
@@ -242,8 +242,7 @@ impl Clsag {
sum_pseudo_outs += mask;
}
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let mut nonce = random_scalar(rng);
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
rng,
&inputs[i].1,
@@ -254,6 +253,8 @@ impl Clsag {
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
);
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
inputs[i].0.zeroize();
nonce.zeroize();
res.push((clsag, pseudo_out));
}

View File

@@ -7,6 +7,8 @@ use std::{
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
traits::{Identity, IsIdentity},
@@ -52,7 +54,7 @@ impl ClsagInput {
}
}
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagDetails {
input: ClsagInput,
mask: Scalar,
@@ -195,7 +197,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
);
self.interim = Some(Interim { p, c, clsag, pseudo_out });
dfg::Scalar(nonces[0].0 - (p * view.secret_share().0))
nonces[0] - (dfg::Scalar(p) * view.secret_share())
}
#[must_use]

View File

@@ -1,3 +1,5 @@
use zeroize::Zeroize;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
pub(crate) mod hash_to_point;
@@ -11,8 +13,10 @@ use crate::{
ringct::{clsag::Clsag, bulletproofs::Bulletproofs},
};
pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
secret * hash_to_point(&secret * &ED25519_BASEPOINT_TABLE)
pub fn generate_key_image(mut secret: Scalar) -> EdwardsPoint {
let res = secret * hash_to_point(&secret * &ED25519_BASEPOINT_TABLE);
secret.zeroize();
res
}
#[derive(Clone, PartialEq, Eq, Debug)]

View File

@@ -84,9 +84,11 @@ macro_rules! bulletproofs_tests {
#[test]
fn $max() {
// Check Bulletproofs errors if we try to prove for too many outputs
assert!(
Bulletproofs::prove(&mut OsRng, &[Commitment::new(Scalar::zero(), 0); 17], $plus).is_err()
);
let mut commitments = vec![];
for _ in 0 .. 17 {
commitments.push(Commitment::new(Scalar::zero(), 0));
}
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
}
};
}

View File

@@ -56,7 +56,7 @@ fn clsag() {
let image = generate_key_image(secrets[0]);
let (clsag, pseudo_out) = Clsag::sign(
&mut OsRng,
&vec![(
vec![(
secrets[0],
image,
ClsagInput::new(

View File

@@ -1,5 +1,7 @@
use core::cmp::Ordering;
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{
@@ -11,7 +13,6 @@ use crate::{
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Input {
Gen(u64),
ToKey { amount: u64, key_offsets: Vec<u64>, key_image: EdwardsPoint },
}
@@ -107,7 +108,7 @@ impl Output {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Timelock {
None,
Block(usize),

View File

@@ -2,6 +2,8 @@ use std::string::ToString;
use thiserror::Error;
use zeroize::Zeroize;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
edwards::{EdwardsPoint, CompressedEdwardsY},
@@ -11,14 +13,14 @@ use base58_monero::base58::{encode_check, decode_check};
use crate::wallet::ViewPair;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Network {
Mainnet,
Testnet,
Stagenet,
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressType {
Standard,
Integrated([u8; 8]),
@@ -35,7 +37,7 @@ impl AddressType {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct AddressMeta {
pub network: Network,
pub kind: AddressType,
@@ -91,7 +93,7 @@ impl AddressMeta {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Address {
pub meta: AddressMeta,
pub spend: EdwardsPoint,

View File

@@ -5,6 +5,8 @@ use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{
@@ -91,7 +93,7 @@ fn offset(ring: &[u64]) -> Vec<u64> {
res
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
pub i: u8,
pub offsets: Vec<u64>,

View File

@@ -1,3 +1,5 @@
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};
@@ -39,7 +41,7 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
#[allow(non_snake_case)]
pub(crate) fn shared_key(
uniqueness: Option<[u8; 32]>,
s: Scalar,
s: &Scalar,
P: &EdwardsPoint,
o: usize,
) -> (u8, Scalar) {
@@ -76,7 +78,7 @@ pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
hash_to_scalar(&mask)
}
#[derive(Clone, Copy)]
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct ViewPair {
pub spend: EdwardsPoint,
pub view: Scalar,

View File

@@ -1,5 +1,7 @@
use std::convert::TryFrom;
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::deserialize, blockdata::transaction::ExtraField};
@@ -11,7 +13,7 @@ use crate::{
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask},
};
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SpendableOutput {
pub tx: [u8; 32],
pub o: u8,
@@ -20,6 +22,7 @@ pub struct SpendableOutput {
pub commitment: Commitment,
}
#[derive(Zeroize, ZeroizeOnDrop)]
pub struct Timelocked(Timelock, Vec<SpendableOutput>);
impl Timelocked {
pub fn timelock(&self) -> Timelock {
@@ -76,7 +79,7 @@ impl SpendableOutput {
}
impl Transaction {
pub fn scan(&self, view: ViewPair, guaranteed: bool) -> Timelocked {
pub fn scan(&self, view: &ViewPair, guaranteed: bool) -> Timelocked {
let mut extra = vec![];
write_varint(&u64::try_from(self.prefix.extra.len()).unwrap(), &mut extra).unwrap();
extra.extend(&self.prefix.extra);
@@ -103,7 +106,7 @@ impl Transaction {
for pubkey in &pubkeys {
let (view_tag, key_offset) = shared_key(
Some(uniqueness(&self.prefix.inputs)).filter(|_| guaranteed),
view.view,
&view.view,
pubkey,
o,
);

View File

@@ -3,6 +3,8 @@ use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::Encodable, PublicKey, blockdata::transaction::SubField};
@@ -35,7 +37,7 @@ mod multisig;
pub use multisig::TransactionMachine;
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
struct SendOutput {
R: EdwardsPoint,
view_tag: u8,
@@ -53,7 +55,7 @@ impl SendOutput {
) -> SendOutput {
let r = random_scalar(rng);
let (view_tag, shared_key) =
shared_key(Some(unique).filter(|_| output.0.meta.guaranteed), r, &output.0.view, o);
shared_key(Some(unique).filter(|_| output.0.meta.guaranteed), &r, &output.0.view, o);
let spend = output.0.spend;
SendOutput {
@@ -127,7 +129,8 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
signable.push((
spend + input.key_offset,
generate_key_image(spend + input.key_offset),
ClsagInput::new(input.commitment, decoys[i].clone()).map_err(TransactionError::ClsagError)?,
ClsagInput::new(input.commitment.clone(), decoys[i].clone())
.map_err(TransactionError::ClsagError)?,
));
tx.prefix.inputs.push(Input::ToKey {
@@ -161,12 +164,11 @@ impl Fee {
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SignableTransaction {
protocol: Protocol,
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
outputs: Vec<SendOutput>,
fee: u64,
}
@@ -251,80 +253,78 @@ impl SignableTransaction {
Err(TransactionError::TooManyOutputs)?;
}
Ok(SignableTransaction { protocol, inputs, payments, outputs: vec![], fee })
Ok(SignableTransaction { protocol, inputs, payments, fee })
}
fn prepare_outputs<R: RngCore + CryptoRng>(
fn prepare_transaction<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
uniqueness: [u8; 32],
) -> (Vec<Commitment>, Scalar) {
) -> (Transaction, Scalar) {
// Shuffle the payments
self.payments.shuffle(rng);
// Actually create the outputs
self.outputs = Vec::with_capacity(self.payments.len() + 1);
for (o, output) in self.payments.iter().enumerate() {
self.outputs.push(SendOutput::new(rng, uniqueness, *output, o));
}
let outputs = self
.payments
.drain(..)
.enumerate()
.map(|(o, output)| SendOutput::new(rng, uniqueness, output, o))
.collect::<Vec<_>>();
let commitments = self.outputs.iter().map(|output| output.commitment).collect::<Vec<_>>();
let commitments = outputs.iter().map(|output| output.commitment.clone()).collect::<Vec<_>>();
let sum = commitments.iter().map(|commitment| commitment.mask).sum();
(commitments, sum)
}
fn prepare_transaction<R: RngCore + CryptoRng>(
&self,
rng: &mut R,
commitments: &[Commitment],
) -> Transaction {
// Safe due to the constructor checking MAX_OUTPUTS
let bp = Bulletproofs::prove(rng, commitments, self.protocol.bp_plus()).unwrap();
let bp = Bulletproofs::prove(rng, &commitments, self.protocol.bp_plus()).unwrap();
// Create the TX extra
// TODO: Review this for canonicity with Monero
let mut extra = vec![];
SubField::TxPublicKey(PublicKey { point: self.outputs[0].R.compress() })
SubField::TxPublicKey(PublicKey { point: outputs[0].R.compress() })
.consensus_encode(&mut extra)
.unwrap();
SubField::AdditionalPublickKey(
self.outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect(),
outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect(),
)
.consensus_encode(&mut extra)
.unwrap();
let mut tx_outputs = Vec::with_capacity(self.outputs.len());
let mut ecdh_info = Vec::with_capacity(self.outputs.len());
for o in 0 .. self.outputs.len() {
let mut tx_outputs = Vec::with_capacity(outputs.len());
let mut ecdh_info = Vec::with_capacity(outputs.len());
for output in &outputs {
tx_outputs.push(Output {
amount: 0,
key: self.outputs[o].dest,
view_tag: Some(self.outputs[o].view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
key: output.dest,
view_tag: Some(output.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
});
ecdh_info.push(self.outputs[o].amount);
ecdh_info.push(output.amount);
}
Transaction {
prefix: TransactionPrefix {
version: 2,
timelock: Timelock::None,
inputs: vec![],
outputs: tx_outputs,
extra,
},
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
(
Transaction {
prefix: TransactionPrefix {
version: 2,
timelock: Timelock::None,
inputs: vec![],
outputs: tx_outputs,
extra,
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![],
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![],
},
},
},
}
sum,
)
}
pub async fn sign<R: RngCore + CryptoRng>(
@@ -335,16 +335,17 @@ impl SignableTransaction {
) -> Result<Transaction, TransactionError> {
let mut images = Vec::with_capacity(self.inputs.len());
for input in &self.inputs {
let offset = spend + input.key_offset;
let mut offset = spend + input.key_offset;
if (&offset * &ED25519_BASEPOINT_TABLE) != input.key {
Err(TransactionError::WrongPrivateKey)?;
}
images.push(generate_key_image(offset));
offset.zeroize();
}
images.sort_by(key_image_sort);
let (commitments, mask_sum) = self.prepare_outputs(
let (mut tx, mask_sum) = self.prepare_transaction(
rng,
uniqueness(
&images
@@ -354,12 +355,10 @@ impl SignableTransaction {
),
);
let mut tx = self.prepare_transaction(rng, &commitments);
let signable =
prepare_inputs(rng, rpc, self.protocol.ring_len(), &self.inputs, spend, &mut tx).await?;
let clsag_pairs = Clsag::sign(rng, &signable, mask_sum, tx.signature_hash());
let clsag_pairs = Clsag::sign(rng, signable, mask_sum, tx.signature_hash());
match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {

View File

@@ -125,7 +125,7 @@ impl SignableTransaction {
AlgorithmMachine::new(
ClsagMultisig::new(transcript.clone(), input.key, inputs[i].clone())
.map_err(TransactionError::MultisigError)?,
Arc::new(offset),
offset,
&included,
)
.map_err(TransactionError::FrostError)?,
@@ -283,25 +283,18 @@ impl SignMachine<Transaction> for TransactionSignMachine {
}
// Create the actual transaction
let output_masks;
let mut tx = {
let (mut tx, output_masks) = {
let mut sorted_images = images.clone();
sorted_images.sort_by(key_image_sort);
let commitments;
(commitments, output_masks) = self.signable.prepare_outputs(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys")),
self.signable.prepare_transaction(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"transaction_keys_bulletproofs")),
uniqueness(
&images
&sorted_images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
);
self.signable.prepare_transaction(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"bulletproofs")),
&commitments,
)
};
@@ -338,7 +331,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
});
*value.3.write().unwrap() = Some(ClsagDetails::new(
ClsagInput::new(value.1.commitment, value.2).map_err(|_| {
ClsagInput::new(value.1.commitment.clone(), value.2).map_err(|_| {
panic!("Signing an input which isn't present in the ring we created for it")
})?,
mask,

View File

@@ -101,7 +101,7 @@ async fn send_core(test: usize, multisig: bool) {
// Grab the largest output available
let output = {
let mut outputs = tx.as_ref().unwrap().scan(view_pair, false).ignore_timelock();
let mut outputs = tx.as_ref().unwrap().scan(&view_pair, false).ignore_timelock();
outputs.sort_by(|x, y| x.commitment.amount.cmp(&y.commitment.amount).reverse());
outputs.swap_remove(0)
};
@@ -126,7 +126,7 @@ async fn send_core(test: usize, multisig: bool) {
for i in (start + 1) .. (start + 9) {
let tx = rpc.get_block_transactions(i).await.unwrap().swap_remove(0);
let output = tx.scan(view_pair, false).ignore_timelock().swap_remove(0);
let output = tx.scan(&view_pair, false).ignore_timelock().swap_remove(0);
amount += output.commitment.amount;
outputs.push(output);
}
@@ -154,7 +154,7 @@ async fn send_core(test: usize, multisig: bool) {
.clone()
.multisig(
&rpc,
(*keys[&i]).clone(),
keys[&i].clone(),
RecommendedTranscript::new(b"Monero Serai Test Transaction"),
rpc.get_height().await.unwrap() - 10,
(1 ..= THRESHOLD).collect::<Vec<_>>(),