Apply an initial set of rustfmt rules

Luke Parker
2022-07-15 01:26:07 -04:00
parent 0b879a53fa
commit e67033a207
67 changed files with 1983 additions and 1796 deletions

View File

@@ -1,13 +1,23 @@
use std::{env, path::Path, process::Command};
fn main() {
if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
if !Command::new("git")
.args(&["submodule", "update", "--init", "--recursive"])
.status()
.unwrap()
.success()
{
panic!("git failed to init submodules");
}
if !Command::new("mkdir").args(&["-p", ".build"])
.current_dir(&Path::new("c")).status().unwrap().success() {
panic!("failed to create a directory to track build progress");
if !Command::new("mkdir")
.args(&["-p", ".build"])
.current_dir(&Path::new("c"))
.status()
.unwrap()
.success()
{
panic!("failed to create a directory to track build progress");
}
let out_dir = &env::var("OUT_DIR").unwrap();
@@ -16,18 +26,29 @@ fn main() {
// If the signaling file was deleted, run this script again to rebuild Monero though
println!("cargo:rerun-if-changed=c/.build/monero");
if !Path::new("c/.build/monero").exists() {
if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
panic!("make failed to build Monero. Please check your dependencies");
if !Command::new("make")
.arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
.current_dir(&Path::new("c/monero"))
.status()
.unwrap()
.success()
{
panic!("make failed to build Monero. Please check your dependencies");
}
if !Command::new("touch").arg("monero")
.current_dir(&Path::new("c/.build")).status().unwrap().success() {
panic!("failed to create a file to label Monero as built");
if !Command::new("touch")
.arg("monero")
.current_dir(&Path::new("c/.build"))
.status()
.unwrap()
.success()
{
panic!("failed to create a file to label Monero as built");
}
}
println!("cargo:rerun-if-changed=c/wrapper.cpp");
#[rustfmt::skip]
cc::Build::new()
.static_flag(true)
.warnings(false)

View File

@@ -1,7 +1,4 @@
use crate::{
serialize::*,
transaction::Transaction
};
use crate::{serialize::*, transaction::Transaction};
#[derive(Clone, PartialEq, Debug)]
pub struct BlockHeader {
@@ -9,7 +6,7 @@ pub struct BlockHeader {
pub minor_version: u64,
pub timestamp: u64,
pub previous: [u8; 32],
pub nonce: u32
pub nonce: u32,
}
impl BlockHeader {
@@ -22,15 +19,21 @@ impl BlockHeader {
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
Ok(
BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: { let mut previous = [0; 32]; r.read_exact(&mut previous)?; previous },
nonce: { let mut nonce = [0; 4]; r.read_exact(&mut nonce)?; u32::from_le_bytes(nonce) }
}
)
Ok(BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: {
let mut previous = [0; 32];
r.read_exact(&mut previous)?;
previous
},
nonce: {
let mut nonce = [0; 4];
r.read_exact(&mut nonce)?;
u32::from_le_bytes(nonce)
},
})
}
}
@@ -38,7 +41,7 @@ impl BlockHeader {
pub struct Block {
pub header: BlockHeader,
pub miner_tx: Transaction,
pub txs: Vec<[u8; 32]>
pub txs: Vec<[u8; 32]>,
}
impl Block {
@@ -53,14 +56,15 @@ impl Block {
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
Ok(
Block {
header: BlockHeader::deserialize(r)?,
miner_tx: Transaction::deserialize(r)?,
txs: (0 .. read_varint(r)?).map(
|_| { let mut tx = [0; 32]; r.read_exact(&mut tx).map(|_| tx) }
).collect::<Result<_, _>>()?
}
)
Ok(Block {
header: BlockHeader::deserialize(r)?,
miner_tx: Transaction::deserialize(r)?,
txs: (0 .. read_varint(r)?)
.map(|_| {
let mut tx = [0; 32];
r.read_exact(&mut tx).map(|_| tx)
})
.collect::<Result<_, _>>()?,
})
}
}
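
As a quick sanity check on the reformatted deserialization above, here is a minimal sketch that feeds BlockHeader::deserialize hand-built bytes: three one-byte varints (values below 128 encode as themselves), a zeroed 32-byte previous-block hash, and a little-endian u32 nonce. The monero_serai crate name and block module path are assumptions of this sketch.

use std::io::Cursor;

// Hypothetical import path for this sketch.
use monero_serai::block::BlockHeader;

fn main() -> std::io::Result<()> {
  // major_version = 14, minor_version = 14, timestamp = 0, zeroed previous hash, nonce = 42.
  let mut bytes = vec![14u8, 14, 0];
  bytes.extend([0u8; 32]);
  bytes.extend(42u32.to_le_bytes());

  let header = BlockHeader::deserialize(&mut Cursor::new(bytes))?;
  assert_eq!(header.major_version, 14);
  assert_eq!(header.previous, [0; 32]);
  assert_eq!(header.nonce, 42);
  Ok(())
}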

View File

@@ -18,7 +18,7 @@ pub enum MultisigError {
#[error("invalid discrete log equality proof")]
InvalidDLEqProof(u16),
#[error("invalid key image {0}")]
InvalidKeyImage(u16)
InvalidKeyImage(u16),
}
fn transcript() -> RecommendedTranscript {
@@ -29,7 +29,7 @@ fn transcript() -> RecommendedTranscript {
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
rng: &mut R,
H: EdwardsPoint,
x: Scalar
x: Scalar,
) -> Vec<u8> {
let mut res = Vec::with_capacity(64);
DLEqProof::prove(
@@ -41,8 +41,10 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
// merge later in some form, when it should instead just merge xH (as it does)
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
dfg::Scalar(x)
).serialize(&mut res).unwrap();
dfg::Scalar(x),
)
.serialize(&mut res)
.unwrap();
res
}
@@ -51,26 +53,22 @@ pub(crate) fn read_dleq<Re: Read>(
serialized: &mut Re,
H: EdwardsPoint,
l: u16,
xG: dfg::EdwardsPoint
xG: dfg::EdwardsPoint,
) -> Result<dfg::EdwardsPoint, MultisigError> {
let mut bytes = [0; 32];
serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(
dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
)?;
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or(MultisigError::InvalidDLEqProof(l))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(MultisigError::InvalidDLEqProof(l))?;
}
DLEqProof::<dfg::EdwardsPoint>::deserialize(
serialized
).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
&[xG, xH]
).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
DLEqProof::<dfg::EdwardsPoint>::deserialize(serialized)
.map_err(|_| MultisigError::InvalidDLEqProof(l))?
.verify(&mut transcript(), &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)], &[xG, xH])
.map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(xH)
}

View File

@@ -10,7 +10,7 @@ use tiny_keccak::{Hasher, Keccak};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY},
};
#[cfg(feature = "multisig")]
@@ -31,8 +31,13 @@ mod tests;
lazy_static! {
static ref H: EdwardsPoint = CompressedEdwardsY(
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
).decompress().unwrap();
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94")
.unwrap()
.try_into()
.unwrap()
)
.decompress()
.unwrap();
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
}
@@ -40,9 +45,7 @@ lazy_static! {
// need to link against libsodium
#[no_mangle]
unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
isize::from(
slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()
) - 1
isize::from(slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()) - 1
}
// Offer a wide reduction to C. Our seeded RNG prevented Monero from defining an unbiased scalar
@@ -51,9 +54,8 @@ unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
// sampling however, hence the need for this function
#[no_mangle]
unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
let res = Scalar::from_bytes_mod_order_wide(
std::slice::from_raw_parts(value, 64).try_into().unwrap()
);
let res =
Scalar::from_bytes_mod_order_wide(std::slice::from_raw_parts(value, 64).try_into().unwrap());
for (i, b) in res.to_bytes().iter().enumerate() {
value.add(i).write(*b);
}
@@ -63,12 +65,12 @@ unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Commitment {
pub mask: Scalar,
pub amount: u64
pub amount: u64,
}
impl Commitment {
pub fn zero() -> Commitment {
Commitment { mask: Scalar::one(), amount: 0}
Commitment { mask: Scalar::one(), amount: 0 }
}
pub fn new(mask: Scalar, amount: u64) -> Commitment {
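
To illustrate the Commitment type touched here, the sketch below builds one under a random mask and resolves it with calculate(), which the decoy-selection code later in this commit also calls; the crate-root import path is an assumption.

use rand::rngs::OsRng;

// Hypothetical crate-root imports for this sketch.
use monero_serai::{Commitment, random_scalar};

fn main() {
  // Commit to 1 XMR (10^12 atomic units) under a freshly sampled mask.
  let commitment = Commitment::new(random_scalar(&mut OsRng), 1_000_000_000_000);
  // calculate() resolves the mask/amount pair into the actual commitment point.
  let point = commitment.calculate();
  println!("commitment: {:?}", point.compress().to_bytes());
}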

View File

@@ -20,7 +20,7 @@ pub struct Bulletproofs {
pub R: Vec<EdwardsPoint>,
pub a: Scalar,
pub b: Scalar,
pub t: Scalar
pub t: Scalar,
}
impl Bulletproofs {
@@ -38,7 +38,10 @@ impl Bulletproofs {
len + clawback
}
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
pub fn new<R: RngCore + CryptoRng>(
rng: &mut R,
outputs: &[Commitment],
) -> Result<Bulletproofs, TransactionError> {
if outputs.len() > MAX_OUTPUTS {
return Err(TransactionError::TooManyOutputs)?;
}
@@ -54,22 +57,28 @@ impl Bulletproofs {
#[link(name = "wrapper")]
extern "C" {
fn free(ptr: *const u8);
fn c_generate_bp(seed: *const u8, len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
fn c_generate_bp(
seed: *const u8,
len: u8,
amounts: *const u64,
masks: *const [u8; 32],
) -> *const u8;
}
let ptr = c_generate_bp(
seed.as_ptr(),
u8::try_from(outputs.len()).unwrap(),
amounts.as_ptr(),
masks.as_ptr()
masks.as_ptr(),
);
let mut len = 6 * 32;
len += (2 * (1 + (usize::from(ptr.add(len).read()) * 32))) + (3 * 32);
res = Bulletproofs::deserialize(
// Wrap in a cursor to provide a mutable Reader
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len))
).expect("Couldn't deserialize Bulletproofs from Monero");
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len)),
)
.expect("Couldn't deserialize Bulletproofs from Monero");
free(ptr);
};
@@ -87,9 +96,10 @@ impl Bulletproofs {
let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
self.serialize(&mut serialized).unwrap();
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes()
).collect();
let commitments: Vec<[u8; 32]> = commitments
.iter()
.map(|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes())
.collect();
unsafe {
#[link(name = "wrapper")]
@@ -99,7 +109,7 @@ impl Bulletproofs {
serialized_len: usize,
serialized: *const u8,
commitments_len: u8,
commitments: *const [u8; 32]
commitments: *const [u8; 32],
) -> bool;
}
@@ -108,15 +118,16 @@ impl Bulletproofs {
serialized.len(),
serialized.as_ptr(),
u8::try_from(commitments.len()).unwrap(),
commitments.as_ptr()
commitments.as_ptr(),
)
}
}
fn serialize_core<
W: std::io::Write,
F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
>(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
fn serialize_core<W: std::io::Write, F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>>(
&self,
w: &mut W,
specific_write_vec: F,
) -> std::io::Result<()> {
write_point(&self.A, w)?;
write_point(&self.S, w)?;
write_point(&self.T1, w)?;
@@ -150,7 +161,7 @@ impl Bulletproofs {
R: read_vec(read_point, r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
t: read_scalar(r)?
t: read_scalar(r)?,
};
if bp.L.len() != bp.R.len() {

View File

@@ -8,15 +8,12 @@ use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
traits::VartimePrecomputedMultiscalarMul,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};
use crate::{
Commitment, random_scalar, hash_to_scalar,
transaction::RING_LEN,
wallet::decoys::Decoys,
ringct::hash_to_point,
serialize::*
Commitment, random_scalar, hash_to_scalar, transaction::RING_LEN, wallet::decoys::Decoys,
ringct::hash_to_point, serialize::*,
};
#[cfg(feature = "multisig")]
@@ -41,7 +38,7 @@ pub enum ClsagError {
#[error("invalid s")]
InvalidS,
#[error("invalid c1")]
InvalidC1
InvalidC1,
}
#[derive(Clone, PartialEq, Debug)]
@@ -49,14 +46,11 @@ pub struct ClsagInput {
// The actual commitment for the true spend
pub commitment: Commitment,
// True spend index, offsets, and ring
pub decoys: Decoys
pub decoys: Decoys,
}
impl ClsagInput {
pub fn new(
commitment: Commitment,
decoys: Decoys
) -> Result<ClsagInput, ClsagError> {
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<ClsagInput, ClsagError> {
let n = decoys.len();
if n > u8::MAX.into() {
Err(ClsagError::InternalError("max ring size in this library is u8 max".to_string()))?;
@@ -78,7 +72,7 @@ impl ClsagInput {
enum Mode {
Sign(usize, EdwardsPoint, EdwardsPoint),
#[cfg(feature = "experimental")]
Verify(Scalar)
Verify(Scalar),
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
@@ -90,7 +84,7 @@ fn core(
msg: &[u8; 32],
D: &EdwardsPoint,
s: &[Scalar],
A_c1: Mode
A_c1: Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();
@@ -99,13 +93,17 @@ fn core(
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed
let mut to_hash = vec![];
to_hash.reserve_exact(((2 * n) + 5) * 32);
const PREFIX: &[u8] = "CLSAG_".as_bytes();
const AGG_0: &[u8] = "CLSAG_agg_0".as_bytes();
const ROUND: &[u8] = "round".as_bytes();
const PREFIX: &[u8] = b"CLSAG_";
#[rustfmt::skip]
const AGG_0: &[u8] = b"agg_0";
#[rustfmt::skip]
const ROUND: &[u8] = b"round";
const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();
let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
to_hash.extend(PREFIX);
to_hash.extend(AGG_0);
to_hash.extend([0; 32 - AGG_0.len()]);
to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);
let mut P = Vec::with_capacity(n);
for member in ring {
@@ -125,7 +123,7 @@ fn core(
// mu_P with agg_0
let mu_P = hash_to_scalar(&to_hash);
// mu_C with agg_1
to_hash[AGG_0.len() - 1] = b'1';
to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
let mu_C = hash_to_scalar(&to_hash);
// Truncate it for the round transcript, altering the DST as needed
@@ -149,7 +147,7 @@ fn core(
to_hash.extend(A.compress().to_bytes());
to_hash.extend(AH.compress().to_bytes());
c = hash_to_scalar(&to_hash);
},
}
#[cfg(feature = "experimental")]
Mode::Verify(c1) => {
@@ -188,7 +186,7 @@ fn core(
pub struct Clsag {
pub D: EdwardsPoint,
pub s: Vec<Scalar>,
pub c1: Scalar
pub c1: Scalar,
}
impl Clsag {
@@ -201,7 +199,7 @@ impl Clsag {
mask: Scalar,
msg: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint
AH: EdwardsPoint,
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
let r: usize = input.decoys.i.into();
@@ -214,14 +212,10 @@ impl Clsag {
for _ in 0 .. input.decoys.ring.len() {
s.push(random_scalar(rng));
}
let ((D, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
let ((D, p, c), c1) =
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
(
Clsag { D, s, c1 },
pseudo_out,
p,
c * z
)
(Clsag { D, s, c1 }, pseudo_out, p, c * z)
}
// Single signer CLSAG
@@ -229,7 +223,7 @@ impl Clsag {
rng: &mut R,
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
sum_outputs: Scalar,
msg: [u8; 32]
msg: [u8; 32],
) -> Vec<(Clsag, EdwardsPoint)> {
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
@@ -254,7 +248,7 @@ impl Clsag {
mask,
&msg,
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
);
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
@@ -271,17 +265,10 @@ impl Clsag {
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
msg: &[u8; 32],
) -> Result<(), ClsagError> {
let (_, c1) = core(
ring,
I,
pseudo_out,
msg,
&self.D.mul_by_cofactor(),
&self.s,
Mode::Verify(self.c1)
);
let (_, c1) =
core(ring, I, pseudo_out, msg, &self.D.mul_by_cofactor(), &self.s, Mode::Verify(self.c1));
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}
@@ -299,13 +286,7 @@ impl Clsag {
}
pub fn deserialize<R: std::io::Read>(decoys: usize, r: &mut R) -> std::io::Result<Clsag> {
Ok(
Clsag {
s: read_raw_vec(read_scalar, decoys, r)?,
c1: read_scalar(r)?,
D: read_point(r)?
}
)
Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
}
pub fn verify(
@@ -313,7 +294,7 @@ impl Clsag {
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
msg: &[u8; 32],
) -> Result<(), ClsagError> {
// Serialize it to pass the struct to Monero without extensive FFI
let mut serialized = Vec::with_capacity(1 + ((self.s.len() + 2) * 32));
@@ -341,15 +322,19 @@ impl Clsag {
ring: *const u8,
I: *const u8,
pseudo_out: *const u8,
msg: *const u8
msg: *const u8,
) -> bool;
}
if c_verify_clsag(
serialized.len(), serialized.as_ptr(),
u8::try_from(ring.len()).map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
serialized.len(),
serialized.as_ptr(),
u8::try_from(ring.len())
.map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
ring_bytes.as_ptr(),
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
I_bytes.as_ptr(),
pseudo_out_bytes.as_ptr(),
msg.as_ptr(),
) {
Ok(())
} else {

View File

@@ -1,5 +1,8 @@
use core::fmt::Debug;
use std::{io::Read, sync::{Arc, RwLock}};
use std::{
io::Read,
sync::{Arc, RwLock},
};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
@@ -8,7 +11,7 @@ use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
traits::{Identity, IsIdentity},
scalar::Scalar,
edwards::EdwardsPoint
edwards::EdwardsPoint,
};
use group::Group;
@@ -19,7 +22,10 @@ use dalek_ff_group as dfg;
use crate::{
frost::{MultisigError, write_dleq, read_dleq},
ringct::{hash_to_point, clsag::{ClsagInput, Clsag}}
ringct::{
hash_to_point,
clsag::{ClsagInput, Clsag},
},
};
impl ClsagInput {
@@ -49,7 +55,7 @@ impl ClsagInput {
#[derive(Clone, Debug)]
pub struct ClsagDetails {
input: ClsagInput,
mask: Scalar
mask: Scalar,
}
impl ClsagDetails {
@@ -65,7 +71,7 @@ struct Interim {
c: Scalar,
clsag: Clsag,
pseudo_out: EdwardsPoint
pseudo_out: EdwardsPoint,
}
#[allow(non_snake_case)]
@@ -74,34 +80,33 @@ pub struct ClsagMultisig {
transcript: RecommendedTranscript,
H: EdwardsPoint,
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires a round
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
// an extra round
image: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>,
msg: Option<[u8; 32]>,
interim: Option<Interim>
interim: Option<Interim>,
}
impl ClsagMultisig {
pub fn new(
transcript: RecommendedTranscript,
output_key: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>
details: Arc<RwLock<Option<ClsagDetails>>>,
) -> Result<ClsagMultisig, MultisigError> {
Ok(
ClsagMultisig {
transcript,
Ok(ClsagMultisig {
transcript,
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
details,
details,
msg: None,
interim: None
}
)
msg: None,
interim: None,
})
}
pub const fn serialized_len() -> usize {
@@ -128,7 +133,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
view: &FrostView<Ed25519>
view: &FrostView<Ed25519>,
) -> Vec<u8> {
let mut serialized = Vec::with_capacity(Self::serialized_len());
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
@@ -140,7 +145,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&mut self,
view: &FrostView<Ed25519>,
l: u16,
serialized: &mut Re
serialized: &mut Re,
) -> Result<(), FrostError> {
if self.image.is_identity().into() {
self.transcript.domain_separate(b"CLSAG");
@@ -149,12 +154,9 @@ impl Algorithm<Ed25519> for ClsagMultisig {
}
self.transcript.append_message(b"participant", &l.to_be_bytes());
let image = read_dleq(
serialized,
self.H,
l,
view.verification_share(l)
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
let image = read_dleq(serialized, self.H, l, view.verification_share(l))
.map_err(|_| FrostError::InvalidCommitment(l))?
.0;
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref());
self.image += image;
@@ -170,7 +172,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
view: &FrostView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: &[dfg::Scalar],
msg: &[u8]
msg: &[u8],
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
// The transcript contains private data, preventing passive adversaries from recreating this
@@ -189,7 +191,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
self.mask(),
&self.msg.as_ref().unwrap(),
nonce_sums[0][0].0,
nonce_sums[0][1].0
nonce_sums[0][1].0,
);
self.interim = Some(Interim { p, c, clsag, pseudo_out });
@@ -203,17 +205,20 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&self,
_: dfg::EdwardsPoint,
_: &[Vec<dfg::EdwardsPoint>],
sum: dfg::Scalar
sum: dfg::Scalar,
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let mut clsag = interim.clsag.clone();
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
if clsag.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
&self.msg.as_ref().unwrap()
).is_ok() {
if clsag
.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
&self.msg.as_ref().unwrap(),
)
.is_ok()
{
return Some((clsag, interim.pseudo_out));
}
return None;
@@ -227,8 +232,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
share: dfg::Scalar,
) -> bool {
let interim = self.interim.as_ref().unwrap();
return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
nonces[0][0].0 - (interim.p * verification_share.0)
);
return (&share.0 * &ED25519_BASEPOINT_TABLE) ==
(nonces[0][0].0 - (interim.p * verification_share.0));
}
}

View File

@@ -8,7 +8,7 @@ pub mod bulletproofs;
use crate::{
serialize::*,
ringct::{clsag::Clsag, bulletproofs::Bulletproofs}
ringct::{clsag::Clsag, bulletproofs::Bulletproofs},
};
pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
@@ -19,7 +19,7 @@ pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
pub struct RctBase {
pub fee: u64,
pub ecdh_info: Vec<[u8; 8]>,
pub commitments: Vec<EdwardsPoint>
pub commitments: Vec<EdwardsPoint>,
}
impl RctBase {
@@ -37,12 +37,15 @@ impl RctBase {
w.write_all(ecdh)?;
}
write_raw_vec(write_point, &self.commitments, w)
},
_ => panic!("Serializing unknown RctType's Base")
}
_ => panic!("Serializing unknown RctType's Base"),
}
}
pub fn deserialize<R: std::io::Read>(outputs: usize, r: &mut R) -> std::io::Result<(RctBase, u8)> {
pub fn deserialize<R: std::io::Read>(
outputs: usize,
r: &mut R,
) -> std::io::Result<(RctBase, u8)> {
let mut rct_type = [0];
r.read_exact(&mut rct_type)?;
Ok((
@@ -51,13 +54,16 @@ impl RctBase {
} else {
RctBase {
fee: read_varint(r)?,
ecdh_info: (0 .. outputs).map(
|_| { let mut ecdh = [0; 8]; r.read_exact(&mut ecdh).map(|_| ecdh) }
).collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?
ecdh_info: (0 .. outputs)
.map(|_| {
let mut ecdh = [0; 8];
r.read_exact(&mut ecdh).map(|_| ecdh)
})
.collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?,
}
},
rct_type[0]
rct_type[0],
))
}
}
@@ -65,18 +71,14 @@ impl RctBase {
#[derive(Clone, PartialEq, Debug)]
pub enum RctPrunable {
Null,
Clsag {
bulletproofs: Vec<Bulletproofs>,
clsags: Vec<Clsag>,
pseudo_outs: Vec<EdwardsPoint>
}
Clsag { bulletproofs: Vec<Bulletproofs>, clsags: Vec<Clsag>, pseudo_outs: Vec<EdwardsPoint> },
}
impl RctPrunable {
pub fn rct_type(&self) -> u8 {
match self {
RctPrunable::Null => 0,
RctPrunable::Clsag { .. } => 5
RctPrunable::Clsag { .. } => 5,
}
}
@@ -98,26 +100,30 @@ impl RctPrunable {
pub fn deserialize<R: std::io::Read>(
rct_type: u8,
decoys: &[usize],
r: &mut R
r: &mut R,
) -> std::io::Result<RctPrunable> {
Ok(
match rct_type {
0 => RctPrunable::Null,
5 => RctPrunable::Clsag {
// TODO: Can the amount of outputs be calculated from the BPs for any validly formed TX?
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
clsags: (0 .. decoys.len()).map(|o| Clsag::deserialize(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?
}
)
Ok(match rct_type {
0 => RctPrunable::Null,
5 => RctPrunable::Clsag {
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
clsags: (0 .. decoys.len())
.map(|o| Clsag::deserialize(decoys[o], r))
.collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
},
_ => Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Tried to deserialize unknown RCT type",
))?,
})
}
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect(),
RctPrunable::Clsag { bulletproofs, .. } => {
bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect()
}
}
}
}
@@ -125,7 +131,7 @@ impl RctPrunable {
#[derive(Clone, PartialEq, Debug)]
pub struct RctSignatures {
pub base: RctBase,
pub prunable: RctPrunable
pub prunable: RctPrunable,
}
impl RctSignatures {
@@ -138,7 +144,11 @@ impl RctSignatures {
self.prunable.serialize(w)
}
pub fn deserialize<R: std::io::Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> std::io::Result<RctSignatures> {
pub fn deserialize<R: std::io::Read>(
decoys: Vec<usize>,
outputs: usize,
r: &mut R,
) -> std::io::Result<RctSignatures> {
let base = RctBase::deserialize(outputs, r)?;
Ok(RctSignatures { base: base.0, prunable: RctPrunable::deserialize(base.1, &decoys, r)? })
}

View File

@@ -9,13 +9,17 @@ use serde_json::json;
use reqwest;
use crate::{transaction::{Input, Timelock, Transaction}, block::Block, wallet::Fee};
use crate::{
transaction::{Input, Timelock, Transaction},
block::Block,
wallet::Fee,
};
#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
result: T
result: T,
}
#[derive(Clone, Error, Debug)]
@@ -31,7 +35,7 @@ pub enum RpcError {
#[error("pruned transaction")]
PrunedTransaction,
#[error("invalid transaction ({0:?})")]
InvalidTransaction([u8; 32])
InvalidTransaction([u8; 32]),
}
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
@@ -40,8 +44,10 @@ fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
CompressedEdwardsY(
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?
).decompress().ok_or(RpcError::InvalidPoint(point.to_string()))
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
)
.decompress()
.ok_or(RpcError::InvalidPoint(point.to_string()))
}
#[derive(Clone, Debug)]
@@ -52,10 +58,11 @@ impl Rpc {
Rpc(daemon)
}
pub async fn rpc_call<
Params: Serialize + Debug,
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Option<Params>) -> Result<Response, RpcError> {
pub async fn rpc_call<Params: Serialize + Debug, Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Option<Params>,
) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let mut builder = client.post(&(self.0.clone() + "/" + method));
if let Some(params) = params.as_ref() {
@@ -65,44 +72,43 @@ impl Rpc {
self.call_tail(method, builder).await
}
pub async fn bin_call<
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Vec<u8>) -> Result<Response, RpcError> {
pub async fn bin_call<Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Vec<u8>,
) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let builder = client.post(&(self.0.clone() + "/" + method)).body(params);
self.call_tail(method, builder.header("Content-Type", "application/octet-stream")).await
}
async fn call_tail<
Response: DeserializeOwned + Debug
>(&self, method: &str, builder: reqwest::RequestBuilder) -> Result<Response, RpcError> {
let res = builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?;
async fn call_tail<Response: DeserializeOwned + Debug>(
&self,
method: &str,
builder: reqwest::RequestBuilder,
) -> Result<Response, RpcError> {
let res = builder.send().await.map_err(|_| RpcError::ConnectionError)?;
Ok(
if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
}
)
Ok(if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
})
}
pub async fn get_height(&self) -> Result<usize, RpcError> {
#[derive(Deserialize, Debug)]
struct HeightResponse {
height: usize
height: usize,
}
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
}
async fn get_transactions_core(
&self,
hashes: &[[u8; 32]]
hashes: &[[u8; 32]],
) -> Result<(Vec<Result<Transaction, RpcError>>, Vec<[u8; 32]>), RpcError> {
if hashes.len() == 0 {
return Ok((vec![], vec![]));
@@ -112,39 +118,48 @@ impl Rpc {
struct TransactionResponse {
tx_hash: String,
as_hex: String,
pruned_as_hex: String
pruned_as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>
txs: Vec<TransactionResponse>,
}
let txs: TransactionsResponse = self.rpc_call("get_transactions", Some(json!({
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
}))).await?;
let txs: TransactionsResponse = self
.rpc_call(
"get_transactions",
Some(json!({
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
})),
)
.await?;
Ok((
txs.txs.iter().map(|res| {
let tx = Transaction::deserialize(
&mut std::io::Cursor::new(
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap()
)
).map_err(|_| RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap()))?;
txs
.txs
.iter()
.map(|res| {
let tx = Transaction::deserialize(&mut std::io::Cursor::new(
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap(),
))
.map_err(|_| {
RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap())
})?;
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.len() == 0 {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.len() == 0 {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?,
}
}
}
Ok(tx)
}).collect(),
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect()
Ok(tx)
})
.collect(),
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect(),
))
}
@@ -158,7 +173,10 @@ impl Rpc {
txs.iter().cloned().collect::<Result<_, _>>()
}
pub async fn get_transactions_possible(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
pub async fn get_transactions_possible(
&self,
hashes: &[[u8; 32]],
) -> Result<Vec<Transaction>, RpcError> {
let (txs, _) = self.get_transactions_core(hashes).await?;
Ok(txs.iter().cloned().filter_map(|tx| tx.ok()).collect())
}
@@ -166,37 +184,39 @@ impl Rpc {
pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String
blob: String,
}
let block: JsonRpcResponse<BlockResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_block",
"params": {
"height": height
}
}))).await?;
let block: JsonRpcResponse<BlockResponse> = self
.rpc_call(
"json_rpc",
Some(json!({
"method": "get_block",
"params": {
"height": height
}
})),
)
.await?;
Ok(
Block::deserialize(
&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?)
).expect("Monero returned a block we couldn't deserialize")
Block::deserialize(&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?))
.expect("Monero returned a block we couldn't deserialize"),
)
}
async fn get_block_transactions_core(
&self,
height: usize,
possible: bool
possible: bool,
) -> Result<Vec<Transaction>, RpcError> {
let block = self.get_block(height).await?;
let mut res = vec![block.miner_tx];
res.extend(
if possible {
self.get_transactions_possible(&block.txs).await?
} else {
self.get_transactions(&block.txs).await?
}
);
res.extend(if possible {
self.get_transactions_possible(&block.txs).await?
} else {
self.get_transactions(&block.txs).await?
});
Ok(res)
}
@@ -204,14 +224,17 @@ impl Rpc {
self.get_block_transactions_core(height, false).await
}
pub async fn get_block_transactions_possible(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
pub async fn get_block_transactions_possible(
&self,
height: usize,
) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions_core(height, true).await
}
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32]
txid: [u8; 32],
}
#[allow(dead_code)]
@@ -221,42 +244,52 @@ impl Rpc {
status: String,
untrusted: bool,
credits: usize,
top_hash: String
top_hash: String,
}
let indexes: OIndexes = self.bin_call("get_o_indexes.bin", monero_epee_bin_serde::to_bytes(
&Request {
txid: hash
}).unwrap()
).await?;
let indexes: OIndexes = self
.bin_call(
"get_o_indexes.bin",
monero_epee_bin_serde::to_bytes(&Request { txid: hash }).unwrap(),
)
.await?;
Ok(indexes.o_indexes)
}
// from and to are inclusive
pub async fn get_output_distribution(&self, from: usize, to: usize) -> Result<Vec<u64>, RpcError> {
pub async fn get_output_distribution(
&self,
from: usize,
to: usize,
) -> Result<Vec<u64>, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
pub struct Distribution {
distribution: Vec<u64>
distribution: Vec<u64>,
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distributions {
distributions: Vec<Distribution>
distributions: Vec<Distribution>,
}
let mut distributions: JsonRpcResponse<Distributions> = self.rpc_call("json_rpc", Some(json!({
"method": "get_output_distribution",
"params": {
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to
}
}))).await?;
let mut distributions: JsonRpcResponse<Distributions> = self
.rpc_call(
"json_rpc",
Some(json!({
"method": "get_output_distribution",
"params": {
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to
}
})),
)
.await?;
Ok(distributions.result.distributions.swap_remove(0).distribution)
}
@@ -264,46 +297,62 @@ impl Rpc {
pub async fn get_outputs(
&self,
indexes: &[u64],
height: usize
height: usize,
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
#[derive(Deserialize, Debug)]
pub struct Out {
key: String,
mask: String,
txid: String
txid: String,
}
#[derive(Deserialize, Debug)]
struct Outs {
outs: Vec<Out>
outs: Vec<Out>,
}
let outs: Outs = self.rpc_call("get_outs", Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
}))).await?;
let outs: Outs = self
.rpc_call(
"get_outs",
Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
})),
)
.await?;
let txs = self.get_transactions(
&outs.outs.iter().map(|out|
rpc_hex(&out.txid).expect("Monero returned an invalidly encoded hash")
.try_into().expect("Monero returned an invalid sized hash")
).collect::<Vec<_>>()
).await?;
let txs = self
.get_transactions(
&outs
.outs
.iter()
.map(|out| {
rpc_hex(&out.txid)
.expect("Monero returned an invalidly encoded hash")
.try_into()
.expect("Monero returned an invalid sized hash")
})
.collect::<Vec<_>>(),
)
.await?;
// TODO: Support time based lock times. These shouldn't be needed, and it may be painful to
// get the median time for the given height, yet we do need to in order to be complete
outs.outs.iter().enumerate().map(
|(i, out)| Ok(
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
outs
.outs
.iter()
.enumerate()
.map(|(i, out)| {
Ok(Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
match txs[i].prefix.timelock {
Timelock::Block(t_height) => (t_height <= height),
_ => false
_ => false,
}
})
)
).collect()
}))
})
.collect()
}
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
@@ -311,12 +360,17 @@ impl Rpc {
#[derive(Deserialize, Debug)]
struct FeeResponse {
fee: u64,
quantization_mask: u64
quantization_mask: u64,
}
let res: JsonRpcResponse<FeeResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_fee_estimate"
}))).await?;
let res: JsonRpcResponse<FeeResponse> = self
.rpc_call(
"json_rpc",
Some(json!({
"method": "get_fee_estimate"
})),
)
.await?;
Ok(Fee { per_weight: res.result.fee, mask: res.result.quantization_mask })
}
@@ -335,14 +389,14 @@ impl Rpc {
overspend: bool,
too_big: bool,
too_few_outputs: bool,
reason: String
reason: String,
}
let mut buf = Vec::with_capacity(2048);
tx.serialize(&mut buf).unwrap();
let res: SendRawResponse = self.rpc_call("send_raw_transaction", Some(json!({
"tx_as_hex": hex::encode(&buf)
}))).await?;
let res: SendRawResponse = self
.rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(&buf) })))
.await?;
if res.status != "OK" {
Err(RpcError::InvalidTransaction(tx.hash()))?;
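
A minimal sketch of driving the reformatted RPC client, assuming a tokio runtime (with the macros feature enabled), the crate imported as monero_serai, and a monerod instance reachable at a placeholder URL:

use monero_serai::rpc::Rpc;

#[tokio::main]
async fn main() {
  // Placeholder daemon URL; point this at a real monerod RPC endpoint.
  let rpc = Rpc::new("http://127.0.0.1:18081".to_string());
  let height = rpc.get_height().await.expect("couldn't reach the daemon");
  println!("chain height: {}", height);

  // get_block uses the same json_rpc call path reformatted above.
  let block = rpc.get_block(height - 1).await.expect("couldn't fetch the top block");
  println!("non-miner transactions in the top block: {}", block.txs.len());
}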

View File

@@ -1,6 +1,9 @@
use std::io;
use curve25519_dalek::{scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
pub const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
@@ -30,22 +33,22 @@ pub fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<
w.write_all(&point.compress().to_bytes())
}
pub fn write_raw_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
pub fn write_raw_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
pub fn write_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
pub fn write_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
write_varint(&values.len().try_into().unwrap(), w)?;
write_raw_vec(f, &values, w)
}
@@ -75,23 +78,26 @@ pub fn read_32<R: io::Read>(r: &mut R) -> io::Result<[u8; 32]> {
Ok(res)
}
// TODO: Potentially update to Monero's parsing rules on scalars/points, which should be any arbitrary 32-bytes
// We may be able to consider such transactions as malformed and accordingly be opinionated in ignoring them
// TODO: https://github.com/serai-dex/serai/issues/25
pub fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(
read_32(r)?
).ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
Scalar::from_canonical_bytes(read_32(r)?)
.ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}
pub fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
CompressedEdwardsY(
read_32(r)?
).decompress().filter(|point| point.is_torsion_free()).ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
CompressedEdwardsY(read_32(r)?)
.decompress()
.filter(|point| point.is_torsion_free())
.ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
}
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: usize, r: &mut R) -> io::Result<Vec<T>> {
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
len: usize,
r: &mut R,
) -> io::Result<Vec<T>> {
let mut res = Vec::with_capacity(
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?,
);
for _ in 0 .. len {
res.push(f(r)?);
@@ -99,6 +105,9 @@ pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: u
Ok(res)
}
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
r: &mut R,
) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
}
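
Since this file is all about the varint and vector helpers used throughout the crate, a small round trip may help; it assumes the serialize module is publicly reachable and that the encoding is the little-endian base-128 scheme implied by VARINT_CONTINUATION_MASK, so the exact byte assertion is illustrative.

use std::io::Cursor;

// Hypothetical import path for this sketch.
use monero_serai::serialize::{write_varint, read_varint};

fn main() -> std::io::Result<()> {
  let mut buf = vec![];
  // 300 doesn't fit in 7 bits, so the first byte carries the continuation bit.
  write_varint(&300u64, &mut buf)?;
  assert_eq!(buf, [0b1010_1100, 0b0000_0010]);
  assert_eq!(read_varint(&mut Cursor::new(buf))?, 300);
  Ok(())
}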

View File

@@ -5,14 +5,19 @@ use crate::wallet::address::{Network, AddressType, Address};
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
const STANDARD: &'static str = "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const STANDARD: &'static str =
"4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
const INTEGRATED: &'static str = "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6MnpXSn88oBX35";
const INTEGRATED: &'static str =
"4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
pXSn88oBX35";
const SUB_SPEND: [u8; 32] = hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_SPEND: [u8; 32] =
hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
const SUBADDRESS: &'static str = "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
const SUBADDRESS: &'static str =
"8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
#[test]
fn standard_address() {

View File

@@ -11,13 +11,18 @@ use transcript::{Transcript, RecommendedTranscript};
use frost::curve::Ed25519;
use crate::{
Commitment,
random_scalar,
Commitment, random_scalar,
wallet::Decoys,
ringct::{generate_key_image, clsag::{ClsagInput, Clsag}}
ringct::{
generate_key_image,
clsag::{ClsagInput, Clsag},
},
};
#[cfg(feature = "multisig")]
use crate::{frost::MultisigError, ringct::clsag::{ClsagDetails, ClsagMultisig}};
use crate::{
frost::MultisigError,
ringct::clsag::{ClsagDetails, ClsagMultisig},
};
#[cfg(feature = "multisig")]
use frost::tests::{key_gen, algorithm_machines, sign};
@@ -59,13 +64,15 @@ fn clsag() {
Decoys {
i: u8::try_from(real).unwrap(),
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap()
ring: ring.clone(),
},
)
.unwrap(),
)],
random_scalar(&mut OsRng),
msg
).swap_remove(0);
msg,
)
.swap_remove(0);
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
#[cfg(feature = "experimental")]
clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
@@ -103,23 +110,23 @@ fn clsag_multisig() -> Result<(), MultisigError> {
ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
keys[&1].group_key().0,
Arc::new(RwLock::new(Some(
ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap(),
mask_sum
Arc::new(RwLock::new(Some(ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone(),
},
)
)))
).unwrap(),
&keys
.unwrap(),
mask_sum,
)))),
)
.unwrap(),
&keys,
),
&[1; 32]
&[1; 32],
);
Ok(())

View File

@@ -2,7 +2,10 @@ use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use crate::{random_scalar, ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point}};
use crate::{
random_scalar,
ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point},
};
#[test]
fn hash_to_point() {

View File

@@ -2,7 +2,11 @@ use core::cmp::Ordering;
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{hash, serialize::*, ringct::{RctPrunable, RctSignatures}};
use crate::{
hash,
serialize::*,
ringct::{RctPrunable, RctSignatures},
};
pub const RING_LEN: usize = 11;
@@ -10,11 +14,7 @@ pub const RING_LEN: usize = 11;
pub enum Input {
Gen(u64),
ToKey {
amount: u64,
key_offsets: Vec<u64>,
key_image: EdwardsPoint
}
ToKey { amount: u64, key_offsets: Vec<u64>, key_image: EdwardsPoint },
}
impl Input {
@@ -30,7 +30,7 @@ impl Input {
Input::Gen(height) => {
w.write_all(&[255])?;
write_varint(height, w)
},
}
Input::ToKey { amount, key_offsets, key_image } => {
w.write_all(&[2])?;
@@ -44,17 +44,18 @@ impl Input {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Input> {
let mut variant = [0];
r.read_exact(&mut variant)?;
Ok(
match variant[0] {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_point(r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
)
Ok(match variant[0] {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_point(r)?,
},
_ => Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Tried to deserialize unknown/unused input type",
))?,
})
}
}
@@ -63,7 +64,7 @@ impl Input {
pub struct Output {
pub amount: u64,
pub key: EdwardsPoint,
pub tag: Option<u8>
pub tag: Option<u8>,
}
impl Output {
@@ -86,16 +87,22 @@ impl Output {
let mut tag = [0];
r.read_exact(&mut tag)?;
if (tag[0] != 2) && (tag[0] != 3) {
Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused output type"))?;
Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Tried to deserialize unknown/unused output type",
))?;
}
Ok(
Output {
amount,
key: read_point(r)?,
tag: if tag[0] == 3 { r.read_exact(&mut tag)?; Some(tag[0]) } else { None }
}
)
Ok(Output {
amount,
key: read_point(r)?,
tag: if tag[0] == 3 {
r.read_exact(&mut tag)?;
Some(tag[0])
} else {
None
},
})
}
}
@@ -103,7 +110,7 @@ impl Output {
pub enum Timelock {
None,
Block(usize),
Time(u64)
Time(u64),
}
impl Timelock {
@@ -126,9 +133,9 @@ impl Timelock {
&match self {
Timelock::None => 0,
Timelock::Block(block) => (*block).try_into().unwrap(),
Timelock::Time(time) => *time
Timelock::Time(time) => *time,
},
w
w,
)
}
}
@@ -139,7 +146,7 @@ impl PartialOrd for Timelock {
(Timelock::None, _) => Some(Ordering::Less),
(Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
(Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
_ => None
_ => None,
}
}
}
@@ -150,17 +157,19 @@ pub struct TransactionPrefix {
pub timelock: Timelock,
pub inputs: Vec<Input>,
pub outputs: Vec<Output>,
pub extra: Vec<u8>
pub extra: Vec<u8>,
}
impl TransactionPrefix {
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
// Assumes Timelock::None since this library won't let you create a TX with a timelock
1 + 1 +
varint_len(inputs) + (inputs * Input::fee_weight()) +
// Only 16 outputs are possible under transactions by this lib
1 + (outputs * Output::fee_weight()) +
varint_len(extra) + extra
varint_len(inputs) +
(inputs * Input::fee_weight()) +
1 +
(outputs * Output::fee_weight()) +
varint_len(extra) +
extra
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
@@ -178,7 +187,7 @@ impl TransactionPrefix {
timelock: Timelock::from_raw(read_varint(r)?),
inputs: read_vec(Input::deserialize, r)?,
outputs: read_vec(Output::deserialize, r)?,
extra: vec![]
extra: vec![],
};
let len = read_varint(r)?;
@@ -192,12 +201,13 @@ impl TransactionPrefix {
#[derive(Clone, PartialEq, Debug)]
pub struct Transaction {
pub prefix: TransactionPrefix,
pub rct_signatures: RctSignatures
pub rct_signatures: RctSignatures,
}
impl Transaction {
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
TransactionPrefix::fee_weight(inputs, outputs, extra) + RctSignatures::fee_weight(inputs, outputs)
TransactionPrefix::fee_weight(inputs, outputs, extra) +
RctSignatures::fee_weight(inputs, outputs)
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
@@ -207,19 +217,21 @@ impl Transaction {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Transaction> {
let prefix = TransactionPrefix::deserialize(r)?;
Ok(
Transaction {
rct_signatures: RctSignatures::deserialize(
prefix.inputs.iter().map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len()
}).collect(),
prefix.outputs.len(),
r
)?,
Ok(Transaction {
rct_signatures: RctSignatures::deserialize(
prefix
}
)
.inputs
.iter()
.map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len(),
})
.collect(),
prefix.outputs.len(),
r,
)?,
prefix,
})
}
pub fn hash(&self) -> [u8; 32] {
@@ -234,10 +246,11 @@ impl Transaction {
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.serialize(
&mut serialized,
self.rct_signatures.prunable.rct_type()
).unwrap();
self
.rct_signatures
.base
.serialize(&mut serialized, self.rct_signatures.prunable.rct_type())
.unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
@@ -262,7 +275,11 @@ impl Transaction {
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.serialize(&mut serialized, self.rct_signatures.prunable.rct_type()).unwrap();
self
.rct_signatures
.base
.serialize(&mut serialized, self.rct_signatures.prunable.rct_type())
.unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
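
The PartialOrd arms reformatted earlier in this file only compare like variants, with Timelock::None ordered before everything; a tiny sketch of those semantics (module path assumed):

// Hypothetical import path for this sketch.
use monero_serai::transaction::Timelock;

fn main() {
  // None sorts before any concrete lock.
  assert!(Timelock::None < Timelock::Block(1_000_000));
  // Like variants compare numerically.
  assert!(Timelock::Block(10) < Timelock::Block(20));
  // A height-based and a time-based lock are incomparable.
  assert!(Timelock::Time(1_656_000_000).partial_cmp(&Timelock::Block(2_650_000)).is_none());
}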

View File

@@ -2,7 +2,10 @@ use std::string::ToString;
use thiserror::Error;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::{EdwardsPoint, CompressedEdwardsY}};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use base58_monero::base58::{encode_check, decode_check};
@@ -12,14 +15,14 @@ use crate::wallet::ViewPair;
pub enum Network {
Mainnet,
Testnet,
Stagenet
Stagenet,
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum AddressType {
Standard,
Integrated([u8; 8]),
Subaddress
Subaddress,
}
impl AddressType {
@@ -27,7 +30,7 @@ impl AddressType {
match network {
Network::Mainnet => (18, 19, 42),
Network::Testnet => (53, 54, 63),
Network::Stagenet => (24, 25, 36)
Network::Stagenet => (24, 25, 36),
}
}
}
@@ -36,7 +39,7 @@ impl AddressType {
pub struct AddressMeta {
pub network: Network,
pub kind: AddressType,
pub guaranteed: bool
pub guaranteed: bool,
}
#[derive(Clone, Error, Debug)]
@@ -50,7 +53,7 @@ pub enum AddressError {
#[error("different network than expected")]
DifferentNetwork,
#[error("invalid key")]
InvalidKey
InvalidKey,
}
impl AddressMeta {
@@ -59,7 +62,7 @@ impl AddressMeta {
let byte = match self.kind {
AddressType::Standard => bytes.0,
AddressType::Integrated(_) => bytes.1,
AddressType::Subaddress => bytes.2
AddressType::Subaddress => bytes.2,
};
byte | (if self.guaranteed { 1 << 7 } else { 0 })
}
@@ -76,7 +79,7 @@ impl AddressMeta {
_ if actual == standard => Some(AddressType::Standard),
_ if actual == integrated => Some(AddressType::Integrated([0; 8])),
_ if actual == subaddress => Some(AddressType::Subaddress),
_ => None
_ => None,
} {
meta = Some(AddressMeta { network, kind, guaranteed });
break;
@@ -91,19 +94,15 @@ impl AddressMeta {
pub struct Address {
pub meta: AddressMeta,
pub spend: EdwardsPoint,
pub view: EdwardsPoint
pub view: EdwardsPoint,
}
impl ViewPair {
pub fn address(&self, network: Network, kind: AddressType, guaranteed: bool) -> Address {
Address {
meta: AddressMeta {
network,
kind,
guaranteed
},
meta: AddressMeta { network, kind, guaranteed },
spend: self.spend,
view: &self.view * &ED25519_BASEPOINT_TABLE
view: &self.view * &ED25519_BASEPOINT_TABLE,
}
}
}
@@ -134,14 +133,18 @@ impl Address {
let len = match meta.kind {
AddressType::Standard | AddressType::Subaddress => 65,
AddressType::Integrated(_) => 73
AddressType::Integrated(_) => 73,
};
if raw.len() != len {
Err(AddressError::InvalidLength)?;
}
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
if let AddressType::Integrated(ref mut payment_id) = meta.kind {
payment_id.copy_from_slice(&raw[65 .. 73]);
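
To show how the reformatted ViewPair::address above fits together, here is a short sketch deriving a mainnet standard address from random keys; the crate and module paths are assumptions of this sketch.

use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;

// Hypothetical import paths for this sketch.
use monero_serai::{
  random_scalar,
  wallet::{ViewPair, address::{Network, AddressType}},
};

fn main() {
  let view = ViewPair {
    spend: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
    view: random_scalar(&mut OsRng),
  };
  let address = view.address(Network::Mainnet, AddressType::Standard, false);
  assert!(matches!(address.meta.kind, AddressType::Standard));
  assert!(!address.meta.guaranteed);
}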

View File

@@ -7,7 +7,11 @@ use rand_distr::{Distribution, Gamma};
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{transaction::RING_LEN, wallet::SpendableOutput, rpc::{RpcError, Rpc}};
use crate::{
transaction::RING_LEN,
wallet::SpendableOutput,
rpc::{RpcError, Rpc},
};
const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
@@ -30,7 +34,7 @@ async fn select_n<R: RngCore + CryptoRng>(
high: u64,
per_second: f64,
used: &mut HashSet<u64>,
count: usize
count: usize,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
let mut iters = 0;
let mut confirmed = Vec::with_capacity(count);
@@ -94,7 +98,7 @@ fn offset(ring: &[u64]) -> Vec<u64> {
pub struct Decoys {
pub i: u8,
pub offsets: Vec<u64>,
pub ring: Vec<[EdwardsPoint; 2]>
pub ring: Vec<[EdwardsPoint; 2]>,
}
impl Decoys {
@@ -106,14 +110,14 @@ impl Decoys {
rng: &mut R,
rpc: &Rpc,
height: usize,
inputs: &[SpendableOutput]
inputs: &[SpendableOutput],
) -> Result<Vec<Decoys>, RpcError> {
// Convert the inputs in question to the raw output data
let mut outputs = Vec::with_capacity(inputs.len());
for input in inputs {
outputs.push((
rpc.get_o_indexes(input.tx).await?[usize::from(input.o)],
[input.key, input.commitment.calculate()]
[input.key, input.commitment.calculate()],
));
}
@@ -153,17 +157,10 @@ impl Decoys {
}
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't bother
// with an overage
let mut decoys = select_n(
rng,
rpc,
height,
high,
per_second,
&mut used,
inputs.len() * DECOYS
).await?;
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
let mut decoys =
select_n(rng, rpc, height, high, per_second, &mut used, inputs.len() * DECOYS).await?;
let mut res = Vec::with_capacity(inputs.len());
for o in outputs {
@@ -178,8 +175,8 @@ impl Decoys {
// 500 outputs since while itself not being a sufficiently mature blockchain
// Considering Monero's p2p layer doesn't actually check transaction sanity, it should be
// fine for us to not have perfectly matching rules, especially since this code will infinite
// loop if it can't determine sanity, which is possible with sufficient inputs on sufficiently
// small chains
// loop if it can't determine sanity, which is possible with sufficient inputs on
// sufficiently small chains
if high > 500 {
// Make sure the TX passes the sanity check that the median output is within the last 40%
let target_median = high * 3 / 5;
@@ -190,28 +187,30 @@ impl Decoys {
if removed.0 == o.0 {
ring.push(o);
} else {
// We could not remove this, saving CPU time and removing low values as possibilities, yet
// it'd increase the amount of decoys required to create this transaction and some removed
// outputs may be the best option (as we drop the first half, not just the bottom n)
// We could not remove this, saving CPU time and removing low values as
// possibilities, yet it'd increase the amount of decoys required to create this
// transaction and some removed outputs may be the best option (as we drop the first
// half, not just the bottom n)
used.remove(&removed.0);
}
}
// Select new outputs until we have a full sized ring again
ring.extend(
select_n(rng, rpc, height, high, per_second, &mut used, RING_LEN - ring.len()).await?
select_n(rng, rpc, height, high, per_second, &mut used, RING_LEN - ring.len()).await?,
);
ring.sort_by(|a, b| a.0.cmp(&b.0));
}
// The other sanity check rule is about duplicates, yet we already enforce unique ring members
// The other sanity check rule is about duplicates, yet we already enforce unique ring
// members
}
res.push(Decoys {
// Binary searches for the real spend since we don't know where it sorted to
i: u8::try_from(ring.partition_point(|x| x.0 < o.0)).unwrap(),
offsets: offset(&ring.iter().map(|output| output.0).collect::<Vec<_>>()),
ring: ring.iter().map(|output| output.1).collect()
ring: ring.iter().map(|output| output.1).collect(),
});
}

View File

@@ -1,10 +1,6 @@
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{
hash, hash_to_scalar,
serialize::write_varint,
transaction::Input
};
use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};
pub mod address;
@@ -30,8 +26,10 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => { write_varint(&(*height).try_into().unwrap(), &mut u).unwrap(); },
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes())
Input::Gen(height) => {
write_varint(&(*height).try_into().unwrap(), &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
}
hash(&u)
@@ -39,7 +37,12 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
pub(crate) fn shared_key(
uniqueness: Option<[u8; 32]>,
s: Scalar,
P: &EdwardsPoint,
o: usize,
) -> Scalar {
// uniqueness
let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
// || 8Ra
@@ -69,5 +72,5 @@ pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
#[derive(Clone, Copy)]
pub struct ViewPair {
pub spend: EdwardsPoint,
pub view: Scalar
pub view: Scalar,
}
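
For orientation: uniqueness above hashes the transaction's inputs into a 32-byte tag, and shared_key folds that tag (only for guaranteed addresses) into Monero's usual Hs(8·s·P || o) derivation. A standalone sketch of the byte layout, assuming curve25519-dalek types; hs and varint are stand-ins for this crate's hash_to_scalar and write_varint, not their actual signatures:

use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};

// Sketch only. `hs` stands in for a keccak-based hash-to-scalar and `varint` for a
// Monero varint writer; both are assumptions made for this illustration.
#[allow(non_snake_case)]
fn shared_key_sketch(
  uniqueness: Option<[u8; 32]>,
  s: Scalar,
  P: &EdwardsPoint,
  o: usize,
  hs: impl Fn(&[u8]) -> Scalar,
  varint: impl Fn(u64, &mut Vec<u8>),
) -> Scalar {
  // uniqueness (prepended only for guaranteed addresses)
  let mut shared = uniqueness.map_or(vec![], |u| u.to_vec());
  // || 8Ra: the ECDH point, multiplied by the cofactor
  shared.extend(&(s * P).mul_by_cofactor().compress().to_bytes());
  // || o: the output index as a varint
  varint(o as u64, &mut shared);
  hs(&shared)
}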

View File

@@ -1,10 +1,6 @@
use std::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::deserialize, blockdata::transaction::ExtraField};
@@ -12,7 +8,7 @@ use crate::{
Commitment,
serialize::{write_varint, read_32, read_scalar, read_point},
transaction::{Timelock, Transaction},
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask}
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask},
};
#[derive(Clone, PartialEq, Debug)]
@@ -21,7 +17,7 @@ pub struct SpendableOutput {
pub o: u8,
pub key: EdwardsPoint,
pub key_offset: Scalar,
pub commitment: Commitment
pub commitment: Commitment,
}
pub struct Timelocked(Timelock, Vec<SpendableOutput>);
@@ -61,27 +57,26 @@ impl SpendableOutput {
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<SpendableOutput> {
Ok(
SpendableOutput {
tx: read_32(r)?,
o: { let mut o = [0; 1]; r.read_exact(&mut o)?; o[0] },
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(
read_scalar(r)?,
{ let mut amount = [0; 8]; r.read_exact(&mut amount)?; u64::from_le_bytes(amount) }
)
}
)
Ok(SpendableOutput {
tx: read_32(r)?,
o: {
let mut o = [0; 1];
r.read_exact(&mut o)?;
o[0]
},
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(read_scalar(r)?, {
let mut amount = [0; 8];
r.read_exact(&mut amount)?;
u64::from_le_bytes(amount)
}),
})
}
}
impl Transaction {
pub fn scan(
&self,
view: ViewPair,
guaranteed: bool
) -> Timelocked {
pub fn scan(&self, view: ViewPair, guaranteed: bool) -> Timelocked {
let mut extra = vec![];
write_varint(&u64::try_from(self.prefix.extra.len()).unwrap(), &mut extra).unwrap();
extra.extend(&self.prefix.extra);
@@ -110,7 +105,7 @@ impl Transaction {
Some(uniqueness(&self.prefix.inputs)).filter(|_| guaranteed),
view.view,
pubkey,
o
o,
);
// P - shared == spend
if (output.key - (&key_offset * &ED25519_BASEPOINT_TABLE)) != view.spend {
@@ -129,7 +124,7 @@ impl Transaction {
Some(amount) => amount_decryption(*amount, key_offset),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => break
None => break,
};
// Rebuild the commitment to verify it
@@ -147,7 +142,7 @@ impl Transaction {
o: o.try_into().unwrap(),
key: output.key,
key_offset,
commitment
commitment,
});
}
// Break to prevent public keys from being included multiple times, triggering multiple
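
The ownership test reshaped above is the standard view-key scan: derive the per-output scalar, subtract its basepoint multiple from the output key, and compare the result against the wallet's spend key. A minimal sketch of just that comparison with curve25519-dalek types:

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

// P == Hs(8Ra || o)·G + B  <=>  P - Hs(8Ra || o)·G == B
fn output_is_ours(output_key: EdwardsPoint, key_offset: Scalar, spend: EdwardsPoint) -> bool {
  (output_key - (&key_offset * &ED25519_BASEPOINT_TABLE)) == spend
}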

View File

@@ -3,11 +3,7 @@ use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::Encodable, PublicKey, blockdata::transaction::SubField};
@@ -15,20 +11,20 @@ use monero::{consensus::Encodable, PublicKey, blockdata::transaction::SubField};
use frost::FrostError;
use crate::{
Commitment,
random_scalar,
Commitment, random_scalar,
ringct::{
generate_key_image,
clsag::{ClsagError, ClsagInput, Clsag},
bulletproofs::{MAX_OUTPUTS, Bulletproofs},
RctBase, RctPrunable, RctSignatures
RctBase, RctPrunable, RctSignatures,
},
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
rpc::{Rpc, RpcError},
wallet::{
address::{AddressType, Address}, SpendableOutput, Decoys,
key_image_sort, uniqueness, shared_key, commitment_mask, amount_encryption
}
address::{AddressType, Address},
SpendableOutput, Decoys, key_image_sort, uniqueness, shared_key, commitment_mask,
amount_encryption,
},
};
#[cfg(feature = "multisig")]
use crate::frost::MultisigError;
@@ -44,7 +40,7 @@ struct SendOutput {
R: EdwardsPoint,
dest: EdwardsPoint,
commitment: Commitment,
amount: [u8; 8]
amount: [u8; 8],
}
impl SendOutput {
@@ -52,26 +48,24 @@ impl SendOutput {
rng: &mut R,
unique: [u8; 32],
output: (Address, u64),
o: usize
o: usize,
) -> SendOutput {
let r = random_scalar(rng);
let shared_key = shared_key(
Some(unique).filter(|_| output.0.meta.guaranteed),
r,
&output.0.view,
o
);
let shared_key =
shared_key(Some(unique).filter(|_| output.0.meta.guaranteed), r, &output.0.view, o);
let spend = output.0.spend;
SendOutput {
R: match output.0.meta.kind {
AddressType::Standard => &r * &ED25519_BASEPOINT_TABLE,
AddressType::Integrated(_) => unimplemented!("SendOutput::new doesn't support Integrated addresses"),
AddressType::Subaddress => &r * spend
AddressType::Integrated(_) => {
unimplemented!("SendOutput::new doesn't support Integrated addresses")
}
AddressType::Subaddress => &r * spend,
},
dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + spend),
commitment: Commitment::new(commitment_mask(shared_key), output.1),
amount: amount_encryption(output.1, shared_key)
amount: amount_encryption(output.1, shared_key),
}
}
}
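
On the sending side the same shared key builds the one-time destination: dest = Hs(...)·G + B_spend, with the transaction key R = r·G for standard addresses and R = r·B_spend for subaddresses (integrated addresses are rejected above). A small sketch of just that group arithmetic, again with curve25519-dalek types:

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

// Returns (R, dest) for one output. `shared_key` is Hs(uniqueness? || 8rA || o).
#[allow(non_snake_case)]
fn one_time_keys(
  r: Scalar,
  shared_key: Scalar,
  spend: EdwardsPoint,
  subaddress: bool,
) -> (EdwardsPoint, EdwardsPoint) {
  let R = if subaddress { &r * spend } else { &r * &ED25519_BASEPOINT_TABLE };
  let dest = (&shared_key * &ED25519_BASEPOINT_TABLE) + spend;
  (R, dest)
}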
@@ -103,7 +97,7 @@ pub enum TransactionError {
FrostError(FrostError),
#[cfg(feature = "multisig")]
#[error("multisig error {0}")]
MultisigError(MultisigError)
MultisigError(MultisigError),
}
async fn prepare_inputs<R: RngCore + CryptoRng>(
@@ -111,7 +105,7 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
rpc: &Rpc,
inputs: &[SpendableOutput],
spend: &Scalar,
tx: &mut Transaction
tx: &mut Transaction,
) -> Result<Vec<(Scalar, EdwardsPoint, ClsagInput)>, TransactionError> {
let mut signable = Vec::with_capacity(inputs.len());
@@ -120,34 +114,33 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
rng,
rpc,
rpc.get_height().await.map_err(|e| TransactionError::RpcError(e))? - 10,
inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
inputs,
)
.await
.map_err(|e| TransactionError::RpcError(e))?;
for (i, input) in inputs.iter().enumerate() {
signable.push((
spend + input.key_offset,
generate_key_image(spend + input.key_offset),
ClsagInput::new(
input.commitment,
decoys[i].clone()
).map_err(|e| TransactionError::ClsagError(e))?
ClsagInput::new(input.commitment, decoys[i].clone())
.map_err(|e| TransactionError::ClsagError(e))?,
));
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: decoys[i].offsets.clone(),
key_image: signable[i].1
key_image: signable[i].1,
});
}
signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
tx.prefix.inputs.sort_by(|x, y| if let (
Input::ToKey { key_image: x, ..},
Input::ToKey { key_image: y, ..}
) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
tx.prefix.inputs.sort_by(|x, y| {
if let (Input::ToKey { key_image: x, .. }, Input::ToKey { key_image: y, .. }) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
}
});
Ok(signable)
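
Both sort_by calls above use the same comparator: key images ordered by their compressed bytes, descending (hence the .reverse()), applied to the signable tuples and to tx.prefix.inputs so the two lists stay aligned. Pulled out on its own it is just:

use std::cmp::Ordering;
use curve25519_dalek::edwards::EdwardsPoint;

// Descending lexicographic order over compressed key-image bytes
fn key_image_order(x: &EdwardsPoint, y: &EdwardsPoint) -> Ordering {
  x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}

The crate's own key_image_sort helper, imported a few files up, presumably encodes the same ordering; this is only a restatement of the inline closures here.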
@@ -156,7 +149,7 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Fee {
pub per_weight: u64,
pub mask: u64
pub mask: u64,
}
impl Fee {
@@ -170,7 +163,7 @@ pub struct SignableTransaction {
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
outputs: Vec<SendOutput>,
fee: u64
fee: u64,
}
impl SignableTransaction {
@@ -178,15 +171,13 @@ impl SignableTransaction {
inputs: Vec<SpendableOutput>,
mut payments: Vec<(Address, u64)>,
change_address: Option<Address>,
fee_rate: Fee
fee_rate: Fee,
) -> Result<SignableTransaction, TransactionError> {
// Make sure all addresses are valid
let test = |addr: Address| {
match addr.meta.kind {
AddressType::Standard => Ok(()),
AddressType::Integrated(..) => Err(TransactionError::InvalidAddress),
AddressType::Subaddress => Ok(())
}
let test = |addr: Address| match addr.meta.kind {
AddressType::Standard => Ok(()),
AddressType::Integrated(..) => Err(TransactionError::InvalidAddress),
AddressType::Subaddress => Ok(()),
};
for payment in &payments {
@@ -229,7 +220,8 @@ impl SignableTransaction {
// If we have yet to add a change output, do so if it's economically viable
if (!change) && change_address.is_some() && (in_amount != out_amount) {
// Check even with the new fee, there's remaining funds
let change_fee = fee_rate.calculate(Transaction::fee_weight(inputs.len(), outputs + 1, extra)) - fee;
let change_fee =
fee_rate.calculate(Transaction::fee_weight(inputs.len(), outputs + 1, extra)) - fee;
if (out_amount + change_fee) < in_amount {
change = true;
outputs += 1;
@@ -246,20 +238,13 @@ impl SignableTransaction {
payments.push((change_address.unwrap(), in_amount - out_amount));
}
Ok(
SignableTransaction {
inputs,
payments,
outputs: vec![],
fee
}
)
Ok(SignableTransaction { inputs, payments, outputs: vec![], fee })
}
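
The change logic a few lines up is purely economic: recompute the fee as if there were one more output, and only add change if the inputs still cover the payments plus that larger fee. Reduced to plain integers (illustrative amounts, not real fee rates):

// `fee` is the current fee, `fee_with_change` the fee recomputed with one extra output;
// `out_amount` already includes `fee`.
fn change_is_viable(in_amount: u64, out_amount: u64, fee: u64, fee_with_change: u64) -> bool {
  let change_fee = fee_with_change - fee;
  (out_amount + change_fee) < in_amount
}

fn main() {
  assert!(change_is_viable(2_000_000, 1_500_000, 30_000, 40_000));
  assert!(!change_is_viable(1_510_000, 1_500_000, 30_000, 40_000));
}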
fn prepare_outputs<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
uniqueness: [u8; 32]
uniqueness: [u8; 32],
) -> (Vec<Commitment>, Scalar) {
// Shuffle the payments
self.payments.shuffle(rng);
@@ -275,29 +260,23 @@ impl SignableTransaction {
(commitments, sum)
}
fn prepare_transaction(
&self,
commitments: &[Commitment],
bp: Bulletproofs
) -> Transaction {
fn prepare_transaction(&self, commitments: &[Commitment], bp: Bulletproofs) -> Transaction {
// Create the TX extra
// TODO: Review this for canonicity with Monero
let mut extra = vec![];
SubField::TxPublicKey(
PublicKey { point: self.outputs[0].R.compress() }
).consensus_encode(&mut extra).unwrap();
SubField::TxPublicKey(PublicKey { point: self.outputs[0].R.compress() })
.consensus_encode(&mut extra)
.unwrap();
SubField::AdditionalPublickKey(
self.outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect()
).consensus_encode(&mut extra).unwrap();
self.outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect(),
)
.consensus_encode(&mut extra)
.unwrap();
let mut tx_outputs = Vec::with_capacity(self.outputs.len());
let mut ecdh_info = Vec::with_capacity(self.outputs.len());
for o in 0 .. self.outputs.len() {
tx_outputs.push(Output {
amount: 0,
key: self.outputs[o].dest,
tag: None
});
tx_outputs.push(Output { amount: 0, key: self.outputs[o].dest, tag: None });
ecdh_info.push(self.outputs[o].amount);
}
@@ -307,20 +286,20 @@ impl SignableTransaction {
timelock: Timelock::None,
inputs: vec![],
outputs: tx_outputs,
extra
extra,
},
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect()
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![]
}
}
pseudo_outs: vec![],
},
},
}
}
@@ -328,7 +307,7 @@ impl SignableTransaction {
&mut self,
rng: &mut R,
rpc: &Rpc,
spend: &Scalar
spend: &Scalar,
) -> Result<Transaction, TransactionError> {
let mut images = Vec::with_capacity(self.inputs.len());
for input in &self.inputs {
@@ -344,12 +323,11 @@ impl SignableTransaction {
let (commitments, mask_sum) = self.prepare_outputs(
rng,
uniqueness(
&images.iter().map(|image| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: *image
}).collect::<Vec<_>>()
)
&images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
);
let mut tx = self.prepare_transaction(&commitments, Bulletproofs::new(rng, &commitments)?);
@@ -361,7 +339,8 @@ impl SignableTransaction {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
clsags.append(&mut clsag_pairs.iter().map(|clsag| clsag.0.clone()).collect::<Vec<_>>());
pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1.clone()).collect::<Vec<_>>());
pseudo_outs
.append(&mut clsag_pairs.iter().map(|clsag| clsag.1.clone()).collect::<Vec<_>>());
}
}
Ok(tx)

View File

@@ -1,25 +1,38 @@
use std::{io::{Read, Cursor}, sync::{Arc, RwLock}, collections::HashMap};
use std::{
io::{Read, Cursor},
sync::{Arc, RwLock},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
use curve25519_dalek::{
traits::Identity,
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use transcript::{Transcript, RecommendedTranscript};
use frost::{
curve::Ed25519,
FrostError, FrostKeys,
sign::{
PreprocessMachine, SignMachine, SignatureMachine,
AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine
}
PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, AlgorithmSignMachine,
AlgorithmSignatureMachine,
},
};
use crate::{
random_scalar, ringct::{clsag::{ClsagInput, ClsagDetails, ClsagMultisig}, bulletproofs::Bulletproofs, RctPrunable},
random_scalar,
ringct::{
clsag::{ClsagInput, ClsagDetails, ClsagMultisig},
bulletproofs::Bulletproofs,
RctPrunable,
},
transaction::{Input, Transaction},
rpc::Rpc,
wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness}
wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness},
};
pub struct TransactionMachine {
@@ -31,7 +44,7 @@ pub struct TransactionMachine {
decoys: Vec<Decoys>,
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
}
pub struct TransactionSignMachine {
@@ -45,12 +58,12 @@ pub struct TransactionSignMachine {
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>,
our_preprocess: Vec<u8>
our_preprocess: Vec<u8>,
}
pub struct TransactionSignatureMachine {
tx: Transaction,
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
}
impl SignableTransaction {
@@ -60,7 +73,7 @@ impl SignableTransaction {
keys: FrostKeys<Ed25519>,
mut transcript: RecommendedTranscript,
height: usize,
mut included: Vec<u16>
mut included: Vec<u16>,
) -> Result<TransactionMachine, TransactionError> {
let mut inputs = vec![];
for _ in 0 .. self.inputs.len() {
@@ -80,9 +93,10 @@ impl SignableTransaction {
// The data itself will be included, making this unnecessary, yet a lot of this is technically
// unnecessary. Anything which further increases security at almost no cost should be followed
transcript.append_message(b"height", &u64::try_from(height).unwrap().to_le_bytes());
// Also include the spend_key as below only the key offset is included, so this confirms the sum product
// Useful as confirming the sum product confirms the key image, further guaranteeing the one time
// properties noted below
// Also include the spend_key as below only the key offset is included, so this transcripts the
// sum product
// Useful as transcripting the sum product effectively transcripts the key image, further
// guaranteeing the one time properties noted below
transcript.append_message(b"spend_key", &keys.group_key().0.compress().to_bytes());
for input in &self.inputs {
// These outputs can only be spent once. Therefore, it forces all RNGs derived from this
@@ -110,14 +124,12 @@ impl SignableTransaction {
clsags.push(
AlgorithmMachine::new(
ClsagMultisig::new(
transcript.clone(),
input.key,
inputs[i].clone()
).map_err(|e| TransactionError::MultisigError(e))?,
ClsagMultisig::new(transcript.clone(), input.key, inputs[i].clone())
.map_err(|e| TransactionError::MultisigError(e))?,
Arc::new(offset),
&included
).map_err(|e| TransactionError::FrostError(e))?
&included,
)
.map_err(|e| TransactionError::FrostError(e))?,
);
}
@@ -132,22 +144,22 @@ impl SignableTransaction {
&mut ChaCha12Rng::from_seed(transcript.rng_seed(b"decoys")),
rpc,
height,
&self.inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
Ok(
TransactionMachine {
signable: self,
i: keys.params().i(),
included,
transcript,
decoys,
inputs,
clsags
}
&self.inputs,
)
.await
.map_err(|e| TransactionError::RpcError(e))?;
Ok(TransactionMachine {
signable: self,
i: keys.params().i(),
included,
transcript,
decoys,
inputs,
clsags,
})
}
}
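
Everything fed into the transcript above exists so that each signer independently derives identical decoys, transaction keys, and bulletproof randomness: every sub-RNG is ChaCha12 seeded from the shared transcript under its own label. The pattern in isolation, using the same transcript and rand_chacha crates (the label and appended data here are illustrative):

use rand_core::SeedableRng;
use rand_chacha::ChaCha12Rng;
use transcript::{Transcript, RecommendedTranscript};

fn shared_rngs() -> (ChaCha12Rng, ChaCha12Rng) {
  let mut transcript = RecommendedTranscript::new(b"Example Transaction");
  // Append everything the signers must agree on before drawing any seeds
  transcript.append_message(b"height", &1_000_000u64.to_le_bytes());
  // Distinct labels produce independent yet reproducible RNGs
  let decoys = ChaCha12Rng::from_seed(transcript.rng_seed(b"decoys"));
  let tx_keys = ChaCha12Rng::from_seed(transcript.rng_seed(b"tx_keys"));
  (decoys, tx_keys)
}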
@@ -157,18 +169,22 @@ impl PreprocessMachine for TransactionMachine {
fn preprocess<R: RngCore + CryptoRng>(
mut self,
rng: &mut R
rng: &mut R,
) -> (TransactionSignMachine, Vec<u8>) {
// Iterate over each CLSAG calling preprocess
let mut serialized = Vec::with_capacity(
// D_{G, H}, E_{G, H}, DLEqs, key image addendum
self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len())
self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len()),
);
let clsags = self.clsags.drain(..).map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng);
serialized.extend(&preprocess);
clsag
}).collect();
let clsags = self
.clsags
.drain(..)
.map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng);
serialized.extend(&preprocess);
clsag
})
.collect();
let our_preprocess = serialized.clone();
// We could add further entropy here, and previous versions of this library did so
@@ -194,7 +210,7 @@ impl PreprocessMachine for TransactionMachine {
our_preprocess,
},
serialized
serialized,
)
}
}
@@ -205,14 +221,12 @@ impl SignMachine<Transaction> for TransactionSignMachine {
fn sign<Re: Read>(
mut self,
mut commitments: HashMap<u16, Re>,
msg: &[u8]
msg: &[u8],
) -> Result<(TransactionSignatureMachine, Vec<u8>), FrostError> {
if msg.len() != 0 {
Err(
FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own"
)
)?;
Err(FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own",
))?;
}
// FROST commitments and their DLEqs, and the image and its DLEq
@@ -220,34 +234,46 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// Convert the unified commitments to a Vec of the individual commitments
let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
let mut commitments = (0 .. self.clsags.len()).map(|c| {
let mut buf = [0; CLSAG_LEN];
(&self.included).iter().map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own transcripts
// cloned from this TX's initial premise's transcript. For our TX transcript to have the CLSAG
// data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice());
} else {
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf).map_err(|_| FrostError::InvalidCommitment(*l))?;
}
self.transcript.append_message(b"preprocess", &buf);
let mut commitments = (0 .. self.clsags.len())
.map(|c| {
let mut buf = [0; CLSAG_LEN];
(&self.included)
.iter()
.map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own
// transcripts cloned from this TX's initial premise's transcript. For our TX
// transcript to have the CLSAG data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice());
} else {
commitments
.get_mut(l)
.ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf)
.map_err(|_| FrostError::InvalidCommitment(*l))?;
}
self.transcript.append_message(b"preprocess", &buf);
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall, as this is where the TX is (which needs the key images in its
// message), along with where the outputs are determined (where our outputs may need
// these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY(
buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
).decompress().ok_or(FrostError::InvalidCommitment(*l))?;
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well
// provides the easiest API overall, as this is where the TX is (which needs the key
// images in its message), along with where the outputs are determined (where our
// outputs may need these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY(
buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)]
.try_into()
.map_err(|_| FrostError::InvalidCommitment(*l))?,
)
.decompress()
.ok_or(FrostError::InvalidCommitment(*l))?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()
}).collect::<Result<Vec<_>, _>>()?;
Ok((*l, Cursor::new(buf)))
})
.collect::<Result<HashMap<_, _>, _>>()
})
.collect::<Result<Vec<_>, _>>()?;
// Remove our preprocess which shouldn't be here. It was just the easiest way to implement the
// above
@@ -265,20 +291,20 @@ impl SignMachine<Transaction> for TransactionSignMachine {
(commitments, output_masks) = self.signable.prepare_outputs(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys")),
uniqueness(
&images.iter().map(|image| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: *image
}).collect::<Vec<_>>()
)
&images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
);
self.signable.prepare_transaction(
&commitments,
Bulletproofs::new(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"bulletproofs")),
&commitments
).unwrap()
&commitments,
)
.unwrap(),
)
};
@@ -291,7 +317,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
self.decoys.swap_remove(0),
self.inputs.swap_remove(0),
self.clsags.swap_remove(0),
commitments.swap_remove(0)
commitments.swap_remove(0),
));
}
sorted.sort_by(|x, y| key_image_sort(&x.0, &y.0));
@@ -308,23 +334,18 @@ impl SignMachine<Transaction> for TransactionSignMachine {
sum_pseudo_outs += mask;
}
tx.prefix.inputs.push(
Input::ToKey {
amount: 0,
key_offsets: value.2.offsets.clone(),
key_image: value.0
}
);
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: value.2.offsets.clone(),
key_image: value.0,
});
*value.3.write().unwrap() = Some(
ClsagDetails::new(
ClsagInput::new(
value.1.commitment,
value.2
).map_err(|_| panic!("Signing an input which isn't present in the ring we created for it"))?,
mask
)
);
*value.3.write().unwrap() = Some(ClsagDetails::new(
ClsagInput::new(value.1.commitment, value.2).map_err(|_| {
panic!("Signing an input which isn't present in the ring we created for it")
})?,
mask,
));
self.clsags.push(value.4);
commitments.push(value.5);
@@ -334,11 +355,15 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// Iterate over each CLSAG calling sign
let mut serialized = Vec::with_capacity(self.clsags.len() * 32);
let clsags = self.clsags.drain(..).map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
serialized.extend(&share);
Ok(clsag)
}).collect::<Result<_, _>>()?;
let clsags = self
.clsags
.drain(..)
.map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
serialized.extend(&share);
Ok(clsag)
})
.collect::<Result<_, _>>()?;
Ok((TransactionSignatureMachine { tx, clsags }, serialized))
}
@@ -352,11 +377,14 @@ impl SignatureMachine<Transaction> for TransactionSignatureMachine {
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for clsag in self.clsags {
let (clsag, pseudo_out) = clsag.complete(
shares.iter_mut().map(|(l, shares)| {
let mut buf = [0; 32];
shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()?
shares
.iter_mut()
.map(|(l, shares)| {
let mut buf = [0; 32];
shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
})
.collect::<Result<HashMap<_, _>, _>>()?,
)?;
clsags.push(clsag);
pseudo_outs.push(pseudo_out);

View File

@@ -6,10 +6,13 @@ use serde_json::json;
use monero::{
network::Network,
util::{key::PublicKey, address::Address}
util::{key::PublicKey, address::Address},
};
use monero_serai::{random_scalar, rpc::{EmptyResponse, RpcError, Rpc}};
use monero_serai::{
random_scalar,
rpc::{EmptyResponse, RpcError, Rpc},
};
pub async fn rpc() -> Rpc {
let rpc = Rpc::new("http://127.0.0.1:18081".to_string());
@@ -22,8 +25,9 @@ pub async fn rpc() -> Rpc {
let addr = Address::standard(
Network::Mainnet,
PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() },
PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() }
).to_string();
PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() },
)
.to_string();
// Mine 10 blocks so we have 10 decoys so decoy selection doesn't fail
mine_block(&rpc, &addr).await.unwrap();
@@ -32,11 +36,16 @@ pub async fn rpc() -> Rpc {
}
pub async fn mine_block(rpc: &Rpc, address: &str) -> Result<EmptyResponse, RpcError> {
rpc.rpc_call("json_rpc", Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": 10
},
}))).await
rpc
.rpc_call(
"json_rpc",
Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": 10
},
})),
)
.await
}
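
For reference, the reshaped call is just monerod's generateblocks JSON-RPC method wrapped for the json_rpc endpoint; the request body amounts to the following (a sketch built with serde_json only; the address is a placeholder and block generation requires a daemon that allows it, e.g. one running in regtest mode):

use serde_json::json;

// Body sent to /json_rpc to mine `blocks` blocks to `address`
fn generateblocks_body(address: &str, blocks: u64) -> serde_json::Value {
  json!({
    "method": "generateblocks",
    "params": {
      "wallet_address": address,
      "amount_of_blocks": blocks
    },
  })
}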

View File

@@ -16,9 +16,19 @@ use dalek_ff_group::Scalar;
#[cfg(feature = "multisig")]
use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::{curve::Ed25519, tests::{THRESHOLD, key_gen, sign}};
use frost::{
curve::Ed25519,
tests::{THRESHOLD, key_gen, sign},
};
use monero_serai::{random_scalar, wallet::{ViewPair, address::{Network, AddressType}, SignableTransaction}};
use monero_serai::{
random_scalar,
wallet::{
ViewPair,
address::{Network, AddressType},
SignableTransaction,
},
};
mod rpc;
use crate::rpc::{rpc, mine_block};
@@ -122,9 +132,9 @@ async fn send_core(test: usize, multisig: bool) {
}
}
let mut signable = SignableTransaction::new(
outputs, vec![(addr, amount - 10000000000)], Some(addr), fee
).unwrap();
let mut signable =
SignableTransaction::new(outputs, vec![(addr, amount - 10000000000)], Some(addr), fee)
.unwrap();
if !multisig {
tx = Some(signable.sign(&mut OsRng, &rpc, &spend).await.unwrap());
@@ -135,13 +145,17 @@ async fn send_core(test: usize, multisig: bool) {
for i in 1 ..= THRESHOLD {
machines.insert(
i,
signable.clone().multisig(
&rpc,
(*keys[&i]).clone(),
RecommendedTranscript::new(b"Monero Serai Test Transaction"),
rpc.get_height().await.unwrap() - 10,
(1 ..= THRESHOLD).collect::<Vec<_>>()
).await.unwrap()
signable
.clone()
.multisig(
&rpc,
(*keys[&i]).clone(),
RecommendedTranscript::new(b"Monero Serai Test Transaction"),
rpc.get_height().await.unwrap() - 10,
(1 ..= THRESHOLD).collect::<Vec<_>>(),
)
.await
.unwrap(),
);
}