Mirror of https://github.com/serai-dex/serai.git (synced 2025-12-09 12:49:23 +00:00)
Rename the coins folder to networks (#583)

* Rename the coins folder to networks

  Ethereum isn't a coin. It's a network. Resolves #357.

* More renames of coins -> networks in orchestration
* Correct paths in tests/
* cargo fmt
networks/monero/wallet/src/decoys.rs (new file, 311 lines)
@@ -0,0 +1,311 @@
use std_shims::{io, vec::Vec, string::ToString, collections::HashSet};
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
use rand_distr::{Distribution, Gamma};
|
||||
#[cfg(not(feature = "std"))]
|
||||
use rand_distr::num_traits::Float;
|
||||
|
||||
use curve25519_dalek::{Scalar, EdwardsPoint};
|
||||
|
||||
use crate::{
|
||||
DEFAULT_LOCK_WINDOW, COINBASE_LOCK_WINDOW, BLOCK_TIME,
|
||||
primitives::{Commitment, Decoys},
|
||||
rpc::{RpcError, DecoyRpc},
|
||||
output::OutputData,
|
||||
WalletOutput,
|
||||
};
|
||||
|
||||
const RECENT_WINDOW: usize = 15;
|
||||
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
|
||||
#[allow(clippy::cast_precision_loss)]
|
||||
const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64;
|
||||
|
||||
async fn select_n(
|
||||
rng: &mut (impl RngCore + CryptoRng),
|
||||
rpc: &impl DecoyRpc,
|
||||
height: usize,
|
||||
real_output: u64,
|
||||
ring_len: usize,
|
||||
fingerprintable_deterministic: bool,
|
||||
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
|
||||
if height < DEFAULT_LOCK_WINDOW {
|
||||
Err(RpcError::InternalError("not enough blocks to select decoys".to_string()))?;
|
||||
}
|
||||
if height > rpc.get_output_distribution_end_height().await? {
|
||||
Err(RpcError::InternalError(
|
||||
"decoys being requested from blocks this node doesn't have".to_string(),
|
||||
))?;
|
||||
}
|
||||
|
||||
// Get the distribution
|
||||
let distribution = rpc.get_output_distribution(.. height).await?;
|
||||
if distribution.len() < DEFAULT_LOCK_WINDOW {
|
||||
Err(RpcError::InternalError("not enough blocks to select decoys".to_string()))?;
|
||||
}
|
||||
let highest_output_exclusive_bound = distribution[distribution.len() - DEFAULT_LOCK_WINDOW];
|
||||
// This assumes that each miner TX had one output (as sane) and checks we have sufficient
|
||||
// outputs even when excluding them (due to their own timelock requirements)
|
||||
// Considering this a temporal error for very new chains, it's sufficiently sane to have
|
||||
if highest_output_exclusive_bound.saturating_sub(u64::try_from(COINBASE_LOCK_WINDOW).unwrap()) <
|
||||
u64::try_from(ring_len).unwrap()
|
||||
{
|
||||
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
|
||||
}
|
||||
|
||||
// Determine the outputs per second
|
||||
#[allow(clippy::cast_precision_loss)]
|
||||
let per_second = {
|
||||
let blocks = distribution.len().min(BLOCKS_PER_YEAR);
|
||||
let initial = distribution[distribution.len().saturating_sub(blocks + 1)];
|
||||
let outputs = distribution[distribution.len() - 1].saturating_sub(initial);
|
||||
(outputs as f64) / ((blocks * BLOCK_TIME) as f64)
|
||||
};
|
||||
|
||||
// Don't select the real output
|
||||
let mut do_not_select = HashSet::new();
|
||||
do_not_select.insert(real_output);
|
||||
|
||||
let decoy_count = ring_len - 1;
|
||||
let mut res = Vec::with_capacity(decoy_count);
|
||||
|
||||
let mut iters = 0;
|
||||
// Iterates until we have enough decoys
|
||||
// If an iteration only returns a partial set of decoys, the remainder will be obvious as decoys
|
||||
// to the RPC
|
||||
// The length of that remainder is expected to be minimal
|
||||
while res.len() != decoy_count {
|
||||
iters += 1;
|
||||
#[cfg(not(test))]
|
||||
const MAX_ITERS: usize = 10;
|
||||
// When testing on fresh chains, increased iterations can be useful, and we don't require
// reasonable performance
|
||||
#[cfg(test)]
|
||||
const MAX_ITERS: usize = 100;
|
||||
// Ensure this isn't infinitely looping
|
||||
// We check both that we aren't at the maximum amount of iterations and that the not-yet
|
||||
// selected candidates exceed the amount of candidates necessary to trigger the next iteration
|
||||
if (iters == MAX_ITERS) ||
|
||||
((highest_output_exclusive_bound - u64::try_from(do_not_select.len()).unwrap()) <
|
||||
u64::try_from(ring_len).unwrap())
|
||||
{
|
||||
Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?;
|
||||
}
|
||||
|
||||
let remaining = decoy_count - res.len();
|
||||
let mut candidates = Vec::with_capacity(remaining);
|
||||
while candidates.len() != remaining {
|
||||
// Use a gamma distribution, as Monero does
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
|
||||
// /src/wallet/wallet2.cpp#L142-L143
|
||||
let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
|
||||
#[allow(clippy::cast_precision_loss)]
|
||||
if age > TIP_APPLICATION {
|
||||
age -= TIP_APPLICATION;
|
||||
} else {
|
||||
// f64 does not have try_from available, which is why these are written with `as`
|
||||
age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
|
||||
}
|
||||
|
||||
#[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
|
||||
let o = (age * per_second) as u64;
|
||||
if o < highest_output_exclusive_bound {
|
||||
// Find which block this points to
|
||||
let i = distribution.partition_point(|s| *s < (highest_output_exclusive_bound - 1 - o));
|
||||
let prev = i.saturating_sub(1);
|
||||
let n = distribution[i] - distribution[prev];
|
||||
if n != 0 {
|
||||
// Select an output from within this block
|
||||
let o = distribution[prev] + (rng.next_u64() % n);
|
||||
if !do_not_select.contains(&o) {
|
||||
candidates.push(o);
|
||||
// This output will either be used or is unusable
|
||||
// In either case, we should not try it again
|
||||
do_not_select.insert(o);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If this is the first time we're requesting these outputs, include the real one as well
|
||||
// Prevents the node we're connected to from having a list of known decoys and then seeing a
|
||||
// TX which uses all of them, with one additional output (the true spend)
|
||||
let real_index = if iters == 1 {
|
||||
candidates.push(real_output);
|
||||
// Sort candidates so the real spends aren't the ones at the end
|
||||
candidates.sort();
|
||||
Some(candidates.binary_search(&real_output).unwrap())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
for (i, output) in rpc
|
||||
.get_unlocked_outputs(&candidates, height, fingerprintable_deterministic)
|
||||
.await?
|
||||
.iter_mut()
|
||||
.enumerate()
|
||||
{
|
||||
// We could check the returned info is equivalent to our expectations, yet that'd allow the
|
||||
// node to malleate the returned info to see if they can cause this error (allowing them to
|
||||
// figure out the output being spent)
|
||||
//
|
||||
// Some degree of this attack (forcing resampling/trying to observe errors) is likely
|
||||
// always possible
|
||||
if real_index == Some(i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If this is an unlocked output, push it to the result
|
||||
if let Some(output) = output.take() {
|
||||
res.push((candidates[i], output));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
async fn select_decoys<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
rpc: &impl DecoyRpc,
|
||||
ring_len: usize,
|
||||
height: usize,
|
||||
input: &WalletOutput,
|
||||
fingerprintable_deterministic: bool,
|
||||
) -> Result<Decoys, RpcError> {
|
||||
// Select all decoys for this transaction, assuming we generate a sane transaction
|
||||
// We should almost never naturally generate an insane transaction, hence why this doesn't
|
||||
// bother with an overage
|
||||
let decoys = select_n(
|
||||
rng,
|
||||
rpc,
|
||||
height,
|
||||
input.relative_id.index_on_blockchain,
|
||||
ring_len,
|
||||
fingerprintable_deterministic,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Form the complete ring
|
||||
let mut ring = decoys;
|
||||
ring.push((input.relative_id.index_on_blockchain, [input.key(), input.commitment().calculate()]));
|
||||
ring.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
|
||||
/*
|
||||
Monero does have sanity checks which it applies to the selected ring.
|
||||
|
||||
They're statistically unlikely to be hit and only occur when the transaction is published over
|
||||
the RPC (so they are not a relay rule). The RPC allows disabling them, which monero-rpc does to
|
||||
ensure they don't pose a problem.
|
||||
|
||||
They aren't worth the complexity to implement here, especially since they're non-deterministic.
|
||||
*/
|
||||
|
||||
// We need to convert our positional indexes to offset indexes
|
||||
let mut offsets = Vec::with_capacity(ring.len());
|
||||
{
|
||||
offsets.push(ring[0].0);
|
||||
for m in 1 .. ring.len() {
|
||||
offsets.push(ring[m].0 - ring[m - 1].0);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
Decoys::new(
|
||||
offsets,
|
||||
// Binary searches for the real spend since we don't know where it sorted to
|
||||
u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)).unwrap(),
|
||||
ring.into_iter().map(|output| output.1).collect(),
|
||||
)
|
||||
.unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
/// An output with decoys selected.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||
pub struct OutputWithDecoys {
|
||||
output: OutputData,
|
||||
decoys: Decoys,
|
||||
}
|
||||
|
||||
impl OutputWithDecoys {
|
||||
/// Select decoys for this output.
|
||||
pub async fn new(
|
||||
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
|
||||
rpc: &impl DecoyRpc,
|
||||
ring_len: usize,
|
||||
height: usize,
|
||||
output: WalletOutput,
|
||||
) -> Result<OutputWithDecoys, RpcError> {
|
||||
let decoys = select_decoys(rng, rpc, ring_len, height, &output, false).await?;
|
||||
Ok(OutputWithDecoys { output: output.data.clone(), decoys })
|
||||
}
|
||||
|
||||
/// Select a set of decoys for this output with a deterministic process.
|
||||
///
|
||||
/// This function will always output the same set of decoys when called with the same arguments.
|
||||
/// This makes it very useful in multisignature contexts, where instead of having one participant
|
||||
/// select the decoys, everyone can locally select the decoys while coming to the same result.
|
||||
///
|
||||
/// The set of decoys selected may be fingerprintable as having been produced by this
|
||||
/// methodology.
|
||||
pub async fn fingerprintable_deterministic_new(
|
||||
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
|
||||
rpc: &impl DecoyRpc,
|
||||
ring_len: usize,
|
||||
height: usize,
|
||||
output: WalletOutput,
|
||||
) -> Result<OutputWithDecoys, RpcError> {
|
||||
let decoys = select_decoys(rng, rpc, ring_len, height, &output, true).await?;
|
||||
Ok(OutputWithDecoys { output: output.data.clone(), decoys })
|
||||
}
|
||||
|
||||
/// The key this output may be spent by.
|
||||
pub fn key(&self) -> EdwardsPoint {
|
||||
self.output.key()
|
||||
}
|
||||
|
||||
/// The scalar to add to the private spend key for it to be the discrete logarithm of this
|
||||
/// output's key.
|
||||
pub fn key_offset(&self) -> Scalar {
|
||||
self.output.key_offset
|
||||
}
|
||||
|
||||
/// The commitment this output created.
|
||||
pub fn commitment(&self) -> &Commitment {
|
||||
&self.output.commitment
|
||||
}
|
||||
|
||||
/// The decoys this output selected.
|
||||
pub fn decoys(&self) -> &Decoys {
|
||||
&self.decoys
|
||||
}
|
||||
|
||||
/// Write the OutputWithDecoys.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
self.output.write(w)?;
|
||||
self.decoys.write(w)
|
||||
}
|
||||
|
||||
/// Serialize the OutputWithDecoys to a `Vec<u8>`.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut serialized = Vec::with_capacity(128);
|
||||
self.write(&mut serialized).unwrap();
|
||||
serialized
|
||||
}
|
||||
|
||||
/// Read an OutputWithDecoys.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
|
||||
Ok(Self { output: OutputData::read(r)?, decoys: Decoys::read(r)? })
|
||||
}
|
||||
}
|
||||
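As an aside on the offset encoding used above: `Decoys::new` receives the ring members' blockchain indexes as relative offsets (the first element absolute, each subsequent element the difference from its predecessor). The following standalone sketch, with hypothetical helper names `to_offsets` and `from_offsets` that are not part of this crate, illustrates that conversion and its inverse.

// Hypothetical helpers (not part of monero-wallet) mirroring the positional-index -> offset
// conversion performed in `select_decoys` above, plus its inverse.
fn to_offsets(sorted_indexes: &[u64]) -> Vec<u64> {
  let mut offsets = Vec::with_capacity(sorted_indexes.len());
  let mut prev = 0;
  for index in sorted_indexes {
    offsets.push(index - prev);
    prev = *index;
  }
  offsets
}

fn from_offsets(offsets: &[u64]) -> Vec<u64> {
  let mut indexes = Vec::with_capacity(offsets.len());
  let mut cumulative = 0;
  for offset in offsets {
    cumulative += offset;
    indexes.push(cumulative);
  }
  indexes
}

#[test]
fn offsets_round_trip() {
  // [10, 25, 40] encodes to [10, 15, 15] and decodes back
  assert_eq!(from_offsets(&to_offsets(&[10, 25, 40])), vec![10, 25, 40]);
}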
networks/monero/wallet/src/extra.rs (new file, 309 lines)
@@ -0,0 +1,309 @@
use core::ops::BitXor;
|
||||
use std_shims::{
|
||||
vec,
|
||||
vec::Vec,
|
||||
io::{self, Read, BufRead, Write},
|
||||
};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use curve25519_dalek::edwards::EdwardsPoint;
|
||||
|
||||
use monero_serai::io::*;
|
||||
|
||||
pub(crate) const MAX_TX_EXTRA_PADDING_COUNT: usize = 255;
|
||||
const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;
|
||||
|
||||
const PAYMENT_ID_MARKER: u8 = 0;
|
||||
const ENCRYPTED_PAYMENT_ID_MARKER: u8 = 1;
|
||||
// Used as it's the highest value not interpretable as a continued VarInt
|
||||
pub(crate) const ARBITRARY_DATA_MARKER: u8 = 127;
|
||||
|
||||
/// The max amount of data which will fit within a blob of arbitrary data.
|
||||
// 1 byte is used for the marker
|
||||
pub const MAX_ARBITRARY_DATA_SIZE: usize = MAX_TX_EXTRA_NONCE_SIZE - 1;
|
||||
|
||||
/// A Payment ID.
|
||||
///
|
||||
/// This is a legacy method of identifying why Monero was sent to the receiver.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub enum PaymentId {
|
||||
/// A deprecated form of payment ID which is no longer supported.
|
||||
Unencrypted([u8; 32]),
|
||||
/// An encrypted payment ID.
|
||||
Encrypted([u8; 8]),
|
||||
}
|
||||
|
||||
impl BitXor<[u8; 8]> for PaymentId {
|
||||
type Output = PaymentId;
|
||||
|
||||
fn bitxor(self, bytes: [u8; 8]) -> PaymentId {
|
||||
match self {
|
||||
// Don't perform the xor since this isn't intended to be encrypted with xor
|
||||
PaymentId::Unencrypted(_) => self,
|
||||
PaymentId::Encrypted(id) => {
|
||||
PaymentId::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PaymentId {
|
||||
/// Write the PaymentId.
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
match self {
|
||||
PaymentId::Unencrypted(id) => {
|
||||
w.write_all(&[PAYMENT_ID_MARKER])?;
|
||||
w.write_all(id)?;
|
||||
}
|
||||
PaymentId::Encrypted(id) => {
|
||||
w.write_all(&[ENCRYPTED_PAYMENT_ID_MARKER])?;
|
||||
w.write_all(id)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Serialize the PaymentId to a `Vec<u8>`.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut res = Vec::with_capacity(1 + 8);
|
||||
self.write(&mut res).unwrap();
|
||||
res
|
||||
}
|
||||
|
||||
/// Read a PaymentId.
|
||||
pub fn read<R: Read>(r: &mut R) -> io::Result<PaymentId> {
|
||||
Ok(match read_byte(r)? {
|
||||
0 => PaymentId::Unencrypted(read_bytes(r)?),
|
||||
1 => PaymentId::Encrypted(read_bytes(r)?),
|
||||
_ => Err(io::Error::other("unknown payment ID type"))?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A field within the TX extra.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub enum ExtraField {
|
||||
/// Padding.
|
||||
///
|
||||
/// This is a block of zeroes within the TX extra.
|
||||
Padding(usize),
|
||||
/// The transaction key.
|
||||
///
|
||||
/// This is a commitment to the randomness used for deriving outputs.
|
||||
PublicKey(EdwardsPoint),
|
||||
/// The nonce field.
|
||||
///
|
||||
/// This is used for data, such as payment IDs.
|
||||
Nonce(Vec<u8>),
|
||||
/// The field for merge-mining.
|
||||
///
|
||||
/// This is used within miner transactions by miners who are merge-mining Monero to specify the foreign
|
||||
/// block they mined.
|
||||
MergeMining(usize, [u8; 32]),
|
||||
/// The additional transaction keys.
|
||||
///
|
||||
/// These are the per-output commitments to the randomness used for deriving outputs.
|
||||
PublicKeys(Vec<EdwardsPoint>),
|
||||
/// The 'mysterious' Minergate tag.
|
||||
///
|
||||
/// This was used by a closed source entity without documentation. Support for parsing it was
|
||||
/// added to reduce extra which couldn't be decoded.
|
||||
MysteriousMinergate(Vec<u8>),
|
||||
}
|
||||
|
||||
impl ExtraField {
|
||||
/// Write the ExtraField.
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
match self {
|
||||
ExtraField::Padding(size) => {
|
||||
w.write_all(&[0])?;
|
||||
for _ in 1 .. *size {
|
||||
write_byte(&0u8, w)?;
|
||||
}
|
||||
}
|
||||
ExtraField::PublicKey(key) => {
|
||||
w.write_all(&[1])?;
|
||||
w.write_all(&key.compress().to_bytes())?;
|
||||
}
|
||||
ExtraField::Nonce(data) => {
|
||||
w.write_all(&[2])?;
|
||||
write_vec(write_byte, data, w)?;
|
||||
}
|
||||
ExtraField::MergeMining(height, merkle) => {
|
||||
w.write_all(&[3])?;
|
||||
write_varint(&u64::try_from(*height).unwrap(), w)?;
|
||||
w.write_all(merkle)?;
|
||||
}
|
||||
ExtraField::PublicKeys(keys) => {
|
||||
w.write_all(&[4])?;
|
||||
write_vec(write_point, keys, w)?;
|
||||
}
|
||||
ExtraField::MysteriousMinergate(data) => {
|
||||
w.write_all(&[0xDE])?;
|
||||
write_vec(write_byte, data, w)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Serialize the ExtraField to a `Vec<u8>`.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut res = Vec::with_capacity(1 + 8);
|
||||
self.write(&mut res).unwrap();
|
||||
res
|
||||
}
|
||||
|
||||
/// Read an ExtraField.
|
||||
pub fn read<R: BufRead>(r: &mut R) -> io::Result<ExtraField> {
|
||||
Ok(match read_byte(r)? {
|
||||
0 => ExtraField::Padding({
|
||||
// Read until either non-zero, max padding count, or end of buffer
|
||||
let mut size: usize = 1;
|
||||
loop {
|
||||
let buf = r.fill_buf()?;
|
||||
let mut n_consume = 0;
|
||||
for v in buf {
|
||||
if *v != 0u8 {
|
||||
Err(io::Error::other("non-zero value after padding"))?
|
||||
}
|
||||
n_consume += 1;
|
||||
size += 1;
|
||||
if size > MAX_TX_EXTRA_PADDING_COUNT {
|
||||
Err(io::Error::other("padding exceeded max count"))?
|
||||
}
|
||||
}
|
||||
if n_consume == 0 {
|
||||
break;
|
||||
}
|
||||
r.consume(n_consume);
|
||||
}
|
||||
size
|
||||
}),
|
||||
1 => ExtraField::PublicKey(read_point(r)?),
|
||||
2 => ExtraField::Nonce({
|
||||
let nonce = read_vec(read_byte, r)?;
|
||||
if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
|
||||
Err(io::Error::other("too long nonce"))?;
|
||||
}
|
||||
nonce
|
||||
}),
|
||||
3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?),
|
||||
4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
|
||||
0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, r)?),
|
||||
_ => Err(io::Error::other("unknown extra field"))?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The result of decoding a transaction's extra field.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Extra(pub(crate) Vec<ExtraField>);
|
||||
impl Extra {
|
||||
/// The keys within this extra.
|
||||
///
|
||||
/// This returns all keys specified with `PublicKey` and the first set of keys specified with
|
||||
/// `PublicKeys`, so long as they're well-formed.
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
|
||||
// /src/wallet/wallet2.cpp#L2290-L2300
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
// /src/wallet/wallet2.cpp#L2337-L2340
|
||||
pub fn keys(&self) -> Option<(Vec<EdwardsPoint>, Option<Vec<EdwardsPoint>>)> {
|
||||
let mut keys = vec![];
|
||||
let mut additional = None;
|
||||
for field in &self.0 {
|
||||
match field.clone() {
|
||||
ExtraField::PublicKey(this_key) => keys.push(this_key),
|
||||
ExtraField::PublicKeys(these_additional) => {
|
||||
additional = additional.or(Some(these_additional))
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
// Don't return any keys if this was non-standard and didn't include the primary key
|
||||
if keys.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some((keys, additional))
|
||||
}
|
||||
}
|
||||
|
||||
/// The payment ID embedded within this extra.
|
||||
// Monero finds the first nonce field and reads the payment ID from it:
|
||||
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
|
||||
// src/wallet/wallet2.cpp#L2709-L2752
|
||||
pub fn payment_id(&self) -> Option<PaymentId> {
|
||||
for field in &self.0 {
|
||||
if let ExtraField::Nonce(data) = field {
|
||||
return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// The arbitrary data within this extra.
|
||||
///
|
||||
/// This uses a marker custom to monero-wallet.
|
||||
pub fn data(&self) -> Vec<Vec<u8>> {
|
||||
let mut res = vec![];
|
||||
for field in &self.0 {
|
||||
if let ExtraField::Nonce(data) = field {
|
||||
if data[0] == ARBITRARY_DATA_MARKER {
|
||||
res.push(data[1 ..].to_vec());
|
||||
}
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
pub(crate) fn new(key: EdwardsPoint, additional: Vec<EdwardsPoint>) -> Extra {
|
||||
let mut res = Extra(Vec::with_capacity(3));
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
// /src/cryptonote_basic/cryptonote_format_utils.cpp#L627-L633
|
||||
// We only support pushing nonces which come after these in the sort order
|
||||
res.0.push(ExtraField::PublicKey(key));
|
||||
if !additional.is_empty() {
|
||||
res.0.push(ExtraField::PublicKeys(additional));
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
pub(crate) fn push_nonce(&mut self, nonce: Vec<u8>) {
|
||||
self.0.push(ExtraField::Nonce(nonce));
|
||||
}
|
||||
|
||||
/// Write the Extra.
|
||||
///
|
||||
/// This is not of deterministic length nor length-prefixed. It should only be written to a
|
||||
/// buffer which will be delimited.
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
for field in &self.0 {
|
||||
field.write(w)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Serialize the Extra to a `Vec<u8>`.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
|
||||
/// Read an `Extra`.
|
||||
///
|
||||
/// This is not of deterministic length nor length-prefixed. It should only be read from a buffer
|
||||
/// already delimited.
|
||||
#[allow(clippy::unnecessary_wraps)]
|
||||
pub fn read<R: BufRead>(r: &mut R) -> io::Result<Extra> {
|
||||
let mut res = Extra(vec![]);
|
||||
// Extra reads until EOF
|
||||
// We take a BufRead so we can detect when the buffer is empty
|
||||
// `fill_buf` returns the current buffer, filled if empty, only empty if the reader is
|
||||
// exhausted
|
||||
while !r.fill_buf()?.is_empty() {
|
||||
res.0.push(ExtraField::read(r)?);
|
||||
}
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
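To illustrate the `BitXor` implementation above: encrypted payment IDs are masked by XORing with an 8-byte value derived from the shared secret, so applying the same mask twice recovers the original. A minimal sketch (the mask bytes here are arbitrary, and this test is not part of the crate's own test suite):

#[test]
fn payment_id_xor_round_trip() {
  let id = PaymentId::Encrypted([1, 2, 3, 4, 5, 6, 7, 8]);
  let mask = [0xAA; 8];
  // Encrypted payment IDs round-trip under the same mask; unencrypted ones pass through untouched
  assert_eq!((id ^ mask) ^ mask, id);
}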
networks/monero/wallet/src/lib.rs (new file, 160 lines)
@@ -0,0 +1,160 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![doc = include_str!("../README.md")]
|
||||
#![deny(missing_docs)]
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
|
||||
use std_shims::vec::Vec;
|
||||
|
||||
use zeroize::{Zeroize, Zeroizing};
|
||||
|
||||
use curve25519_dalek::{Scalar, EdwardsPoint};
|
||||
|
||||
use monero_serai::{
|
||||
io::write_varint,
|
||||
primitives::{Commitment, keccak256, keccak256_to_scalar},
|
||||
ringct::EncryptedAmount,
|
||||
transaction::Input,
|
||||
};
|
||||
|
||||
pub use monero_serai::*;
|
||||
|
||||
pub use monero_rpc as rpc;
|
||||
|
||||
pub use monero_address as address;
|
||||
|
||||
mod view_pair;
|
||||
pub use view_pair::{ViewPair, GuaranteedViewPair};
|
||||
|
||||
/// Structures and functionality for working with transactions' extra fields.
|
||||
pub mod extra;
|
||||
pub(crate) use extra::{PaymentId, Extra};
|
||||
|
||||
pub(crate) mod output;
|
||||
pub use output::WalletOutput;
|
||||
|
||||
mod scan;
|
||||
pub use scan::{Scanner, GuaranteedScanner};
|
||||
|
||||
mod decoys;
|
||||
pub use decoys::OutputWithDecoys;
|
||||
|
||||
/// Structs and functionality for sending transactions.
|
||||
pub mod send;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
struct SharedKeyDerivations {
|
||||
// Hs("view_tag" || 8Ra || o)
|
||||
view_tag: u8,
|
||||
// Hs(uniqueness || 8Ra || o) where uniqueness may be empty
|
||||
shared_key: Scalar,
|
||||
}
|
||||
|
||||
impl SharedKeyDerivations {
|
||||
// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
|
||||
fn uniqueness(inputs: &[Input]) -> [u8; 32] {
|
||||
let mut u = b"uniqueness".to_vec();
|
||||
for input in inputs {
|
||||
match input {
|
||||
// If Gen, this should be the only input, making this loop somewhat pointless
|
||||
// This works, and even if there were somehow multiple inputs, it'd be a false negative
|
||||
Input::Gen(height) => {
|
||||
write_varint(height, &mut u).unwrap();
|
||||
}
|
||||
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
|
||||
}
|
||||
}
|
||||
keccak256(u)
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
fn output_derivations(
|
||||
uniqueness: Option<[u8; 32]>,
|
||||
ecdh: Zeroizing<EdwardsPoint>,
|
||||
o: usize,
|
||||
) -> Zeroizing<SharedKeyDerivations> {
|
||||
// 8Ra
|
||||
let mut output_derivation = Zeroizing::new(
|
||||
Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(),
|
||||
);
|
||||
|
||||
// || o
|
||||
{
|
||||
let output_derivation: &mut Vec<u8> = output_derivation.as_mut();
|
||||
write_varint(&o, output_derivation).unwrap();
|
||||
}
|
||||
|
||||
let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
|
||||
|
||||
// uniqueness ||
|
||||
let output_derivation = if let Some(uniqueness) = uniqueness {
|
||||
Zeroizing::new([uniqueness.as_ref(), &output_derivation].concat())
|
||||
} else {
|
||||
output_derivation
|
||||
};
|
||||
|
||||
Zeroizing::new(SharedKeyDerivations {
|
||||
view_tag,
|
||||
shared_key: keccak256_to_scalar(&output_derivation),
|
||||
})
|
||||
}
|
||||
|
||||
// H(8Ra || 0x8d)
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
fn payment_id_xor(ecdh: Zeroizing<EdwardsPoint>) -> [u8; 8] {
|
||||
// 8Ra
|
||||
let output_derivation = Zeroizing::new(
|
||||
Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(),
|
||||
);
|
||||
|
||||
let mut payment_id_xor = [0; 8];
|
||||
payment_id_xor
|
||||
.copy_from_slice(&keccak256([output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
|
||||
payment_id_xor
|
||||
}
|
||||
|
||||
fn commitment_mask(&self) -> Scalar {
|
||||
let mut mask = b"commitment_mask".to_vec();
|
||||
mask.extend(self.shared_key.as_bytes());
|
||||
let res = keccak256_to_scalar(&mask);
|
||||
mask.zeroize();
|
||||
res
|
||||
}
|
||||
|
||||
fn compact_amount_encryption(&self, amount: u64) -> [u8; 8] {
|
||||
let mut amount_mask = Zeroizing::new(b"amount".to_vec());
|
||||
amount_mask.extend(self.shared_key.to_bytes());
|
||||
let mut amount_mask = keccak256(&amount_mask);
|
||||
|
||||
let mut amount_mask_8 = [0; 8];
|
||||
amount_mask_8.copy_from_slice(&amount_mask[.. 8]);
|
||||
amount_mask.zeroize();
|
||||
|
||||
(amount ^ u64::from_le_bytes(amount_mask_8)).to_le_bytes()
|
||||
}
|
||||
|
||||
fn decrypt(&self, enc_amount: &EncryptedAmount) -> Commitment {
|
||||
match enc_amount {
|
||||
// TODO: Add a test vector for this
|
||||
EncryptedAmount::Original { mask, amount } => {
|
||||
let mask_shared_sec = keccak256(self.shared_key.as_bytes());
|
||||
let mask =
|
||||
Scalar::from_bytes_mod_order(*mask) - Scalar::from_bytes_mod_order(mask_shared_sec);
|
||||
|
||||
let amount_shared_sec = keccak256(mask_shared_sec);
|
||||
let amount_scalar =
|
||||
Scalar::from_bytes_mod_order(*amount) - Scalar::from_bytes_mod_order(amount_shared_sec);
|
||||
// d2b from rctTypes.cpp
|
||||
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
|
||||
|
||||
Commitment::new(mask, amount)
|
||||
}
|
||||
EncryptedAmount::Compact { amount } => Commitment::new(
|
||||
self.commitment_mask(),
|
||||
u64::from_le_bytes(self.compact_amount_encryption(u64::from_le_bytes(*amount))),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
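As a sketch of the view-tag derivation documented above (Hs("view_tag" || 8Ra || o), keeping only the first byte of the hash), assuming `ecdh_bytes` already holds the serialized `8Ra` shared point. The function name and its inputs are illustrative, not items defined by this crate:

use monero_serai::{io::write_varint, primitives::keccak256};

// Illustrative only: derive a one-byte view tag from the serialized shared point and output index.
fn example_view_tag(ecdh_bytes: [u8; 32], o: usize) -> u8 {
  let mut derivation = ecdh_bytes.to_vec();
  // || o, appended as a VarInt exactly as in `output_derivations` above
  write_varint(&o, &mut derivation).unwrap();
  keccak256([b"view_tag".as_ref(), &derivation].concat())[0]
}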
networks/monero/wallet/src/output.rs (new file, 352 lines)
@@ -0,0 +1,352 @@
use std_shims::{
|
||||
vec,
|
||||
vec::Vec,
|
||||
io::{self, Read, Write},
|
||||
};
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use curve25519_dalek::{Scalar, edwards::EdwardsPoint};
|
||||
|
||||
use crate::{
|
||||
io::*, primitives::Commitment, transaction::Timelock, address::SubaddressIndex, extra::PaymentId,
|
||||
};
|
||||
|
||||
/// An absolute output ID, defined as its transaction hash and output index.
|
||||
///
|
||||
/// This is not the output's key as multiple outputs may share an output key.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||
pub(crate) struct AbsoluteId {
|
||||
pub(crate) transaction: [u8; 32],
|
||||
pub(crate) index_in_transaction: u32,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for AbsoluteId {
|
||||
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||
fmt
|
||||
.debug_struct("AbsoluteId")
|
||||
.field("transaction", &hex::encode(self.transaction))
|
||||
.field("index_in_transaction", &self.index_in_transaction)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl AbsoluteId {
|
||||
/// Write the AbsoluteId.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
w.write_all(&self.transaction)?;
|
||||
w.write_all(&self.index_in_transaction.to_le_bytes())
|
||||
}
|
||||
|
||||
/// Read an AbsoluteId.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
|
||||
Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u32(r)? })
|
||||
}
|
||||
}
|
||||
|
||||
/// An output's relative ID.
|
||||
///
|
||||
/// This is defined as the output's index on the blockchain.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||
pub(crate) struct RelativeId {
|
||||
pub(crate) index_on_blockchain: u64,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for RelativeId {
|
||||
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||
fmt.debug_struct("RelativeId").field("index_on_blockchain", &self.index_on_blockchain).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl RelativeId {
|
||||
/// Write the RelativeId.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
w.write_all(&self.index_on_blockchain.to_le_bytes())
|
||||
}
|
||||
|
||||
/// Read an RelativeId.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||
Ok(RelativeId { index_on_blockchain: read_u64(r)? })
|
||||
}
|
||||
}
|
||||
|
||||
/// The data within an output, as necessary to spend the output.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||
pub(crate) struct OutputData {
|
||||
pub(crate) key: EdwardsPoint,
|
||||
pub(crate) key_offset: Scalar,
|
||||
pub(crate) commitment: Commitment,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for OutputData {
|
||||
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||
fmt
|
||||
.debug_struct("OutputData")
|
||||
.field("key", &hex::encode(self.key.compress().0))
|
||||
.field("key_offset", &hex::encode(self.key_offset.to_bytes()))
|
||||
.field("commitment", &self.commitment)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl OutputData {
|
||||
/// The key this output may be spent by.
|
||||
pub(crate) fn key(&self) -> EdwardsPoint {
|
||||
self.key
|
||||
}
|
||||
|
||||
/// The scalar to add to the private spend key for it to be the discrete logarithm of this
|
||||
/// output's key.
|
||||
pub(crate) fn key_offset(&self) -> Scalar {
|
||||
self.key_offset
|
||||
}
|
||||
|
||||
/// The commitment this output created.
|
||||
pub(crate) fn commitment(&self) -> &Commitment {
|
||||
&self.commitment
|
||||
}
|
||||
|
||||
/// Write the OutputData.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub(crate) fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
w.write_all(&self.key.compress().to_bytes())?;
|
||||
w.write_all(&self.key_offset.to_bytes())?;
|
||||
self.commitment.write(w)
|
||||
}
|
||||
|
||||
/*
|
||||
/// Serialize the OutputData to a `Vec<u8>`.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut res = Vec::with_capacity(32 + 32 + 40);
|
||||
self.write(&mut res).unwrap();
|
||||
res
|
||||
}
|
||||
*/
|
||||
|
||||
/// Read an OutputData.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub(crate) fn read<R: Read>(r: &mut R) -> io::Result<OutputData> {
|
||||
Ok(OutputData {
|
||||
key: read_point(r)?,
|
||||
key_offset: read_scalar(r)?,
|
||||
commitment: Commitment::read(r)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The metadata for an output.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||
pub(crate) struct Metadata {
|
||||
pub(crate) additional_timelock: Timelock,
|
||||
pub(crate) subaddress: Option<SubaddressIndex>,
|
||||
pub(crate) payment_id: Option<PaymentId>,
|
||||
pub(crate) arbitrary_data: Vec<Vec<u8>>,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for Metadata {
|
||||
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||
fmt
|
||||
.debug_struct("Metadata")
|
||||
.field("additional_timelock", &self.additional_timelock)
|
||||
.field("subaddress", &self.subaddress)
|
||||
.field("payment_id", &self.payment_id)
|
||||
.field("arbitrary_data", &self.arbitrary_data.iter().map(hex::encode).collect::<Vec<_>>())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Metadata {
|
||||
/// Write the Metadata.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
self.additional_timelock.write(w)?;
|
||||
|
||||
if let Some(subaddress) = self.subaddress {
|
||||
w.write_all(&[1])?;
|
||||
w.write_all(&subaddress.account().to_le_bytes())?;
|
||||
w.write_all(&subaddress.address().to_le_bytes())?;
|
||||
} else {
|
||||
w.write_all(&[0])?;
|
||||
}
|
||||
|
||||
if let Some(payment_id) = self.payment_id {
|
||||
w.write_all(&[1])?;
|
||||
payment_id.write(w)?;
|
||||
} else {
|
||||
w.write_all(&[0])?;
|
||||
}
|
||||
|
||||
w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
|
||||
for part in &self.arbitrary_data {
|
||||
w.write_all(&[u8::try_from(part.len()).unwrap()])?;
|
||||
w.write_all(part)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Read a Metadata.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
fn read<R: Read>(r: &mut R) -> io::Result<Metadata> {
|
||||
let additional_timelock = Timelock::read(r)?;
|
||||
|
||||
let subaddress = match read_byte(r)? {
|
||||
0 => None,
|
||||
1 => Some(
|
||||
SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
|
||||
.ok_or_else(|| io::Error::other("invalid subaddress in metadata"))?,
|
||||
),
|
||||
_ => Err(io::Error::other("invalid subaddress is_some boolean in metadata"))?,
|
||||
};
|
||||
|
||||
Ok(Metadata {
|
||||
additional_timelock,
|
||||
subaddress,
|
||||
payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
|
||||
arbitrary_data: {
|
||||
let mut data = vec![];
|
||||
for _ in 0 .. read_u32(r)? {
|
||||
let len = read_byte(r)?;
|
||||
data.push(read_raw_vec(read_byte, usize::from(len), r)?);
|
||||
}
|
||||
data
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A scanned output and all associated data.
|
||||
///
|
||||
/// This struct contains all data necessary to spend this output, or handle it as a payment.
|
||||
///
|
||||
/// This struct is bound to a specific instance of the blockchain. If the blockchain reorganizes
|
||||
/// the block this struct is bound to, it MUST be discarded. If any outputs are mutual to both
|
||||
/// blockchains, scanning the new blockchain will yield those outputs again.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||
pub struct WalletOutput {
|
||||
/// The absolute ID for this transaction.
|
||||
pub(crate) absolute_id: AbsoluteId,
|
||||
/// The ID for this transaction, relative to the blockchain.
|
||||
pub(crate) relative_id: RelativeId,
|
||||
/// The output's data.
|
||||
pub(crate) data: OutputData,
|
||||
/// Associated metadata relevant for handling it as a payment.
|
||||
pub(crate) metadata: Metadata,
|
||||
}
|
||||
|
||||
impl WalletOutput {
|
||||
/// The hash of the transaction which created this output.
|
||||
pub fn transaction(&self) -> [u8; 32] {
|
||||
self.absolute_id.transaction
|
||||
}
|
||||
|
||||
/// The index of the output within the transaction.
|
||||
pub fn index_in_transaction(&self) -> u32 {
|
||||
self.absolute_id.index_in_transaction
|
||||
}
|
||||
|
||||
/// The index of the output on the blockchain.
|
||||
pub fn index_on_blockchain(&self) -> u64 {
|
||||
self.relative_id.index_on_blockchain
|
||||
}
|
||||
|
||||
/// The key this output may be spent by.
|
||||
pub fn key(&self) -> EdwardsPoint {
|
||||
self.data.key()
|
||||
}
|
||||
|
||||
/// The scalar to add to the private spend key for it to be the discrete logarithm of this
|
||||
/// output's key.
|
||||
pub fn key_offset(&self) -> Scalar {
|
||||
self.data.key_offset()
|
||||
}
|
||||
|
||||
/// The commitment this output created.
|
||||
pub fn commitment(&self) -> &Commitment {
|
||||
self.data.commitment()
|
||||
}
|
||||
|
||||
/// The additional timelock this output is subject to.
|
||||
///
|
||||
/// All outputs are subject to the '10-block lock', a 10-block window after their inclusion
|
||||
/// on-chain during which they cannot be spent. Outputs may be additionally timelocked. This
|
||||
/// function only returns the additional timelock.
|
||||
pub fn additional_timelock(&self) -> Timelock {
|
||||
self.metadata.additional_timelock
|
||||
}
|
||||
|
||||
/// The index of the subaddress this output was identified as sent to.
|
||||
pub fn subaddress(&self) -> Option<SubaddressIndex> {
|
||||
self.metadata.subaddress
|
||||
}
|
||||
|
||||
/// The payment ID included with this output.
|
||||
///
|
||||
/// This field may be `Some` even if wallet2 would not return a payment ID. This will happen if
|
||||
/// the scanned output belongs to the subaddress which spent Monero within the transaction which
|
||||
/// created the output. If multiple subaddresses spent Monero within this transaction, the key
|
||||
/// image with the highest index is determined to be the subaddress considered as the one
|
||||
/// spending.
|
||||
// TODO: Clarify and cite for point A ("highest index spent key image"??)
|
||||
pub fn payment_id(&self) -> Option<PaymentId> {
|
||||
self.metadata.payment_id
|
||||
}
|
||||
|
||||
/// The arbitrary data from the `extra` field of the transaction which created this output.
|
||||
pub fn arbitrary_data(&self) -> &[Vec<u8>] {
|
||||
&self.metadata.arbitrary_data
|
||||
}
|
||||
|
||||
/// Write the WalletOutput.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
self.absolute_id.write(w)?;
|
||||
self.relative_id.write(w)?;
|
||||
self.data.write(w)?;
|
||||
self.metadata.write(w)
|
||||
}
|
||||
|
||||
/// Serialize the WalletOutput to a `Vec<u8>`.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut serialized = Vec::with_capacity(128);
|
||||
self.write(&mut serialized).unwrap();
|
||||
serialized
|
||||
}
|
||||
|
||||
/// Read a WalletOutput.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn read<R: Read>(r: &mut R) -> io::Result<WalletOutput> {
|
||||
Ok(WalletOutput {
|
||||
absolute_id: AbsoluteId::read(r)?,
|
||||
relative_id: RelativeId::read(r)?,
|
||||
data: OutputData::read(r)?,
|
||||
metadata: Metadata::read(r)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
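The wallet-local serialization above is symmetric: `write`/`serialize` and `read` round-trip. A hedged usage sketch, assuming `output` is a `WalletOutput` previously obtained from scanning (callers cannot construct one directly, as its fields are crate-private):

let bytes = output.serialize();
let parsed = WalletOutput::read(&mut bytes.as_slice()).expect("serialize/read should round-trip");
assert_eq!(parsed, output);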
networks/monero/wallet/src/scan.rs (new file, 419 lines)
@@ -0,0 +1,419 @@
use core::ops::Deref;
|
||||
use std_shims::{alloc::format, vec, vec::Vec, string::ToString, collections::HashMap};
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||
|
||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
|
||||
|
||||
use monero_rpc::{RpcError, Rpc};
|
||||
use monero_serai::{
|
||||
io::*,
|
||||
primitives::Commitment,
|
||||
transaction::{Timelock, Pruned, Transaction},
|
||||
block::Block,
|
||||
};
|
||||
use crate::{
|
||||
address::SubaddressIndex, ViewPair, GuaranteedViewPair, output::*, PaymentId, Extra,
|
||||
SharedKeyDerivations,
|
||||
};
|
||||
|
||||
/// A collection of potentially additionally timelocked outputs.
|
||||
#[derive(Zeroize, ZeroizeOnDrop)]
|
||||
pub struct Timelocked(Vec<WalletOutput>);
|
||||
|
||||
impl Timelocked {
|
||||
/// Return the outputs which aren't subject to an additional timelock.
|
||||
#[must_use]
|
||||
pub fn not_additionally_locked(self) -> Vec<WalletOutput> {
|
||||
let mut res = vec![];
|
||||
for output in &self.0 {
|
||||
if output.additional_timelock() == Timelock::None {
|
||||
res.push(output.clone());
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
/// Return the outputs whose additional timelock unlocks by the specified block/time.
|
||||
///
|
||||
/// Additional timelocks are almost never used outside of miner transactions, and are
|
||||
/// increasingly planned for removal. Ignoring non-miner additionally-timelocked outputs is
|
||||
/// recommended.
|
||||
///
|
||||
/// `block` is the block number of the block the additional timelock must be satisfied by.
|
||||
///
|
||||
/// `time` is represented in seconds since the epoch. Please note Monero uses an on-chain
|
||||
/// deterministic clock for time which is subject to variance from the real world time. This time
|
||||
/// argument will be evaluated against Monero's clock, not the local system's clock.
|
||||
#[must_use]
|
||||
pub fn additional_timelock_satisfied_by(self, block: usize, time: u64) -> Vec<WalletOutput> {
|
||||
let mut res = vec![];
|
||||
for output in &self.0 {
|
||||
if (output.additional_timelock() <= Timelock::Block(block)) ||
|
||||
(output.additional_timelock() <= Timelock::Time(time))
|
||||
{
|
||||
res.push(output.clone());
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
/// Ignore the timelocks and return all outputs within this container.
|
||||
#[must_use]
|
||||
pub fn ignore_additional_timelock(mut self) -> Vec<WalletOutput> {
|
||||
let mut res = vec![];
|
||||
core::mem::swap(&mut self.0, &mut res);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct InternalScanner {
|
||||
pair: ViewPair,
|
||||
guaranteed: bool,
|
||||
subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
|
||||
}
|
||||
|
||||
impl Zeroize for InternalScanner {
|
||||
fn zeroize(&mut self) {
|
||||
self.pair.zeroize();
|
||||
self.guaranteed.zeroize();
|
||||
|
||||
// This may not be effective, unfortunately
|
||||
for (mut key, mut value) in self.subaddresses.drain() {
|
||||
key.zeroize();
|
||||
value.zeroize();
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Drop for InternalScanner {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
}
|
||||
}
|
||||
impl ZeroizeOnDrop for InternalScanner {}
|
||||
|
||||
impl InternalScanner {
|
||||
fn new(pair: ViewPair, guaranteed: bool) -> Self {
|
||||
let mut subaddresses = HashMap::new();
|
||||
subaddresses.insert(pair.spend().compress(), None);
|
||||
Self { pair, guaranteed, subaddresses }
|
||||
}
|
||||
|
||||
fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
|
||||
let (spend, _) = self.pair.subaddress_keys(subaddress);
|
||||
self.subaddresses.insert(spend.compress(), Some(subaddress));
|
||||
}
|
||||
|
||||
fn scan_transaction(
|
||||
&self,
|
||||
tx_start_index_on_blockchain: u64,
|
||||
tx_hash: [u8; 32],
|
||||
tx: &Transaction<Pruned>,
|
||||
) -> Result<Timelocked, RpcError> {
|
||||
// Only scan TXs creating RingCT outputs
|
||||
// For the full details on why this check is equivalent, please see the documentation in `scan`
|
||||
if tx.version() != 2 {
|
||||
return Ok(Timelocked(vec![]));
|
||||
}
|
||||
|
||||
// Read the extra field
|
||||
let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()) else {
|
||||
return Ok(Timelocked(vec![]));
|
||||
};
|
||||
|
||||
let Some((tx_keys, additional)) = extra.keys() else {
|
||||
return Ok(Timelocked(vec![]));
|
||||
};
|
||||
let payment_id = extra.payment_id();
|
||||
|
||||
let mut res = vec![];
|
||||
for (o, output) in tx.prefix().outputs.iter().enumerate() {
|
||||
let Some(output_key) = decompress_point(output.key.to_bytes()) else { continue };
|
||||
|
||||
// Monero checks with each TX key and with the additional key for this output
|
||||
|
||||
// This will be None if there's no additional keys, Some(None) if there's additional keys
|
||||
// yet not one for this output (which is non-standard), and Some(Some(_)) if there's an
|
||||
// additional key for this output
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
// /src/cryptonote_basic/cryptonote_format_utils.cpp#L1060-L1070
|
||||
let additional = additional.as_ref().map(|additional| additional.get(o));
|
||||
|
||||
#[allow(clippy::manual_let_else)]
|
||||
for key in tx_keys.iter().map(|key| Some(Some(key))).chain(core::iter::once(additional)) {
|
||||
// Get the key, or continue if there isn't one
|
||||
let key = match key {
|
||||
Some(Some(key)) => key,
|
||||
Some(None) | None => continue,
|
||||
};
|
||||
// Calculate the ECDH
|
||||
let ecdh = Zeroizing::new(self.pair.view.deref() * key);
|
||||
let output_derivations = SharedKeyDerivations::output_derivations(
|
||||
if self.guaranteed {
|
||||
Some(SharedKeyDerivations::uniqueness(&tx.prefix().inputs))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
ecdh.clone(),
|
||||
o,
|
||||
);
|
||||
|
||||
// Check the view tag matches, if there is a view tag
|
||||
if let Some(actual_view_tag) = output.view_tag {
|
||||
if actual_view_tag != output_derivations.view_tag {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// P - shared == spend
|
||||
let Some(subaddress) = ({
|
||||
// The output key may be of torsion [0, 8)
|
||||
// Our subtracting of a prime-order element means any torsion will be preserved
|
||||
// If someone wanted to malleate output keys with distinct torsions, only one will be
|
||||
// scanned accordingly (the one which has matching torsion of the spend key)
|
||||
let subaddress_spend_key =
|
||||
output_key - (&output_derivations.shared_key * ED25519_BASEPOINT_TABLE);
|
||||
self.subaddresses.get(&subaddress_spend_key.compress())
|
||||
}) else {
|
||||
continue;
|
||||
};
|
||||
let subaddress = *subaddress;
|
||||
|
||||
// The key offset is this shared key
|
||||
let mut key_offset = output_derivations.shared_key;
|
||||
if let Some(subaddress) = subaddress {
|
||||
// And if this was to a subaddress, it's additionally the offset from subaddress spend
|
||||
// key to the normal spend key
|
||||
key_offset += self.pair.subaddress_derivation(subaddress);
|
||||
}
|
||||
// Since we've found an output to us, get its amount
|
||||
let mut commitment = Commitment::zero();
|
||||
|
||||
// Miner transaction
|
||||
if let Some(amount) = output.amount {
|
||||
commitment.amount = amount;
|
||||
// Regular transaction
|
||||
} else {
|
||||
let Transaction::V2 { proofs: Some(ref proofs), .. } = &tx else {
|
||||
// Invalid transaction, as of consensus rules at the time of writing this code
|
||||
Err(RpcError::InvalidNode("non-miner v2 transaction without RCT proofs".to_string()))?
|
||||
};
|
||||
|
||||
commitment = match proofs.base.encrypted_amounts.get(o) {
|
||||
Some(amount) => output_derivations.decrypt(amount),
|
||||
// Invalid transaction, as of consensus rules at the time of writing this code
|
||||
None => Err(RpcError::InvalidNode(
|
||||
"RCT proofs without an encrypted amount per output".to_string(),
|
||||
))?,
|
||||
};
|
||||
|
||||
// Rebuild the commitment to verify it
|
||||
if Some(&commitment.calculate()) != proofs.base.commitments.get(o) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Decrypt the payment ID
|
||||
let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh));
|
||||
|
||||
res.push(WalletOutput {
|
||||
absolute_id: AbsoluteId {
|
||||
transaction: tx_hash,
|
||||
index_in_transaction: o.try_into().unwrap(),
|
||||
},
|
||||
relative_id: RelativeId {
|
||||
index_on_blockchain: tx_start_index_on_blockchain + u64::try_from(o).unwrap(),
|
||||
},
|
||||
data: OutputData { key: output_key, key_offset, commitment },
|
||||
metadata: Metadata {
|
||||
additional_timelock: tx.prefix().additional_timelock,
|
||||
subaddress,
|
||||
payment_id,
|
||||
arbitrary_data: extra.data(),
|
||||
},
|
||||
});
|
||||
|
||||
// Break to prevent public keys from being included multiple times, triggering multiple
|
||||
// inclusions of the same output
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Timelocked(res))
|
||||
}
|
||||
|
||||
async fn scan(&mut self, rpc: &impl Rpc, block: &Block) -> Result<Timelocked, RpcError> {
|
||||
if block.header.hardfork_version > 16 {
|
||||
Err(RpcError::InternalError(format!(
|
||||
"scanning a hardfork {} block, when we only support up to 16",
|
||||
block.header.hardfork_version
|
||||
)))?;
|
||||
}
|
||||
|
||||
// We obtain all TXs in full
|
||||
let mut txs_with_hashes = vec![(
|
||||
block.miner_transaction.hash(),
|
||||
Transaction::<Pruned>::from(block.miner_transaction.clone()),
|
||||
)];
|
||||
let txs = rpc.get_pruned_transactions(&block.transactions).await?;
|
||||
for (hash, tx) in block.transactions.iter().zip(txs) {
|
||||
txs_with_hashes.push((*hash, tx));
|
||||
}
|
||||
|
||||
/*
|
||||
Requesting the output index for each output we successfully scan would cause a loss of privacy.
|
||||
We could instead request the output indexes for all outputs we scan, yet this would notably
|
||||
increase the amount of RPC calls we make.
|
||||
|
||||
We solve this by requesting the output index for the first RingCT output in the block, which
|
||||
should be within the miner transaction. Then, as we scan transactions, we update the output
|
||||
index ourselves.
|
||||
|
||||
Please note we only will scan RingCT outputs so we only need to track the RingCT output
|
||||
index. This decision was made due to spending CN outputs potentially having burdensome
|
||||
requirements (the need to make a v1 TX due to insufficient decoys).
|
||||
|
||||
We bound ourselves to only scanning RingCT outputs by only scanning v2 transactions. This is
|
||||
safe and correct since:
|
||||
|
||||
1) v1 transactions cannot create RingCT outputs.
|
||||
|
||||
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
/src/cryptonote_basic/cryptonote_format_utils.cpp#L866-L869
|
||||
|
||||
2) v2 miner transactions implicitly create RingCT outputs.
|
||||
|
||||
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
/src/blockchain_db/blockchain_db.cpp#L232-L241
|
||||
|
||||
3) v2 transactions must create RingCT outputs.
|
||||
|
||||
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
|
||||
/src/cryptonote_core/blockchain.cpp#L3055-L3065
|
||||
|
||||
That does bound on the hard fork version being >= 3, yet all v2 TXs have a hard fork
|
||||
version > 3.
|
||||
|
||||
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
/src/cryptonote_core/blockchain.cpp#L3417
|
||||
*/
|
||||
|
||||
// Get the starting index
|
||||
let mut tx_start_index_on_blockchain = {
|
||||
let mut tx_start_index_on_blockchain = None;
|
||||
for (hash, tx) in &txs_with_hashes {
|
||||
// If this isn't a RingCT output, or there are no outputs, move to the next TX
|
||||
if (!matches!(tx, Transaction::V2 { .. })) || tx.prefix().outputs.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let index = *rpc.get_o_indexes(*hash).await?.first().ok_or_else(|| {
|
||||
RpcError::InvalidNode(
|
||||
"requested output indexes for a TX with outputs and got none".to_string(),
|
||||
)
|
||||
})?;
|
||||
tx_start_index_on_blockchain = Some(index);
|
||||
break;
|
||||
}
|
||||
let Some(tx_start_index_on_blockchain) = tx_start_index_on_blockchain else {
|
||||
// Block had no RingCT outputs
|
||||
return Ok(Timelocked(vec![]));
|
||||
};
|
||||
tx_start_index_on_blockchain
|
||||
};
|
||||
|
||||
let mut res = Timelocked(vec![]);
|
||||
for (hash, tx) in txs_with_hashes {
|
||||
// Push all outputs into our result
|
||||
{
|
||||
let mut this_txs_outputs = vec![];
|
||||
core::mem::swap(
|
||||
&mut self.scan_transaction(tx_start_index_on_blockchain, hash, &tx)?.0,
|
||||
&mut this_txs_outputs,
|
||||
);
|
||||
res.0.extend(this_txs_outputs);
|
||||
}
|
||||
|
||||
// Update the RingCT starting index for the next TX
|
||||
if matches!(tx, Transaction::V2 { .. }) {
|
||||
tx_start_index_on_blockchain += u64::try_from(tx.prefix().outputs.len()).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
// If the block's version is >= 12, drop all unencrypted payment IDs
|
||||
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
|
||||
// src/wallet/wallet2.cpp#L2739-L2744
|
||||
if block.header.hardfork_version >= 12 {
|
||||
for output in &mut res.0 {
|
||||
if matches!(output.metadata.payment_id, Some(PaymentId::Unencrypted(_))) {
|
||||
output.metadata.payment_id = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
/// A transaction scanner to find outputs received.
|
||||
///
|
||||
/// When an output is successfully scanned, the output key MUST be checked against the local
|
||||
/// database for lack of prior observation. If it was prior observed, that output is an instance
|
||||
/// of the
|
||||
/// [burning bug](https://web.getmonero.org/2018/09/25/a-post-mortum-of-the-burning-bug.html) and
|
||||
/// MAY be unspendable. Only the prior received output(s) or the newly received output will be
|
||||
/// spendable (as spending one will burn all of them).
|
||||
///
|
||||
/// Once checked, the output key MUST be saved to the local database so future checks can be
|
||||
/// performed.
|
||||
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
|
||||
pub struct Scanner(InternalScanner);
|
||||
|
||||
impl Scanner {
|
||||
/// Create a Scanner from a ViewPair.
|
||||
pub fn new(pair: ViewPair) -> Self {
|
||||
Self(InternalScanner::new(pair, false))
|
||||
}
|
||||
|
||||
/// Register a subaddress to scan for.
|
||||
///
|
||||
/// Subaddresses must be explicitly registered ahead of time in order to be successfully scanned.
|
||||
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
|
||||
self.0.register_subaddress(subaddress)
|
||||
}
|
||||
|
||||
/// Scan a block.
|
||||
pub async fn scan(&mut self, rpc: &impl Rpc, block: &Block) -> Result<Timelocked, RpcError> {
|
||||
self.0.scan(rpc, block).await
|
||||
}
|
||||
}
|
||||
|
||||
/// A transaction scanner to find outputs received which are guaranteed to be spendable.
|
||||
///
|
||||
/// 'Guaranteed' outputs, or transaction outputs immune to the burning bug, are not officially specified
|
||||
/// by the Monero project. They should only be used if necessary. No support outside of
|
||||
/// monero-wallet is promised.
|
||||
///
|
||||
/// "guaranteed to be spendable" assumes satisfaction of any timelocks in effect.
|
||||
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
|
||||
pub struct GuaranteedScanner(InternalScanner);
|
||||
|
||||
impl GuaranteedScanner {
|
||||
/// Create a GuaranteedScanner from a GuaranteedViewPair.
|
||||
pub fn new(pair: GuaranteedViewPair) -> Self {
|
||||
Self(InternalScanner::new(pair.0, true))
|
||||
}
|
||||
|
||||
/// Register a subaddress to scan for.
|
||||
///
|
||||
/// Subaddresses must be explicitly registered ahead of time in order to be successfully scanned.
|
||||
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
|
||||
self.0.register_subaddress(subaddress)
|
||||
}
|
||||
|
||||
/// Scan a block.
|
||||
pub async fn scan(&mut self, rpc: &impl Rpc, block: &Block) -> Result<Timelocked, RpcError> {
|
||||
self.0.scan(rpc, block).await
|
||||
}
|
||||
}
|
||||
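The Scanner documentation above requires every scanned output key to be checked against a local database before the output is credited, in order to catch the burning bug. Below is a minimal sketch of such a check, using an in-memory set as a stand-in for a persistent database; the compress()/to_bytes() calls are from curve25519-dalek, and how the output key is obtained from a WalletOutput is left to the caller.

use std::collections::HashSet;

use curve25519_dalek::EdwardsPoint;

// Stand-in for the wallet's persistent database of previously observed output keys.
#[derive(Default)]
struct ObservedOutputKeys(HashSet<[u8; 32]>);

impl ObservedOutputKeys {
  // Record an output key, returning true only if it was never observed before.
  // A result of false means the output is an instance of the burning bug and may be unspendable.
  fn first_observation(&mut self, output_key: &EdwardsPoint) -> bool {
    self.0.insert(output_key.compress().to_bytes())
  }
}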
137
networks/monero/wallet/src/send/eventuality.rs
Normal file
@@ -0,0 +1,137 @@
|
||||
use std_shims::{vec::Vec, io};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use crate::{
|
||||
ringct::PrunedRctProofs,
|
||||
transaction::{Input, Timelock, Pruned, Transaction},
|
||||
send::SignableTransaction,
|
||||
};
|
||||
|
||||
/// The eventual output of a SignableTransaction.
|
||||
///
|
||||
/// If a SignableTransaction is signed and published on-chain, it will create a Transaction
|
||||
/// identifiable to whoever else has the same SignableTransaction (with the same outgoing view
|
||||
/// key). This structure enables checking if a Transaction is in fact such an output.
|
||||
///
|
||||
/// Since Monero is a privacy coin without outgoing view keys, this only performs a fuzzy match.
|
||||
/// The fuzzy match executes over the outputs and associated data necessary to work with the
|
||||
/// outputs (the transaction randomness, ciphertexts). This does not check if the
/// inputs intended to be spent were actually the inputs spent (as that is infeasible).
|
||||
///
|
||||
/// The transaction randomness does bind to the inputs intended to be spent, so an on-chain
|
||||
/// transaction will not match for multiple `Eventuality`s unless the `SignableTransaction`s they
|
||||
/// were built from were in conflict (and their intended transactions cannot simultaneously exist
|
||||
/// on-chain).
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Eventuality(SignableTransaction);
|
||||
|
||||
impl From<SignableTransaction> for Eventuality {
|
||||
fn from(tx: SignableTransaction) -> Eventuality {
|
||||
Eventuality(tx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eventuality {
|
||||
/// Return the `extra` field any transaction following this intent would use.
|
||||
///
|
||||
/// This enables building a HashMap of Extra -> Eventuality for efficiently fetching the
|
||||
/// `Eventuality` an on-chain transaction may complete.
|
||||
///
|
||||
/// This extra is cryptographically bound to the inputs intended to be spent. If the
|
||||
/// `SignableTransaction`s the `Eventuality`s are built from are not in conflict (their intended
|
||||
/// transactions can simultaneously exist on-chain), then each extra will only have a single
|
||||
/// Eventuality associated (barring a cryptographic problem considered hard failing).
|
||||
pub fn extra(&self) -> Vec<u8> {
|
||||
self.0.extra()
|
||||
}
|
||||
|
||||
/// Return if this TX matches the SignableTransaction this was created from.
|
||||
///
|
||||
/// Matching the SignableTransaction means this transaction created the expected outputs, they're
|
||||
/// scannable, they're not locked, and this transaction claims to use the intended inputs (though
|
||||
/// this is not guaranteed). This 'claim' is evaluated by this transaction's use of the transaction
|
||||
/// keys derived from the intended inputs. This ensures two SignableTransactions with the same
|
||||
/// intended payments don't match for each other's `Eventuality`s (as they'll have distinct
|
||||
/// inputs intended).
|
||||
#[must_use]
|
||||
pub fn matches(&self, tx: &Transaction<Pruned>) -> bool {
|
||||
// Verify extra
|
||||
if self.0.extra() != tx.prefix().extra {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Also ensure no timelock was set
|
||||
if tx.prefix().additional_timelock != Timelock::None {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check the amount of inputs aligns
|
||||
if tx.prefix().inputs.len() != self.0.inputs.len() {
|
||||
return false;
|
||||
}
|
||||
// Collect the key images used by this transaction
|
||||
let Ok(key_images) = tx
|
||||
.prefix()
|
||||
.inputs
|
||||
.iter()
|
||||
.map(|input| match input {
|
||||
Input::Gen(_) => Err(()),
|
||||
Input::ToKey { key_image, .. } => Ok(*key_image),
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// Check the outputs
|
||||
if self.0.outputs(&key_images) != tx.prefix().outputs {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check the encrypted amounts and commitments
|
||||
let commitments_and_encrypted_amounts = self.0.commitments_and_encrypted_amounts(&key_images);
|
||||
let Transaction::V2 { proofs: Some(PrunedRctProofs { ref base, .. }), .. } = tx else {
|
||||
return false;
|
||||
};
|
||||
if base.commitments !=
|
||||
commitments_and_encrypted_amounts
|
||||
.iter()
|
||||
.map(|(commitment, _)| commitment.calculate())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if base.encrypted_amounts !=
|
||||
commitments_and_encrypted_amounts.into_iter().map(|(_, amount)| amount).collect::<Vec<_>>()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
/// Write the Eventuality.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
self.0.write(w)
|
||||
}
|
||||
|
||||
/// Serialize the Eventuality to a `Vec<u8>`.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
self.0.serialize()
|
||||
}
|
||||
|
||||
/// Read a Eventuality.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Eventuality> {
|
||||
Ok(Eventuality(SignableTransaction::read(r)?))
|
||||
}
|
||||
}
|
||||
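As the documentation for extra above suggests, pending Eventualities can be indexed by the extra their transactions will use. The following sketch shows that lookup pattern using only the extra and matches methods defined in this file; the Eventuality, Transaction, and Pruned types are assumed to be in scope from this crate.

use std::collections::HashMap;

// Index pending Eventualities by the extra their eventual transactions will use.
fn index_eventualities(eventualities: Vec<Eventuality>) -> HashMap<Vec<u8>, Eventuality> {
  eventualities.into_iter().map(|eventuality| (eventuality.extra(), eventuality)).collect()
}

// For an on-chain transaction, fetch the Eventuality sharing its extra (if any) and confirm the
// transaction actually satisfies it.
fn completed_eventuality<'a>(
  index: &'a HashMap<Vec<u8>, Eventuality>,
  tx: &Transaction<Pruned>,
) -> Option<&'a Eventuality> {
  index.get(&tx.prefix().extra).filter(|eventuality| eventuality.matches(tx))
}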
581
networks/monero/wallet/src/send/mod.rs
Normal file
@@ -0,0 +1,581 @@
|
||||
use core::{ops::Deref, fmt};
|
||||
use std_shims::{
|
||||
io, vec,
|
||||
vec::Vec,
|
||||
string::{String, ToString},
|
||||
};
|
||||
|
||||
use zeroize::{Zeroize, Zeroizing};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
use rand::seq::SliceRandom;
|
||||
|
||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
|
||||
#[cfg(feature = "multisig")]
|
||||
use frost::FrostError;
|
||||
|
||||
use crate::{
|
||||
io::*,
|
||||
generators::{MAX_COMMITMENTS, hash_to_point},
|
||||
ringct::{
|
||||
clsag::{ClsagError, ClsagContext, Clsag},
|
||||
RctType, RctPrunable, RctProofs,
|
||||
},
|
||||
transaction::Transaction,
|
||||
address::{Network, SubaddressIndex, MoneroAddress},
|
||||
extra::MAX_ARBITRARY_DATA_SIZE,
|
||||
rpc::FeeRate,
|
||||
ViewPair, GuaranteedViewPair, OutputWithDecoys,
|
||||
};
|
||||
|
||||
mod tx_keys;
|
||||
mod tx;
|
||||
mod eventuality;
|
||||
pub use eventuality::Eventuality;
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
mod multisig;
|
||||
#[cfg(feature = "multisig")]
|
||||
pub use multisig::{TransactionMachine, TransactionSignMachine, TransactionSignatureMachine};
|
||||
|
||||
pub(crate) fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering {
|
||||
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
enum ChangeEnum {
|
||||
AddressOnly(MoneroAddress),
|
||||
Standard { view_pair: ViewPair, subaddress: Option<SubaddressIndex> },
|
||||
Guaranteed { view_pair: GuaranteedViewPair, subaddress: Option<SubaddressIndex> },
|
||||
}
|
||||
|
||||
impl fmt::Debug for ChangeEnum {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
ChangeEnum::AddressOnly(addr) => {
|
||||
f.debug_struct("ChangeEnum::AddressOnly").field("addr", &addr).finish()
|
||||
}
|
||||
ChangeEnum::Standard { subaddress, .. } => f
|
||||
.debug_struct("ChangeEnum::Standard")
|
||||
.field("subaddress", &subaddress)
|
||||
.finish_non_exhaustive(),
|
||||
ChangeEnum::Guaranteed { subaddress, .. } => f
|
||||
.debug_struct("ChangeEnum::Guaranteed")
|
||||
.field("subaddress", &subaddress)
|
||||
.finish_non_exhaustive(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Specification for a change output.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Change(Option<ChangeEnum>);
|
||||
|
||||
impl Change {
|
||||
/// Create a change output specification.
|
||||
///
|
||||
/// This takes the view key as Monero assumes it has the view key for change outputs. It optimizes
/// its wallet protocol accordingly.
|
||||
pub fn new(view_pair: ViewPair, subaddress: Option<SubaddressIndex>) -> Change {
|
||||
Change(Some(ChangeEnum::Standard { view_pair, subaddress }))
|
||||
}
|
||||
|
||||
/// Create a change output specification for a guaranteed view pair.
|
||||
///
|
||||
/// This takes the view key as Monero assumes it has the view key for change outputs. It optimizes
/// its wallet protocol accordingly.
|
||||
pub fn guaranteed(view_pair: GuaranteedViewPair, subaddress: Option<SubaddressIndex>) -> Change {
|
||||
Change(Some(ChangeEnum::Guaranteed { view_pair, subaddress }))
|
||||
}
|
||||
|
||||
/// Create a fingerprintable change output specification.
|
||||
///
|
||||
/// You MUST assume this will harm your privacy. Only use this if you know what you're doing.
|
||||
///
|
||||
/// If the change address is Some, this will be unable to optimize the transaction as the
|
||||
/// Monero wallet protocol expects it can (due to presumably having the view key for the change
|
||||
/// output). If a transaction should be optimized, and isn't, it will be fingerprintable.
|
||||
///
|
||||
/// If the change address is None, there are two fingerprints:
|
||||
///
|
||||
/// 1) The change in the TX is shunted to the fee (making it fingerprintable).
|
||||
///
|
||||
/// 2) If there are two outputs in the TX, Monero would create a payment ID for the non-change
|
||||
/// output so an observer can't tell apart TXs with a payment ID from TXs without a payment
|
||||
/// ID. monero-wallet will simply not create a payment ID in this case, revealing it's a
|
||||
/// monero-wallet TX without change.
|
||||
pub fn fingerprintable(address: Option<MoneroAddress>) -> Change {
|
||||
if let Some(address) = address {
|
||||
Change(Some(ChangeEnum::AddressOnly(address)))
|
||||
} else {
|
||||
Change(None)
|
||||
}
|
||||
}
|
||||
}
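A brief sketch of choosing between the change specifications above, depending on what the sender holds; the crate's ViewPair, GuaranteedViewPair, and MoneroAddress types are assumed to be in scope, and the selection logic is illustrative rather than prescriptive.

// Pick a change specification given what we hold for the change output.
fn change_spec(
  own_view: Option<ViewPair>,
  guaranteed_view: Option<GuaranteedViewPair>,
  external_change_address: Option<MoneroAddress>,
) -> Change {
  if let Some(view_pair) = own_view {
    // Typical case: we hold the view key for the change output, so the wallet protocol can
    // optimize as it expects to.
    Change::new(view_pair, None)
  } else if let Some(view_pair) = guaranteed_view {
    // For wallets scanning with a GuaranteedViewPair.
    Change::guaranteed(view_pair, None)
  } else {
    // Fingerprintable: either an address we can't scan, or None to shunt the change to the fee.
    Change::fingerprintable(external_change_address)
  }
}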
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
enum InternalPayment {
|
||||
Payment(MoneroAddress, u64),
|
||||
Change(ChangeEnum),
|
||||
}
|
||||
|
||||
impl InternalPayment {
|
||||
fn address(&self) -> MoneroAddress {
|
||||
match self {
|
||||
InternalPayment::Payment(addr, _) => *addr,
|
||||
InternalPayment::Change(change) => match change {
|
||||
ChangeEnum::AddressOnly(addr) => *addr,
|
||||
// Network::Mainnet as the network won't affect the derivations
|
||||
ChangeEnum::Standard { view_pair, subaddress } => match subaddress {
|
||||
Some(subaddress) => view_pair.subaddress(Network::Mainnet, *subaddress),
|
||||
None => view_pair.legacy_address(Network::Mainnet),
|
||||
},
|
||||
ChangeEnum::Guaranteed { view_pair, subaddress } => {
|
||||
view_pair.address(Network::Mainnet, *subaddress, None)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An error while sending Monero.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
||||
pub enum SendError {
|
||||
/// The RingCT type to produce proofs for this transaction with wasn't supported.
|
||||
#[cfg_attr(feature = "std", error("this library doesn't yet support that RctType"))]
|
||||
UnsupportedRctType,
|
||||
/// The transaction had no inputs specified.
|
||||
#[cfg_attr(feature = "std", error("no inputs"))]
|
||||
NoInputs,
|
||||
/// The decoy quantity was invalid for the specified RingCT type.
|
||||
#[cfg_attr(feature = "std", error("invalid number of decoys"))]
|
||||
InvalidDecoyQuantity,
|
||||
/// The transaction had no outputs specified.
|
||||
#[cfg_attr(feature = "std", error("no outputs"))]
|
||||
NoOutputs,
|
||||
/// The transaction had too many outputs specified.
|
||||
#[cfg_attr(feature = "std", error("too many outputs"))]
|
||||
TooManyOutputs,
|
||||
/// The transaction did not have a change output, and did not have two outputs.
|
||||
///
|
||||
/// Monero requires all transactions have at least two outputs, assuming one payment and one
|
||||
/// change (or at least one dummy and one change). Accordingly, specifying no change and only
|
||||
/// one payment prevents creating a valid transaction.
|
||||
#[cfg_attr(feature = "std", error("only one output and no change address"))]
|
||||
NoChange,
|
||||
/// Multiple addresses had payment IDs specified.
|
||||
///
|
||||
/// Only one payment ID is allowed per transaction.
|
||||
#[cfg_attr(feature = "std", error("multiple addresses with payment IDs"))]
|
||||
MultiplePaymentIds,
|
||||
/// Too much arbitrary data was specified.
|
||||
#[cfg_attr(feature = "std", error("too much data"))]
|
||||
TooMuchArbitraryData,
|
||||
/// The created transaction was too large.
|
||||
#[cfg_attr(feature = "std", error("too large of a transaction"))]
|
||||
TooLargeTransaction,
|
||||
/// This transaction could not pay for itself.
|
||||
#[cfg_attr(
|
||||
feature = "std",
|
||||
error(
|
||||
"not enough funds (inputs {inputs}, outputs {outputs}, necessary_fee {necessary_fee:?})"
|
||||
)
|
||||
)]
|
||||
NotEnoughFunds {
|
||||
/// The amount of funds the inputs contributed.
|
||||
inputs: u64,
|
||||
/// The amount of funds the outputs required.
|
||||
outputs: u64,
|
||||
/// The fee necessary to be paid on top.
|
||||
///
|
||||
/// If this is None, it is because the fee was not calculated as the outputs alone caused this
|
||||
/// error.
|
||||
necessary_fee: Option<u64>,
|
||||
},
|
||||
/// This transaction is being signed with the wrong private key.
|
||||
#[cfg_attr(feature = "std", error("wrong spend private key"))]
|
||||
WrongPrivateKey,
|
||||
/// This transaction was read from a bytestream which was malicious.
|
||||
#[cfg_attr(
|
||||
feature = "std",
|
||||
error("this SignableTransaction was created by deserializing a malicious serialization")
|
||||
)]
|
||||
MaliciousSerialization,
|
||||
/// There was an error when working with the CLSAGs.
|
||||
#[cfg_attr(feature = "std", error("clsag error ({0})"))]
|
||||
ClsagError(ClsagError),
|
||||
/// There was an error when working with FROST.
|
||||
#[cfg(feature = "multisig")]
|
||||
#[cfg_attr(feature = "std", error("frost error {0}"))]
|
||||
FrostError(FrostError),
|
||||
}
|
||||
|
||||
/// A signable transaction.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct SignableTransaction {
|
||||
rct_type: RctType,
|
||||
outgoing_view_key: Zeroizing<[u8; 32]>,
|
||||
inputs: Vec<OutputWithDecoys>,
|
||||
payments: Vec<InternalPayment>,
|
||||
data: Vec<Vec<u8>>,
|
||||
fee_rate: FeeRate,
|
||||
}
|
||||
|
||||
struct SignableTransactionWithKeyImages {
|
||||
intent: SignableTransaction,
|
||||
key_images: Vec<EdwardsPoint>,
|
||||
}
|
||||
|
||||
impl SignableTransaction {
|
||||
fn validate(&self) -> Result<(), SendError> {
|
||||
match self.rct_type {
|
||||
RctType::ClsagBulletproof | RctType::ClsagBulletproofPlus => {}
|
||||
_ => Err(SendError::UnsupportedRctType)?,
|
||||
}
|
||||
|
||||
if self.inputs.is_empty() {
|
||||
Err(SendError::NoInputs)?;
|
||||
}
|
||||
for input in &self.inputs {
|
||||
if input.decoys().len() !=
|
||||
match self.rct_type {
|
||||
RctType::ClsagBulletproof => 11,
|
||||
RctType::ClsagBulletproofPlus => 16,
|
||||
_ => panic!("unsupported RctType"),
|
||||
}
|
||||
{
|
||||
Err(SendError::InvalidDecoyQuantity)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Check we have at least one non-change output
|
||||
if !self.payments.iter().any(|payment| matches!(payment, InternalPayment::Payment(_, _))) {
|
||||
Err(SendError::NoOutputs)?;
|
||||
}
|
||||
// If we don't have at least two outputs, as required by Monero, error
|
||||
if self.payments.len() < 2 {
|
||||
Err(SendError::NoChange)?;
|
||||
}
|
||||
// Check we don't have multiple Change outputs due to decoding a malicious serialization
|
||||
{
|
||||
let mut change_count = 0;
|
||||
for payment in &self.payments {
|
||||
change_count += usize::from(u8::from(matches!(payment, InternalPayment::Change(_))));
|
||||
}
|
||||
if change_count > 1 {
|
||||
Err(SendError::MaliciousSerialization)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure there's at most one payment ID
|
||||
{
|
||||
let mut payment_ids = 0;
|
||||
for payment in &self.payments {
|
||||
payment_ids += usize::from(u8::from(payment.address().payment_id().is_some()));
|
||||
}
|
||||
if payment_ids > 1 {
|
||||
Err(SendError::MultiplePaymentIds)?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.payments.len() > MAX_COMMITMENTS {
|
||||
Err(SendError::TooManyOutputs)?;
|
||||
}
|
||||
|
||||
// Check the length of each arbitrary data
|
||||
for part in &self.data {
|
||||
if part.len() > MAX_ARBITRARY_DATA_SIZE {
|
||||
Err(SendError::TooMuchArbitraryData)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Check the length of TX extra
|
||||
// https://github.com/monero-project/monero/pull/8733
|
||||
const MAX_EXTRA_SIZE: usize = 1060;
|
||||
if self.extra().len() > MAX_EXTRA_SIZE {
|
||||
Err(SendError::TooMuchArbitraryData)?;
|
||||
}
|
||||
|
||||
// Make sure we have enough funds
|
||||
let in_amount = self.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
|
||||
let payments_amount = self
|
||||
.payments
|
||||
.iter()
|
||||
.filter_map(|payment| match payment {
|
||||
InternalPayment::Payment(_, amount) => Some(amount),
|
||||
InternalPayment::Change(_) => None,
|
||||
})
|
||||
.sum::<u64>();
|
||||
let (weight, necessary_fee) = self.weight_and_necessary_fee();
|
||||
if in_amount < (payments_amount + necessary_fee) {
|
||||
Err(SendError::NotEnoughFunds {
|
||||
inputs: in_amount,
|
||||
outputs: payments_amount,
|
||||
necessary_fee: Some(necessary_fee),
|
||||
})?;
|
||||
}
|
||||
|
||||
// The limit is half the no-penalty block size
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
// /src/wallet/wallet2.cpp#L11076-L11085
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
// /src/cryptonote_config.h#L61
|
||||
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
|
||||
// /src/cryptonote_config.h#L64
|
||||
const MAX_TX_SIZE: usize = (300_000 / 2) - 600;
|
||||
if weight >= MAX_TX_SIZE {
|
||||
Err(SendError::TooLargeTransaction)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a new SignableTransaction.
|
||||
///
|
||||
/// `outgoing_view_key` is used to seed the RNGs for this transaction. Anyone with knowledge of
|
||||
/// the outgoing view key will be able to identify a transaction produced with this methodology,
|
||||
/// and the data within it. Accordingly, it must be treated as a private key.
|
||||
///
|
||||
/// `data` represents arbitrary data which will be embedded into the transaction's `extra` field.
|
||||
/// The embedding occurs using an `ExtraField::Nonce` with a custom marker byte (as to not
|
||||
/// conflict with a payment ID).
|
||||
pub fn new(
|
||||
rct_type: RctType,
|
||||
outgoing_view_key: Zeroizing<[u8; 32]>,
|
||||
inputs: Vec<OutputWithDecoys>,
|
||||
payments: Vec<(MoneroAddress, u64)>,
|
||||
change: Change,
|
||||
data: Vec<Vec<u8>>,
|
||||
fee_rate: FeeRate,
|
||||
) -> Result<SignableTransaction, SendError> {
|
||||
// Re-format the payments and change into a consolidated payments list
|
||||
let mut payments = payments
|
||||
.into_iter()
|
||||
.map(|(addr, amount)| InternalPayment::Payment(addr, amount))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if let Some(change) = change.0 {
|
||||
payments.push(InternalPayment::Change(change));
|
||||
}
|
||||
|
||||
let mut res =
|
||||
SignableTransaction { rct_type, outgoing_view_key, inputs, payments, data, fee_rate };
|
||||
res.validate()?;
|
||||
|
||||
// Shuffle the payments
|
||||
{
|
||||
let mut rng = res.seeded_rng(b"shuffle_payments");
|
||||
res.payments.shuffle(&mut rng);
|
||||
}
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
/// The fee rate this transaction uses.
|
||||
pub fn fee_rate(&self) -> FeeRate {
|
||||
self.fee_rate
|
||||
}
|
||||
|
||||
/// The fee this transaction requires.
|
||||
///
|
||||
/// This is distinct from the fee this transaction will use. If no change output is specified,
|
||||
/// all unspent coins will be shunted to the fee.
|
||||
pub fn necessary_fee(&self) -> u64 {
|
||||
self.weight_and_necessary_fee().1
|
||||
}
|
||||
|
||||
/// Write a SignableTransaction.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
fn write_payment<W: io::Write>(payment: &InternalPayment, w: &mut W) -> io::Result<()> {
|
||||
match payment {
|
||||
InternalPayment::Payment(addr, amount) => {
|
||||
w.write_all(&[0])?;
|
||||
write_vec(write_byte, addr.to_string().as_bytes(), w)?;
|
||||
w.write_all(&amount.to_le_bytes())
|
||||
}
|
||||
InternalPayment::Change(change) => match change {
|
||||
ChangeEnum::AddressOnly(addr) => {
|
||||
w.write_all(&[1])?;
|
||||
write_vec(write_byte, addr.to_string().as_bytes(), w)
|
||||
}
|
||||
ChangeEnum::Standard { view_pair, subaddress } => {
|
||||
w.write_all(&[2])?;
|
||||
write_point(&view_pair.spend(), w)?;
|
||||
write_scalar(&view_pair.view, w)?;
|
||||
if let Some(subaddress) = subaddress {
|
||||
w.write_all(&subaddress.account().to_le_bytes())?;
|
||||
w.write_all(&subaddress.address().to_le_bytes())
|
||||
} else {
|
||||
w.write_all(&0u32.to_le_bytes())?;
|
||||
w.write_all(&0u32.to_le_bytes())
|
||||
}
|
||||
}
|
||||
ChangeEnum::Guaranteed { view_pair, subaddress } => {
|
||||
w.write_all(&[3])?;
|
||||
write_point(&view_pair.spend(), w)?;
|
||||
write_scalar(&view_pair.0.view, w)?;
|
||||
if let Some(subaddress) = subaddress {
|
||||
w.write_all(&subaddress.account().to_le_bytes())?;
|
||||
w.write_all(&subaddress.address().to_le_bytes())
|
||||
} else {
|
||||
w.write_all(&0u32.to_le_bytes())?;
|
||||
w.write_all(&0u32.to_le_bytes())
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
write_byte(&u8::from(self.rct_type), w)?;
|
||||
w.write_all(self.outgoing_view_key.as_slice())?;
|
||||
write_vec(OutputWithDecoys::write, &self.inputs, w)?;
|
||||
write_vec(write_payment, &self.payments, w)?;
|
||||
write_vec(|data, w| write_vec(write_byte, data, w), &self.data, w)?;
|
||||
self.fee_rate.write(w)
|
||||
}
|
||||
|
||||
/// Serialize the SignableTransaction to a `Vec<u8>`.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = Vec::with_capacity(256);
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
|
||||
/// Read a `SignableTransaction`.
|
||||
///
|
||||
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||
/// defined serialization.
|
||||
pub fn read<R: io::Read>(r: &mut R) -> io::Result<SignableTransaction> {
|
||||
fn read_address<R: io::Read>(r: &mut R) -> io::Result<MoneroAddress> {
|
||||
String::from_utf8(read_vec(read_byte, r)?)
|
||||
.ok()
|
||||
.and_then(|str| MoneroAddress::from_str_with_unchecked_network(&str).ok())
|
||||
.ok_or_else(|| io::Error::other("invalid address"))
|
||||
}
|
||||
|
||||
fn read_payment<R: io::Read>(r: &mut R) -> io::Result<InternalPayment> {
|
||||
Ok(match read_byte(r)? {
|
||||
0 => InternalPayment::Payment(read_address(r)?, read_u64(r)?),
|
||||
1 => InternalPayment::Change(ChangeEnum::AddressOnly(read_address(r)?)),
|
||||
2 => InternalPayment::Change(ChangeEnum::Standard {
|
||||
view_pair: ViewPair::new(read_point(r)?, Zeroizing::new(read_scalar(r)?))
|
||||
.map_err(io::Error::other)?,
|
||||
subaddress: SubaddressIndex::new(read_u32(r)?, read_u32(r)?),
|
||||
}),
|
||||
3 => InternalPayment::Change(ChangeEnum::Guaranteed {
|
||||
view_pair: GuaranteedViewPair::new(read_point(r)?, Zeroizing::new(read_scalar(r)?))
|
||||
.map_err(io::Error::other)?,
|
||||
subaddress: SubaddressIndex::new(read_u32(r)?, read_u32(r)?),
|
||||
}),
|
||||
_ => Err(io::Error::other("invalid payment"))?,
|
||||
})
|
||||
}
|
||||
|
||||
let res = SignableTransaction {
|
||||
rct_type: RctType::try_from(read_byte(r)?)
|
||||
.map_err(|()| io::Error::other("unsupported/invalid RctType"))?,
|
||||
outgoing_view_key: Zeroizing::new(read_bytes(r)?),
|
||||
inputs: read_vec(OutputWithDecoys::read, r)?,
|
||||
payments: read_vec(read_payment, r)?,
|
||||
data: read_vec(|r| read_vec(read_byte, r), r)?,
|
||||
fee_rate: FeeRate::read(r)?,
|
||||
};
|
||||
match res.validate() {
|
||||
Ok(()) => {}
|
||||
Err(e) => Err(io::Error::other(e))?,
|
||||
}
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
fn with_key_images(mut self, key_images: Vec<EdwardsPoint>) -> SignableTransactionWithKeyImages {
|
||||
debug_assert_eq!(self.inputs.len(), key_images.len());
|
||||
|
||||
// Sort the inputs by their key images
|
||||
let mut sorted_inputs = self.inputs.into_iter().zip(key_images).collect::<Vec<_>>();
|
||||
sorted_inputs
|
||||
.sort_by(|(_, key_image_a), (_, key_image_b)| key_image_sort(key_image_a, key_image_b));
|
||||
|
||||
self.inputs = Vec::with_capacity(sorted_inputs.len());
|
||||
let mut key_images = Vec::with_capacity(sorted_inputs.len());
|
||||
for (input, key_image) in sorted_inputs {
|
||||
self.inputs.push(input);
|
||||
key_images.push(key_image);
|
||||
}
|
||||
|
||||
SignableTransactionWithKeyImages { intent: self, key_images }
|
||||
}
|
||||
|
||||
/// Sign this transaction.
|
||||
pub fn sign(
|
||||
self,
|
||||
rng: &mut (impl RngCore + CryptoRng),
|
||||
sender_spend_key: &Zeroizing<Scalar>,
|
||||
) -> Result<Transaction, SendError> {
|
||||
// Calculate the key images
|
||||
let mut key_images = vec![];
|
||||
for input in &self.inputs {
|
||||
let input_key = Zeroizing::new(sender_spend_key.deref() + input.key_offset());
|
||||
if (input_key.deref() * ED25519_BASEPOINT_TABLE) != input.key() {
|
||||
Err(SendError::WrongPrivateKey)?;
|
||||
}
|
||||
let key_image = input_key.deref() * hash_to_point(input.key().compress().to_bytes());
|
||||
key_images.push(key_image);
|
||||
}
|
||||
|
||||
// Convert to a SignableTransactionWithKeyImages
|
||||
let tx = self.with_key_images(key_images);
|
||||
|
||||
// Prepare the CLSAG signatures
|
||||
let mut clsag_signs = Vec::with_capacity(tx.intent.inputs.len());
|
||||
for input in &tx.intent.inputs {
|
||||
// Re-derive the input key as this will be in a different order
|
||||
let input_key = Zeroizing::new(sender_spend_key.deref() + input.key_offset());
|
||||
clsag_signs.push((
|
||||
input_key,
|
||||
ClsagContext::new(input.decoys().clone(), input.commitment().clone())
|
||||
.map_err(SendError::ClsagError)?,
|
||||
));
|
||||
}
|
||||
|
||||
// Get the output commitments' mask sum
|
||||
let mask_sum = tx.intent.sum_output_masks(&tx.key_images);
|
||||
|
||||
// Get the actual TX, just needing the CLSAGs
|
||||
let mut tx = tx.transaction_without_signatures();
|
||||
|
||||
// Sign the CLSAGs
|
||||
let clsags_and_pseudo_outs =
|
||||
Clsag::sign(rng, clsag_signs, mask_sum, tx.signature_hash().unwrap())
|
||||
.map_err(SendError::ClsagError)?;
|
||||
|
||||
// Fill in the CLSAGs/pseudo-outs
|
||||
let inputs_len = tx.prefix().inputs.len();
|
||||
let Transaction::V2 {
|
||||
proofs:
|
||||
Some(RctProofs {
|
||||
prunable: RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. },
|
||||
..
|
||||
}),
|
||||
..
|
||||
} = tx
|
||||
else {
|
||||
panic!("not signing clsag?")
|
||||
};
|
||||
*clsags = Vec::with_capacity(inputs_len);
|
||||
*pseudo_outs = Vec::with_capacity(inputs_len);
|
||||
for (clsag, pseudo_out) in clsags_and_pseudo_outs {
|
||||
clsags.push(clsag);
|
||||
pseudo_outs.push(pseudo_out);
|
||||
}
|
||||
|
||||
// Return the signed TX
|
||||
Ok(tx)
|
||||
}
|
||||
}
|
||||
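A minimal sketch of the single-signer flow defined in this file: construct the SignableTransaction, retain its Eventuality for later matching, and sign. The inputs, payments, and fee rate are placeholders the caller supplies, and the crate types are assumed to be in scope.

fn build_and_sign(
  rng: &mut (impl rand_core::RngCore + rand_core::CryptoRng),
  outgoing_view_key: zeroize::Zeroizing<[u8; 32]>,
  inputs: Vec<OutputWithDecoys>,
  payments: Vec<(MoneroAddress, u64)>,
  change: Change,
  fee_rate: FeeRate,
  sender_spend_key: &zeroize::Zeroizing<curve25519_dalek::Scalar>,
) -> Result<(Transaction, Eventuality), SendError> {
  // ClsagBulletproofPlus requires 16-member rings, as enforced by validate above.
  let signable = SignableTransaction::new(
    RctType::ClsagBulletproofPlus,
    outgoing_view_key,
    inputs,
    payments,
    change,
    vec![],
    fee_rate,
  )?;
  // The Eventuality lets us later recognize the published transaction on-chain.
  let eventuality = Eventuality::from(signable.clone());
  let tx = signable.sign(rng, sender_spend_key)?;
  Ok((tx, eventuality))
}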
304
networks/monero/wallet/src/send/multisig.rs
Normal file
@@ -0,0 +1,304 @@
|
||||
use std_shims::{
|
||||
vec::Vec,
|
||||
io::{self, Read},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use group::ff::Field;
|
||||
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
|
||||
use dalek_ff_group as dfg;
|
||||
|
||||
use transcript::{Transcript, RecommendedTranscript};
|
||||
use frost::{
|
||||
curve::Ed25519,
|
||||
Participant, FrostError, ThresholdKeys,
|
||||
dkg::lagrange,
|
||||
sign::{
|
||||
Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine,
|
||||
AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
|
||||
},
|
||||
};
|
||||
|
||||
use monero_serai::{
|
||||
ringct::{
|
||||
clsag::{ClsagContext, ClsagMultisigMaskSender, ClsagAddendum, ClsagMultisig},
|
||||
RctPrunable, RctProofs,
|
||||
},
|
||||
transaction::Transaction,
|
||||
};
|
||||
use crate::send::{SendError, SignableTransaction, key_image_sort};
|
||||
|
||||
/// Initial FROST machine to produce a signed transaction.
|
||||
pub struct TransactionMachine {
|
||||
signable: SignableTransaction,
|
||||
|
||||
i: Participant,
|
||||
|
||||
// The key image generator, and the scalar offset from the spend key
|
||||
key_image_generators_and_offsets: Vec<(EdwardsPoint, Scalar)>,
|
||||
clsags: Vec<(ClsagMultisigMaskSender, AlgorithmMachine<Ed25519, ClsagMultisig>)>,
|
||||
}
|
||||
|
||||
/// Second FROST machine to produce a signed transaction.
|
||||
pub struct TransactionSignMachine {
|
||||
signable: SignableTransaction,
|
||||
|
||||
i: Participant,
|
||||
|
||||
key_image_generators_and_offsets: Vec<(EdwardsPoint, Scalar)>,
|
||||
clsags: Vec<(ClsagMultisigMaskSender, AlgorithmSignMachine<Ed25519, ClsagMultisig>)>,
|
||||
|
||||
our_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
|
||||
}
|
||||
|
||||
/// Final FROST machine to produce a signed transaction.
|
||||
pub struct TransactionSignatureMachine {
|
||||
tx: Transaction,
|
||||
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
|
||||
}
|
||||
|
||||
impl SignableTransaction {
|
||||
/// Create a FROST signing machine out of this signable transaction.
|
||||
pub fn multisig(self, keys: &ThresholdKeys<Ed25519>) -> Result<TransactionMachine, SendError> {
|
||||
let mut clsags = vec![];
|
||||
|
||||
let mut key_image_generators_and_offsets = vec![];
|
||||
for input in &self.inputs {
|
||||
// Check this is the right set of keys
|
||||
let offset = keys.offset(dfg::Scalar(input.key_offset()));
|
||||
if offset.group_key().0 != input.key() {
|
||||
Err(SendError::WrongPrivateKey)?;
|
||||
}
|
||||
|
||||
let context = ClsagContext::new(input.decoys().clone(), input.commitment().clone())
|
||||
.map_err(SendError::ClsagError)?;
|
||||
let (clsag, clsag_mask_send) = ClsagMultisig::new(
|
||||
RecommendedTranscript::new(b"Monero Multisignature Transaction"),
|
||||
context,
|
||||
);
|
||||
key_image_generators_and_offsets.push((
|
||||
clsag.key_image_generator(),
|
||||
keys.current_offset().unwrap_or(dfg::Scalar::ZERO).0 + input.key_offset(),
|
||||
));
|
||||
clsags.push((clsag_mask_send, AlgorithmMachine::new(clsag, offset)));
|
||||
}
|
||||
|
||||
Ok(TransactionMachine {
|
||||
signable: self,
|
||||
i: keys.params().i(),
|
||||
key_image_generators_and_offsets,
|
||||
clsags,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl PreprocessMachine for TransactionMachine {
|
||||
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
|
||||
type Signature = Transaction;
|
||||
type SignMachine = TransactionSignMachine;
|
||||
|
||||
fn preprocess<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
) -> (TransactionSignMachine, Self::Preprocess) {
|
||||
// Iterate over each CLSAG calling preprocess
|
||||
let mut preprocesses = Vec::with_capacity(self.clsags.len());
|
||||
let clsags = self
|
||||
.clsags
|
||||
.drain(..)
|
||||
.map(|(clsag_mask_send, clsag)| {
|
||||
let (clsag, preprocess) = clsag.preprocess(rng);
|
||||
preprocesses.push(preprocess);
|
||||
(clsag_mask_send, clsag)
|
||||
})
|
||||
.collect();
|
||||
let our_preprocess = preprocesses.clone();
|
||||
|
||||
(
|
||||
TransactionSignMachine {
|
||||
signable: self.signable,
|
||||
|
||||
i: self.i,
|
||||
|
||||
key_image_generators_and_offsets: self.key_image_generators_and_offsets,
|
||||
clsags,
|
||||
|
||||
our_preprocess,
|
||||
},
|
||||
preprocesses,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl SignMachine<Transaction> for TransactionSignMachine {
|
||||
type Params = ();
|
||||
type Keys = ThresholdKeys<Ed25519>;
|
||||
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
|
||||
type SignatureShare = Vec<SignatureShare<Ed25519>>;
|
||||
type SignatureMachine = TransactionSignatureMachine;
|
||||
|
||||
fn cache(self) -> CachedPreprocess {
|
||||
unimplemented!(
|
||||
"Monero transactions don't support caching their preprocesses due to {}",
|
||||
"being already bound to a specific transaction"
|
||||
);
|
||||
}
|
||||
|
||||
fn from_cache(
|
||||
(): (),
|
||||
_: ThresholdKeys<Ed25519>,
|
||||
_: CachedPreprocess,
|
||||
) -> (Self, Self::Preprocess) {
|
||||
unimplemented!(
|
||||
"Monero transactions don't support caching their preprocesses due to {}",
|
||||
"being already bound to a specific transaction"
|
||||
);
|
||||
}
|
||||
|
||||
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
|
||||
self.clsags.iter().map(|clsag| clsag.1.read_preprocess(reader)).collect()
|
||||
}
|
||||
|
||||
fn sign(
|
||||
self,
|
||||
mut commitments: HashMap<Participant, Self::Preprocess>,
|
||||
msg: &[u8],
|
||||
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
|
||||
if !msg.is_empty() {
|
||||
panic!("message was passed to the TransactionMachine when it generates its own");
|
||||
}
|
||||
|
||||
// We do not need to be included here, yet this set of signers has yet to be validated
|
||||
// We explicitly remove ourselves to ensure we aren't included twice, if we were redundantly
|
||||
// included
|
||||
commitments.remove(&self.i);
|
||||
|
||||
// Find out who's included
|
||||
let mut included = commitments.keys().copied().collect::<Vec<_>>();
|
||||
// This push won't duplicate due to the above removal
|
||||
included.push(self.i);
|
||||
// unstable sort may reorder elements of equal order
|
||||
// Given our lack of duplicates, we should have no elements of equal order
|
||||
included.sort_unstable();
|
||||
|
||||
// Start calculating the key images, as needed on the TX level
|
||||
let mut key_images = vec![EdwardsPoint::identity(); self.clsags.len()];
|
||||
for (image, (generator, offset)) in
|
||||
key_images.iter_mut().zip(&self.key_image_generators_and_offsets)
|
||||
{
|
||||
*image = generator * offset;
|
||||
}
|
||||
|
||||
// Convert the serialized nonces commitments to a parallelized Vec
|
||||
let mut commitments = (0 .. self.clsags.len())
|
||||
.map(|c| {
|
||||
included
|
||||
.iter()
|
||||
.map(|l| {
|
||||
let preprocess = if *l == self.i {
|
||||
self.our_preprocess[c].clone()
|
||||
} else {
|
||||
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone()
|
||||
};
|
||||
|
||||
// While here, calculate the key image as needed to call sign
|
||||
// The CLSAG algorithm will independently calculate the key image/verify these shares
|
||||
key_images[c] +=
|
||||
preprocess.addendum.key_image_share().0 * lagrange::<dfg::Scalar>(*l, &included).0;
|
||||
|
||||
Ok((*l, preprocess))
|
||||
})
|
||||
.collect::<Result<HashMap<_, _>, _>>()
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
// The above inserted our own preprocess into these maps (which is unnecessary)
|
||||
// Remove it now
|
||||
for map in &mut commitments {
|
||||
map.remove(&self.i);
|
||||
}
|
||||
|
||||
// The actual TX will have sorted its inputs by key image
|
||||
// We apply the same sort now to our CLSAG machines
|
||||
let mut clsags = Vec::with_capacity(self.clsags.len());
|
||||
for ((key_image, clsag), commitments) in key_images.iter().zip(self.clsags).zip(commitments) {
|
||||
clsags.push((key_image, clsag, commitments));
|
||||
}
|
||||
clsags.sort_by(|x, y| key_image_sort(x.0, y.0));
|
||||
let clsags =
|
||||
clsags.into_iter().map(|(_, clsag, commitments)| (clsag, commitments)).collect::<Vec<_>>();
|
||||
|
||||
// Specify the TX's key images
|
||||
let tx = self.signable.with_key_images(key_images);
|
||||
|
||||
// We now need to decide the masks for each CLSAG
|
||||
let clsag_len = clsags.len();
|
||||
let output_masks = tx.intent.sum_output_masks(&tx.key_images);
|
||||
let mut rng = tx.intent.seeded_rng(b"multisig_pseudo_out_masks");
|
||||
let mut sum_pseudo_outs = Scalar::ZERO;
|
||||
let mut to_sign = Vec::with_capacity(clsag_len);
|
||||
for (i, ((clsag_mask_send, clsag), commitments)) in clsags.into_iter().enumerate() {
|
||||
let mut mask = Scalar::random(&mut rng);
|
||||
if i == (clsag_len - 1) {
|
||||
mask = output_masks - sum_pseudo_outs;
|
||||
} else {
|
||||
sum_pseudo_outs += mask;
|
||||
}
|
||||
clsag_mask_send.send(mask);
|
||||
to_sign.push((clsag, commitments));
|
||||
}
|
||||
|
||||
let tx = tx.transaction_without_signatures();
|
||||
let msg = tx.signature_hash().unwrap();
|
||||
|
||||
// Iterate over each CLSAG calling sign
|
||||
let mut shares = Vec::with_capacity(to_sign.len());
|
||||
let clsags = to_sign
|
||||
.drain(..)
|
||||
.map(|(clsag, commitments)| {
|
||||
let (clsag, share) = clsag.sign(commitments, &msg)?;
|
||||
shares.push(share);
|
||||
Ok(clsag)
|
||||
})
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
Ok((TransactionSignatureMachine { tx, clsags }, shares))
|
||||
}
|
||||
}
|
||||
|
||||
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
|
||||
type SignatureShare = Vec<SignatureShare<Ed25519>>;
|
||||
|
||||
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
|
||||
self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect()
|
||||
}
|
||||
|
||||
fn complete(
|
||||
mut self,
|
||||
shares: HashMap<Participant, Self::SignatureShare>,
|
||||
) -> Result<Transaction, FrostError> {
|
||||
let mut tx = self.tx;
|
||||
match tx {
|
||||
Transaction::V2 {
|
||||
proofs:
|
||||
Some(RctProofs {
|
||||
prunable: RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. },
|
||||
..
|
||||
}),
|
||||
..
|
||||
} => {
|
||||
for (c, clsag) in self.clsags.drain(..).enumerate() {
|
||||
let (clsag, pseudo_out) = clsag.complete(
|
||||
shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::<HashMap<_, _>>(),
|
||||
)?;
|
||||
clsags.push(clsag);
|
||||
pseudo_outs.push(pseudo_out);
|
||||
}
|
||||
}
|
||||
_ => unreachable!("attempted to sign a multisig TX which wasn't CLSAG"),
|
||||
}
|
||||
Ok(tx)
|
||||
}
|
||||
}
|
||||
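A rough sketch of driving the three FROST machines above from one participant's perspective in a 2-of-2 signing session. Transport of preprocesses and shares between participants is elided, and the FROST trait imports (PreprocessMachine, SignMachine, SignatureMachine) plus the crate types are assumed to be in scope.

use std::collections::HashMap;

// One signer's half of the multisig flow; `their_preprocess` and `their_share` arrive from the
// other participant over some authenticated channel (not shown).
fn multisig_sign(
  rng: &mut (impl rand_core::RngCore + rand_core::CryptoRng),
  signable: SignableTransaction,
  keys: &ThresholdKeys<Ed25519>,
  their_participant: Participant,
  their_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
  their_share: Vec<SignatureShare<Ed25519>>,
) -> Result<Transaction, SendError> {
  let machine = signable.multisig(keys)?;

  // Round 1: produce our preprocess, which would be sent to the other signer.
  let (machine, _our_preprocess) = machine.preprocess(rng);

  // Round 2: incorporate their preprocess, producing our signature share to send back.
  let mut preprocesses = HashMap::new();
  preprocesses.insert(their_participant, their_preprocess);
  // The message is empty as the TransactionSignMachine derives its own signature hash.
  let (machine, _our_share) = machine.sign(preprocesses, &[]).map_err(SendError::FrostError)?;

  // Round 3: incorporate their share and complete the transaction.
  let mut shares = HashMap::new();
  shares.insert(their_participant, their_share);
  machine.complete(shares).map_err(SendError::FrostError)
}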
323
networks/monero/wallet/src/send/tx.rs
Normal file
@@ -0,0 +1,323 @@
|
||||
use std_shims::{vec, vec::Vec};
|
||||
|
||||
use curve25519_dalek::{
|
||||
constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE},
|
||||
Scalar, EdwardsPoint,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
io::{varint_len, write_varint},
|
||||
primitives::Commitment,
|
||||
ringct::{
|
||||
clsag::Clsag, bulletproofs::Bulletproof, EncryptedAmount, RctType, RctBase, RctPrunable,
|
||||
RctProofs,
|
||||
},
|
||||
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
|
||||
extra::{ARBITRARY_DATA_MARKER, PaymentId, Extra},
|
||||
send::{InternalPayment, SignableTransaction, SignableTransactionWithKeyImages},
|
||||
};
|
||||
|
||||
impl SignableTransaction {
|
||||
// Output the inputs for this transaction.
|
||||
pub(crate) fn inputs(&self, key_images: &[EdwardsPoint]) -> Vec<Input> {
|
||||
debug_assert_eq!(self.inputs.len(), key_images.len());
|
||||
|
||||
let mut res = Vec::with_capacity(self.inputs.len());
|
||||
for (input, key_image) in self.inputs.iter().zip(key_images) {
|
||||
res.push(Input::ToKey {
|
||||
amount: None,
|
||||
key_offsets: input.decoys().offsets().to_vec(),
|
||||
key_image: *key_image,
|
||||
});
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
// Output the outputs for this transaction.
|
||||
pub(crate) fn outputs(&self, key_images: &[EdwardsPoint]) -> Vec<Output> {
|
||||
let shared_key_derivations = self.shared_key_derivations(key_images);
|
||||
debug_assert_eq!(self.payments.len(), shared_key_derivations.len());
|
||||
|
||||
let mut res = Vec::with_capacity(self.payments.len());
|
||||
for (payment, shared_key_derivations) in self.payments.iter().zip(&shared_key_derivations) {
|
||||
let key =
|
||||
(&shared_key_derivations.shared_key * ED25519_BASEPOINT_TABLE) + payment.address().spend();
|
||||
res.push(Output {
|
||||
key: key.compress(),
|
||||
amount: None,
|
||||
view_tag: (match self.rct_type {
|
||||
RctType::ClsagBulletproof => false,
|
||||
RctType::ClsagBulletproofPlus => true,
|
||||
_ => panic!("unsupported RctType"),
|
||||
})
|
||||
.then_some(shared_key_derivations.view_tag),
|
||||
});
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
// Calculate the TX extra for this transaction.
|
||||
pub(crate) fn extra(&self) -> Vec<u8> {
|
||||
let (tx_key, additional_keys) = self.transaction_keys_pub();
|
||||
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
|
||||
let payment_id_xors = self.payment_id_xors();
|
||||
debug_assert_eq!(self.payments.len(), payment_id_xors.len());
|
||||
|
||||
let amount_of_keys = 1 + additional_keys.len();
|
||||
let mut extra = Extra::new(tx_key, additional_keys);
|
||||
|
||||
if let Some((id, id_xor)) =
|
||||
self.payments.iter().zip(&payment_id_xors).find_map(|(payment, payment_id_xor)| {
|
||||
payment.address().payment_id().map(|id| (id, payment_id_xor))
|
||||
})
|
||||
{
|
||||
let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes();
|
||||
let mut id_vec = Vec::with_capacity(1 + 8);
|
||||
PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
|
||||
extra.push_nonce(id_vec);
|
||||
} else {
|
||||
// If there's no payment ID, we push a dummy (as wallet2 does) if there's only one payment
|
||||
if (self.payments.len() == 2) &&
|
||||
self.payments.iter().any(|payment| matches!(payment, InternalPayment::Change(_)))
|
||||
{
|
||||
let (_, payment_id_xor) = self
|
||||
.payments
|
||||
.iter()
|
||||
.zip(&payment_id_xors)
|
||||
.find(|(payment, _)| matches!(payment, InternalPayment::Payment(_, _)))
|
||||
.expect("multiple change outputs?");
|
||||
let mut id_vec = Vec::with_capacity(1 + 8);
|
||||
// The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask
|
||||
PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap();
|
||||
extra.push_nonce(id_vec);
|
||||
}
|
||||
}
|
||||
|
||||
// Include data if present
|
||||
for part in &self.data {
|
||||
let mut arb = vec![ARBITRARY_DATA_MARKER];
|
||||
arb.extend(part);
|
||||
extra.push_nonce(arb);
|
||||
}
|
||||
|
||||
let mut serialized = Vec::with_capacity(32 * amount_of_keys);
|
||||
extra.write(&mut serialized).unwrap();
|
||||
serialized
|
||||
}
|
||||
|
||||
pub(crate) fn weight_and_necessary_fee(&self) -> (usize, u64) {
|
||||
/*
|
||||
This transaction is variable length to:
|
||||
- The decoy offsets (fixed)
|
||||
- The TX extra (variable to key images, requiring an interactive protocol)
|
||||
|
||||
Thankfully, the TX extra *length* is fixed. Accordingly, we can calculate the inevitable TX's
|
||||
weight at this time with a shimmed transaction.
|
||||
*/
|
||||
let base_weight = {
|
||||
let mut key_images = Vec::with_capacity(self.inputs.len());
|
||||
let mut clsags = Vec::with_capacity(self.inputs.len());
|
||||
let mut pseudo_outs = Vec::with_capacity(self.inputs.len());
|
||||
for _ in &self.inputs {
|
||||
key_images.push(ED25519_BASEPOINT_POINT);
|
||||
clsags.push(Clsag {
|
||||
D: ED25519_BASEPOINT_POINT,
|
||||
s: vec![
|
||||
Scalar::ZERO;
|
||||
match self.rct_type {
|
||||
RctType::ClsagBulletproof => 11,
|
||||
RctType::ClsagBulletproofPlus => 16,
|
||||
_ => unreachable!("unsupported RCT type"),
|
||||
}
|
||||
],
|
||||
c1: Scalar::ZERO,
|
||||
});
|
||||
pseudo_outs.push(ED25519_BASEPOINT_POINT);
|
||||
}
|
||||
let mut encrypted_amounts = Vec::with_capacity(self.payments.len());
|
||||
let mut bp_commitments = Vec::with_capacity(self.payments.len());
|
||||
let mut commitments = Vec::with_capacity(self.payments.len());
|
||||
for _ in &self.payments {
|
||||
encrypted_amounts.push(EncryptedAmount::Compact { amount: [0; 8] });
|
||||
bp_commitments.push(Commitment::zero());
|
||||
commitments.push(ED25519_BASEPOINT_POINT);
|
||||
}
|
||||
|
||||
let padded_log2 = {
|
||||
let mut log2_find = 0;
|
||||
while (1 << log2_find) < self.payments.len() {
|
||||
log2_find += 1;
|
||||
}
|
||||
log2_find
|
||||
};
|
||||
// This is log2 the padded amount of IPA rows
|
||||
// We have 64 rows per commitment, so we need 64 * c IPA rows
|
||||
// We rewrite this as 2**6 * c
|
||||
// By finding the padded log2 of c, we get 2**6 * 2**p
|
||||
// This declares the log2 to be 6 + p
|
||||
let lr_len = 6 + padded_log2;
|
||||
|
||||
let bulletproof = match self.rct_type {
|
||||
RctType::ClsagBulletproof => {
|
||||
let mut bp = Vec::with_capacity(((9 + (2 * lr_len)) * 32) + 2);
|
||||
let push_point = |bp: &mut Vec<u8>| {
|
||||
bp.push(1);
|
||||
bp.extend([0; 31]);
|
||||
};
|
||||
let push_scalar = |bp: &mut Vec<u8>| bp.extend([0; 32]);
|
||||
for _ in 0 .. 4 {
|
||||
push_point(&mut bp);
|
||||
}
|
||||
for _ in 0 .. 2 {
|
||||
push_scalar(&mut bp);
|
||||
}
|
||||
for _ in 0 .. 2 {
|
||||
write_varint(&lr_len, &mut bp).unwrap();
|
||||
for _ in 0 .. lr_len {
|
||||
push_point(&mut bp);
|
||||
}
|
||||
}
|
||||
for _ in 0 .. 3 {
|
||||
push_scalar(&mut bp);
|
||||
}
|
||||
Bulletproof::read(&mut bp.as_slice()).expect("made an invalid dummy BP")
|
||||
}
|
||||
RctType::ClsagBulletproofPlus => {
|
||||
let mut bp = Vec::with_capacity(((6 + (2 * lr_len)) * 32) + 2);
|
||||
let push_point = |bp: &mut Vec<u8>| {
|
||||
bp.push(1);
|
||||
bp.extend([0; 31]);
|
||||
};
|
||||
let push_scalar = |bp: &mut Vec<u8>| bp.extend([0; 32]);
|
||||
for _ in 0 .. 3 {
|
||||
push_point(&mut bp);
|
||||
}
|
||||
for _ in 0 .. 3 {
|
||||
push_scalar(&mut bp);
|
||||
}
|
||||
for _ in 0 .. 2 {
|
||||
write_varint(&lr_len, &mut bp).unwrap();
|
||||
for _ in 0 .. lr_len {
|
||||
push_point(&mut bp);
|
||||
}
|
||||
}
|
||||
Bulletproof::read_plus(&mut bp.as_slice()).expect("made an invalid dummy BP+")
|
||||
}
|
||||
_ => panic!("unsupported RctType"),
|
||||
};
|
||||
|
||||
// `- 1` to remove the one byte for the 0 fee
|
||||
Transaction::V2 {
|
||||
prefix: TransactionPrefix {
|
||||
additional_timelock: Timelock::None,
|
||||
inputs: self.inputs(&key_images),
|
||||
outputs: self.outputs(&key_images),
|
||||
extra: self.extra(),
|
||||
},
|
||||
proofs: Some(RctProofs {
|
||||
base: RctBase { fee: 0, encrypted_amounts, pseudo_outs: vec![], commitments },
|
||||
prunable: RctPrunable::Clsag { bulletproof, clsags, pseudo_outs },
|
||||
}),
|
||||
}
|
||||
.weight() -
|
||||
1
|
||||
};
|
||||
|
||||
// We now have the base weight, without the fee encoded
|
||||
// The fee itself will impact the weight as its encoding is [1, 9] bytes long
|
||||
let mut possible_weights = Vec::with_capacity(9);
|
||||
for i in 1 ..= 9 {
|
||||
possible_weights.push(base_weight + i);
|
||||
}
|
||||
debug_assert_eq!(possible_weights.len(), 9);
|
||||
|
||||
// We now calculate the fee which would be used for each weight
|
||||
let mut possible_fees = Vec::with_capacity(9);
|
||||
for weight in possible_weights {
|
||||
possible_fees.push(self.fee_rate.calculate_fee_from_weight(weight));
|
||||
}
|
||||
|
||||
// We now look for the fee whose length matches the length used to derive it
|
||||
let mut weight_and_fee = None;
|
||||
for (fee_len, possible_fee) in possible_fees.into_iter().enumerate() {
|
||||
let fee_len = 1 + fee_len;
|
||||
debug_assert!(1 <= fee_len);
|
||||
debug_assert!(fee_len <= 9);
|
||||
|
||||
// We use the first fee whose encoded length is not larger than the length used within this
|
||||
// weight
|
||||
// This should be because the lengths are equal, yet means if somehow none are equal, this
|
||||
// will still terminate successfully
|
||||
if varint_len(possible_fee) <= fee_len {
|
||||
weight_and_fee = Some((base_weight + fee_len, possible_fee));
|
||||
break;
|
||||
}
|
||||
}
|
||||
weight_and_fee.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl SignableTransactionWithKeyImages {
|
||||
pub(crate) fn transaction_without_signatures(&self) -> Transaction {
|
||||
let commitments_and_encrypted_amounts =
|
||||
self.intent.commitments_and_encrypted_amounts(&self.key_images);
|
||||
let mut commitments = Vec::with_capacity(self.intent.payments.len());
|
||||
let mut bp_commitments = Vec::with_capacity(self.intent.payments.len());
|
||||
let mut encrypted_amounts = Vec::with_capacity(self.intent.payments.len());
|
||||
for (commitment, encrypted_amount) in commitments_and_encrypted_amounts {
|
||||
commitments.push(commitment.calculate());
|
||||
bp_commitments.push(commitment);
|
||||
encrypted_amounts.push(encrypted_amount);
|
||||
}
|
||||
let bulletproof = {
|
||||
let mut bp_rng = self.intent.seeded_rng(b"bulletproof");
|
||||
(match self.intent.rct_type {
|
||||
RctType::ClsagBulletproof => Bulletproof::prove(&mut bp_rng, bp_commitments),
|
||||
RctType::ClsagBulletproofPlus => Bulletproof::prove_plus(&mut bp_rng, bp_commitments),
|
||||
_ => panic!("unsupported RctType"),
|
||||
})
|
||||
.expect("couldn't prove BP(+)s for this many payments despite checking in constructor?")
|
||||
};
|
||||
|
||||
Transaction::V2 {
|
||||
prefix: TransactionPrefix {
|
||||
additional_timelock: Timelock::None,
|
||||
inputs: self.intent.inputs(&self.key_images),
|
||||
outputs: self.intent.outputs(&self.key_images),
|
||||
extra: self.intent.extra(),
|
||||
},
|
||||
proofs: Some(RctProofs {
|
||||
base: RctBase {
|
||||
fee: if self
|
||||
.intent
|
||||
.payments
|
||||
.iter()
|
||||
.any(|payment| matches!(payment, InternalPayment::Change(_)))
|
||||
{
|
||||
// The necessary fee is the fee
|
||||
self.intent.weight_and_necessary_fee().1
|
||||
} else {
|
||||
// If we don't have a change output, the difference is the fee
|
||||
let inputs =
|
||||
self.intent.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
|
||||
let payments = self
|
||||
.intent
|
||||
.payments
|
||||
.iter()
|
||||
.filter_map(|payment| match payment {
|
||||
InternalPayment::Payment(_, amount) => Some(amount),
|
||||
InternalPayment::Change(_) => None,
|
||||
})
|
||||
.sum::<u64>();
|
||||
// Safe since the constructor checks inputs >= (payments + fee)
|
||||
inputs - payments
|
||||
},
|
||||
encrypted_amounts,
|
||||
pseudo_outs: vec![],
|
||||
commitments,
|
||||
},
|
||||
prunable: RctPrunable::Clsag { bulletproof, clsags: vec![], pseudo_outs: vec![] },
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
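The search at the end of weight_and_necessary_fee resolves a small fixed point: the fee depends on the weight, while the weight depends on how many bytes the fee's varint encoding occupies. The standalone sketch below mirrors that search with a simplified per-weight fee rate standing in for the crate's FeeRate (an assumption for illustration only).

// Number of bytes Monero's varint encoding uses for `n` (1 ..= 9 for any realistic u64 fee).
fn varint_len(mut n: u64) -> usize {
  let mut len = 1;
  while n >= 0x80 {
    n >>= 7;
    len += 1;
  }
  len
}

// Find the (weight, fee) pair where the fee's encoded length matches the length assumed when
// computing the weight. `fee_per_weight` is a simplified stand-in for FeeRate.
fn weight_and_fee(base_weight: usize, fee_per_weight: u64) -> (usize, u64) {
  for fee_len in 1 ..= 9 {
    let weight = base_weight + fee_len;
    let fee = fee_per_weight * u64::try_from(weight).unwrap();
    // Accept the first fee whose encoding doesn't exceed the assumed length.
    if varint_len(fee) <= fee_len {
      return (weight, fee);
    }
  }
  unreachable!("fee didn't fit in 9 varint bytes, which no realistic fee reaches")
}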
246
networks/monero/wallet/src/send/tx_keys.rs
Normal file
@@ -0,0 +1,246 @@
|
||||
use core::ops::Deref;
|
||||
use std_shims::{vec, vec::Vec};
|
||||
|
||||
use zeroize::Zeroizing;
|
||||
|
||||
use rand_core::SeedableRng;
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
|
||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
|
||||
|
||||
use crate::{
|
||||
primitives::{keccak256, Commitment},
|
||||
ringct::EncryptedAmount,
|
||||
SharedKeyDerivations, OutputWithDecoys,
|
||||
send::{ChangeEnum, InternalPayment, SignableTransaction, key_image_sort},
|
||||
};
|
||||
|
||||
impl SignableTransaction {
|
||||
pub(crate) fn seeded_rng(&self, dst: &'static [u8]) -> ChaCha20Rng {
|
||||
// Apply the DST
|
||||
let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]);
|
||||
transcript.extend(dst);
|
||||
|
||||
// Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript
|
||||
transcript.extend(self.outgoing_view_key.as_slice());
|
||||
|
||||
// Ensure uniqueness across transactions by binding to a use-once object
|
||||
// The keys for the inputs is binding to their key images, making them use-once
|
||||
let mut input_keys = self.inputs.iter().map(OutputWithDecoys::key).collect::<Vec<_>>();
|
||||
// We sort the inputs mid-way through TX construction, so apply our own sort to ensure a
|
||||
// consistent order
|
||||
// We use the key image sort as it's applicable and well-defined, not because these are key
|
||||
// images
|
||||
input_keys.sort_by(key_image_sort);
|
||||
for key in input_keys {
|
||||
transcript.extend(key.compress().to_bytes());
|
||||
}
|
||||
|
||||
ChaCha20Rng::from_seed(keccak256(&transcript))
|
||||
}
|
||||
|
||||
fn has_payments_to_subaddresses(&self) -> bool {
|
||||
self.payments.iter().any(|payment| match payment {
|
||||
InternalPayment::Payment(addr, _) => addr.is_subaddress(),
|
||||
InternalPayment::Change(change) => match change {
|
||||
ChangeEnum::AddressOnly(addr) => addr.is_subaddress(),
|
||||
// These aren't considered payments to subaddresses as we don't need to send to them as
|
||||
// subaddresses
|
||||
// We can calculate the shared key using the view key, as if we were receiving, instead
|
||||
ChangeEnum::Standard { .. } | ChangeEnum::Guaranteed { .. } => false,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn should_use_additional_keys(&self) -> bool {
|
||||
let has_payments_to_subaddresses = self.has_payments_to_subaddresses();
|
||||
if !has_payments_to_subaddresses {
|
||||
return false;
|
||||
}
|
||||
|
||||
let has_change_view = self.payments.iter().any(|payment| match payment {
|
||||
InternalPayment::Payment(_, _) => false,
|
||||
InternalPayment::Change(change) => match change {
|
||||
ChangeEnum::AddressOnly(_) => false,
|
||||
ChangeEnum::Standard { .. } | ChangeEnum::Guaranteed { .. } => true,
|
||||
},
|
||||
});
|
||||
|
||||
/*
|
||||
If sending to a subaddress, the shared key is not `rG` yet `rB`. Because of this, a
|
||||
per-subaddress shared key is necessary, causing the usage of additional keys.
|
||||
|
||||
The one exception is if we're sending to a subaddress in a 2-output transaction. The second
|
||||
output, the change output, will attempt scanning the singular key `rB` with `v rB`. While we
|
||||
cannot calculate `r vB` with just `r` (as that'd require `vB` when we presumably only have
|
||||
`vG` when sending), since we do in fact have `v` (due to it being our own view key for our
|
||||
change output), we can still calculate the shared secret.
|
||||
*/
    has_payments_to_subaddresses && !((self.payments.len() == 2) && has_change_view)
  }

  // Calculate the transaction keys used as randomness.
  fn transaction_keys(&self) -> (Zeroizing<Scalar>, Vec<Zeroizing<Scalar>>) {
    let mut rng = self.seeded_rng(b"transaction_keys");

    let tx_key = Zeroizing::new(Scalar::random(&mut rng));

    let mut additional_keys = vec![];
    if self.should_use_additional_keys() {
      for _ in 0 .. self.payments.len() {
        additional_keys.push(Zeroizing::new(Scalar::random(&mut rng)));
      }
    }
    (tx_key, additional_keys)
  }

  fn ecdhs(&self) -> Vec<Zeroizing<EdwardsPoint>> {
    let (tx_key, additional_keys) = self.transaction_keys();
    debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
    let (tx_key_pub, additional_keys_pub) = self.transaction_keys_pub();
    debug_assert_eq!(additional_keys_pub.len(), additional_keys.len());

    let mut res = Vec::with_capacity(self.payments.len());
    for (i, payment) in self.payments.iter().enumerate() {
      let addr = payment.address();
      let key_to_use =
        if addr.is_subaddress() { additional_keys.get(i).unwrap_or(&tx_key) } else { &tx_key };

      let ecdh = match payment {
        // If we don't have the view key, use the key dedicated for this address (r A)
        InternalPayment::Payment(_, _) |
        InternalPayment::Change(ChangeEnum::AddressOnly { .. }) => {
          Zeroizing::new(key_to_use.deref() * addr.view())
        }
        // If we do have the view key, use the commitment to the key (a R)
        InternalPayment::Change(ChangeEnum::Standard { view_pair, .. }) => {
          Zeroizing::new(view_pair.view.deref() * tx_key_pub)
        }
        InternalPayment::Change(ChangeEnum::Guaranteed { view_pair, .. }) => {
          Zeroizing::new(view_pair.0.view.deref() * tx_key_pub)
        }
      };
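      // Added note (not in the original): for an output we can view, both forms compute the same
      // Diffie-Hellman secret, as r * A (transaction key times the public view key) equals
      // a * R (the private view key times the published transaction key).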

      res.push(ecdh);
    }
    res
  }

  // Calculate the shared keys and the necessary derivations.
  pub(crate) fn shared_key_derivations(
    &self,
    key_images: &[EdwardsPoint],
  ) -> Vec<Zeroizing<SharedKeyDerivations>> {
    let ecdhs = self.ecdhs();

    let uniqueness = SharedKeyDerivations::uniqueness(&self.inputs(key_images));

    let mut res = Vec::with_capacity(self.payments.len());
    for (i, (payment, ecdh)) in self.payments.iter().zip(ecdhs).enumerate() {
      let addr = payment.address();
      res.push(SharedKeyDerivations::output_derivations(
        addr.is_guaranteed().then_some(uniqueness),
        ecdh,
        i,
      ));
    }
    res
  }

  // Calculate the payment ID XOR masks.
  pub(crate) fn payment_id_xors(&self) -> Vec<[u8; 8]> {
    let mut res = Vec::with_capacity(self.payments.len());
    for ecdh in self.ecdhs() {
      res.push(SharedKeyDerivations::payment_id_xor(ecdh));
    }
    res
  }

  // Calculate the transaction_keys' commitments.
  //
  // These depend on the payments. Commitments for payments to subaddresses use the spend key for
  // the generator.
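  // (Added note, not in the original: when a single subaddress is paid and no additional keys are
  // used, the published transaction key is r * B_subaddress rather than r * G, matching how
  // subaddress outputs are scanned.)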
  pub(crate) fn transaction_keys_pub(&self) -> (EdwardsPoint, Vec<EdwardsPoint>) {
    let (tx_key, additional_keys) = self.transaction_keys();
    debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));

    // The single transaction key uses the subaddress's spend key as its generator
    let has_payments_to_subaddresses = self.has_payments_to_subaddresses();
    let should_use_additional_keys = self.should_use_additional_keys();
    if has_payments_to_subaddresses && (!should_use_additional_keys) {
      debug_assert_eq!(additional_keys.len(), 0);

      let InternalPayment::Payment(addr, _) = self
        .payments
        .iter()
        .find(|payment| matches!(payment, InternalPayment::Payment(_, _)))
        .expect("payment to subaddress yet no payment")
      else {
        panic!("filtered payment wasn't a payment")
      };

      return (tx_key.deref() * addr.spend(), vec![]);
    }

    if should_use_additional_keys {
      let mut additional_keys_pub = vec![];
      for (additional_key, payment) in additional_keys.into_iter().zip(&self.payments) {
        let addr = payment.address();
        // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
        // /src/device/device_default.cpp#L308-L312
        if addr.is_subaddress() {
          additional_keys_pub.push(additional_key.deref() * addr.spend());
        } else {
          additional_keys_pub.push(additional_key.deref() * ED25519_BASEPOINT_TABLE)
        }
      }
      return (tx_key.deref() * ED25519_BASEPOINT_TABLE, additional_keys_pub);
    }

    debug_assert!(!has_payments_to_subaddresses);
    debug_assert!(!should_use_additional_keys);
    (tx_key.deref() * ED25519_BASEPOINT_TABLE, vec![])
  }

  pub(crate) fn commitments_and_encrypted_amounts(
    &self,
    key_images: &[EdwardsPoint],
  ) -> Vec<(Commitment, EncryptedAmount)> {
    let shared_key_derivations = self.shared_key_derivations(key_images);

    let mut res = Vec::with_capacity(self.payments.len());
    for (payment, shared_key_derivations) in self.payments.iter().zip(shared_key_derivations) {
      let amount = match payment {
        InternalPayment::Payment(_, amount) => *amount,
        InternalPayment::Change(_) => {
          let inputs = self.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
          let payments = self
            .payments
            .iter()
            .filter_map(|payment| match payment {
              InternalPayment::Payment(_, amount) => Some(amount),
              InternalPayment::Change(_) => None,
            })
            .sum::<u64>();
          let necessary_fee = self.weight_and_necessary_fee().1;
          // Safe since the constructor checked this TX has enough funds for itself
          inputs - (payments + necessary_fee)
        }
      };
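      // Added note (not in the original): the commitment below is to `amount` under the derived
      // mask, while the amount itself is sent to the recipient via the compact 8-byte encryption.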
      let commitment = Commitment::new(shared_key_derivations.commitment_mask(), amount);
      let encrypted_amount = EncryptedAmount::Compact {
        amount: shared_key_derivations.compact_amount_encryption(amount),
      };
      res.push((commitment, encrypted_amount));
    }
    res
  }

  pub(crate) fn sum_output_masks(&self, key_images: &[EdwardsPoint]) -> Scalar {
    self
      .commitments_and_encrypted_amounts(key_images)
      .into_iter()
      .map(|(commitment, _)| commitment.mask)
      .sum()
  }
}
197
networks/monero/wallet/src/tests/extra.rs
Normal file
@@ -0,0 +1,197 @@
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use crate::{
  io::write_varint,
  extra::{MAX_TX_EXTRA_PADDING_COUNT, ExtraField, Extra},
};

// Tests derived from
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
// tests/unit_tests/test_tx_utils.cpp
// which is licensed as follows:
#[rustfmt::skip]
/*
  Copyright (c) 2014-2022, The Monero Project

  All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions are met:

  1. Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

  2. Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

  3. Neither the name of the copyright holder nor the names of its contributors
  may be used to endorse or promote products derived from this software without
  specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
  FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
  SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

  Parts of the project are originally copyright (c) 2012-2013 The Cryptonote
  developers

  Parts of the project are originally copyright (c) 2014 The Boolberry
  developers, distributed under the MIT licence:

  Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

const PUB_KEY_BYTES: [u8; 33] = [
  1, 30, 208, 98, 162, 133, 64, 85, 83, 112, 91, 188, 89, 211, 24, 131, 39, 154, 22, 228, 80, 63,
  198, 141, 173, 111, 244, 183, 4, 149, 186, 140, 230,
];
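// Added note (not in the original): the leading byte (1) is the tx_extra tag for a public key;
// the remaining 32 bytes are the compressed point which `pub_key` below decompresses.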

fn pub_key() -> EdwardsPoint {
  CompressedEdwardsY(PUB_KEY_BYTES[1 .. PUB_KEY_BYTES.len()].try_into().expect("invalid pub key"))
    .decompress()
    .unwrap()
}

fn test_write_buf(extra: &Extra, buf: &[u8]) {
  let mut w: Vec<u8> = vec![];
  Extra::write(extra, &mut w).unwrap();
  assert_eq!(buf, w);
}

#[test]
fn empty_extra() {
  let buf: Vec<u8> = vec![];
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert!(extra.0.is_empty());
  test_write_buf(&extra, &buf);
}

#[test]
fn padding_only_size_1() {
  let buf: Vec<u8> = vec![0];
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::Padding(1)]);
  test_write_buf(&extra, &buf);
}

#[test]
fn padding_only_size_2() {
  let buf: Vec<u8> = vec![0, 0];
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::Padding(2)]);
  test_write_buf(&extra, &buf);
}

#[test]
fn padding_only_max_size() {
  let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT];
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::Padding(MAX_TX_EXTRA_PADDING_COUNT)]);
  test_write_buf(&extra, &buf);
}

#[test]
fn padding_only_exceed_max_size() {
  let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT + 1];
  Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}

#[test]
fn invalid_padding_only() {
  let buf: Vec<u8> = vec![0, 42];
  Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}

#[test]
fn pub_key_only() {
  let buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]);
  test_write_buf(&extra, &buf);
}

#[test]
fn extra_nonce_only() {
  let buf: Vec<u8> = vec![2, 1, 42];
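  // Added note (not in the original): 2 is the nonce tag, 1 the varint length, 42 the nonce byte.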
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::Nonce(vec![42])]);
  test_write_buf(&extra, &buf);
}

#[test]
fn extra_nonce_only_wrong_size() {
  let mut buf: Vec<u8> = vec![0; 20];
  buf[0] = 2;
  buf[1] = 255;
  Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}

#[test]
fn pub_key_and_padding() {
  let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
  buf.extend([
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  ]);
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key()), ExtraField::Padding(76)]);
  test_write_buf(&extra, &buf);
}

#[test]
fn pub_key_and_invalid_padding() {
  let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
  buf.extend([0, 1]);
  Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}

#[test]
fn extra_mysterious_minergate_only() {
  let buf: Vec<u8> = vec![222, 1, 42];
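  // Added note (not in the original): 222 (0xDE) is the 'mysterious MinerGate' tag, followed by a
  // varint length (1) and the payload byte (42).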
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![42])]);
  test_write_buf(&extra, &buf);
}

#[test]
fn extra_mysterious_minergate_only_large() {
  let mut buf: Vec<u8> = vec![222];
  write_varint(&512u64, &mut buf).unwrap();
  buf.extend_from_slice(&vec![0; 512]);
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![0; 512])]);
  test_write_buf(&extra, &buf);
}

#[test]
fn extra_mysterious_minergate_only_wrong_size() {
  let mut buf: Vec<u8> = vec![0; 20];
  buf[0] = 222;
  buf[1] = 255;
  Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}

#[test]
fn extra_mysterious_minergate_and_pub_key() {
  let mut buf: Vec<u8> = vec![222, 1, 42];
  buf.extend(PUB_KEY_BYTES.to_vec());
  let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  assert_eq!(
    extra.0,
    vec![ExtraField::MysteriousMinergate(vec![42]), ExtraField::PublicKey(pub_key())]
  );
  test_write_buf(&extra, &buf);
}
1
networks/monero/wallet/src/tests/mod.rs
Normal file
@@ -0,0 +1 @@
mod extra;
144
networks/monero/wallet/src/view_pair.rs
Normal file
@@ -0,0 +1,144 @@
use core::ops::Deref;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};

use crate::{
  primitives::keccak256_to_scalar,
  address::{Network, AddressType, SubaddressIndex, MoneroAddress},
};

/// An error while working with a ViewPair.
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum ViewPairError {
  /// The spend key was torsioned.
  ///
  /// Torsioned spend keys are of questionable spendability. This library avoids that question by
  /// rejecting such ViewPairs.
  // CLSAG seems to support it if the challenge does a torsion clear, FCMP++ should ship with a
  // torsion clear, yet it's not worth it to modify CLSAG sign to generate challenges until the
  // torsion clears and ensure spendability (nor can we reasonably guarantee that in the future)
  #[cfg_attr(feature = "std", error("torsioned spend key"))]
  TorsionedSpendKey,
}

/// The pair of keys necessary to scan transactions.
///
/// This is composed of the public spend key and the private view key.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct ViewPair {
  spend: EdwardsPoint,
  pub(crate) view: Zeroizing<Scalar>,
}

impl ViewPair {
  /// Create a new ViewPair.
  pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> Result<Self, ViewPairError> {
    if !spend.is_torsion_free() {
      Err(ViewPairError::TorsionedSpendKey)?;
    }
    Ok(ViewPair { spend, view })
  }

  /// The public spend key for this ViewPair.
  pub fn spend(&self) -> EdwardsPoint {
    self.spend
  }

  /// The public view key for this ViewPair.
  pub fn view(&self) -> EdwardsPoint {
    self.view.deref() * ED25519_BASEPOINT_TABLE
  }

  pub(crate) fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
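    // Added note (not in the original): this is Monero's subaddress derivation
    // m = H_s("SubAddr\0" || private view key || account || address), reduced to a scalar.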
    keccak256_to_scalar(Zeroizing::new(
      [
        b"SubAddr\0".as_ref(),
        Zeroizing::new(self.view.to_bytes()).as_ref(),
        &index.account().to_le_bytes(),
        &index.address().to_le_bytes(),
      ]
      .concat(),
    ))
  }

  pub(crate) fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
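    // Added note (not in the original): the subaddress spend key is D = B + m*G and its view key
    // is C = a*D, with m the derivation above, B our spend key, and a our private view key.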
    let scalar = self.subaddress_derivation(index);
    let spend = self.spend + (&scalar * ED25519_BASEPOINT_TABLE);
    let view = self.view.deref() * spend;
    (spend, view)
  }

  /// Derive a legacy address from this ViewPair.
  ///
  /// Subaddresses SHOULD be used instead.
  pub fn legacy_address(&self, network: Network) -> MoneroAddress {
    MoneroAddress::new(network, AddressType::Legacy, self.spend, self.view())
  }

  /// Derive a legacy integrated address from this ViewPair.
  ///
  /// Subaddresses SHOULD be used instead.
  pub fn legacy_integrated_address(&self, network: Network, payment_id: [u8; 8]) -> MoneroAddress {
    MoneroAddress::new(network, AddressType::LegacyIntegrated(payment_id), self.spend, self.view())
  }

  /// Derive a subaddress from this ViewPair.
  pub fn subaddress(&self, network: Network, subaddress: SubaddressIndex) -> MoneroAddress {
    let (spend, view) = self.subaddress_keys(subaddress);
    MoneroAddress::new(network, AddressType::Subaddress, spend, view)
  }
}
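// Illustrative usage sketch (added commentary, not part of the original file; assumes private
// spend scalar `s` and view scalar `v` are already in hand, and a SubaddressIndex constructor as
// provided by this crate's address module):
//   let view_pair = ViewPair::new(&s * ED25519_BASEPOINT_TABLE, Zeroizing::new(v))?;
//   let address = view_pair.subaddress(Network::Mainnet, SubaddressIndex::new(0, 1).unwrap());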

/// The pair of keys necessary to scan outputs immune to the burning bug.
///
/// This is composed of the public spend key and a non-zero private view key.
///
/// 'Guaranteed' outputs, or transaction outputs scanned in a manner immune to the burning bug,
/// are not officially specified by the Monero project. They should only be used if necessary.
/// No support outside of monero-wallet is promised.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct GuaranteedViewPair(pub(crate) ViewPair);

impl GuaranteedViewPair {
  /// Create a new GuaranteedViewPair.
  pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> Result<Self, ViewPairError> {
    ViewPair::new(spend, view).map(GuaranteedViewPair)
  }

  /// The public spend key for this GuaranteedViewPair.
  pub fn spend(&self) -> EdwardsPoint {
    self.0.spend()
  }

  /// The public view key for this GuaranteedViewPair.
  pub fn view(&self) -> EdwardsPoint {
    self.0.view()
  }

  /// Returns an address with the provided specification.
  ///
  /// The returned address will be a featured address with the guaranteed flag set. These should
  /// not be presumed to be interoperable with any other software.
  pub fn address(
    &self,
    network: Network,
    subaddress: Option<SubaddressIndex>,
    payment_id: Option<[u8; 8]>,
  ) -> MoneroAddress {
    let (spend, view) = if let Some(index) = subaddress {
      self.0.subaddress_keys(index)
    } else {
      (self.spend(), self.view())
    };

    MoneroAddress::new(
      network,
      AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed: true },
      spend,
      view,
    )
  }
}