Rewrite monero-wallet's send code

I have yet to redo the multisig code and the builder. The new send code should be much
cleaner, albeit slower due to redoing work.

This compiles with clippy --all-features. I have to finish the multisig/builder
for --all-targets to work (and start updating the rest of Serai).
Luke Parker
2024-06-27 07:36:45 -04:00
parent b3b0edb82f
commit 70c36ed06c
14 changed files with 1189 additions and 1791 deletions


@@ -54,7 +54,7 @@ pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
w.write_all(&[*byte])
}
/// Write a number, VarInt-encoded,.
/// Write a number, VarInt-encoded.
///
/// This will panic if the VarInt exceeds u64::MAX.
pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
@@ -81,7 +81,7 @@ pub fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()>
w.write_all(&point.compress().to_bytes())
}
/// Write a list of elements, without length-prefixing,.
/// Write a list of elements, without length-prefixing.
pub fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
@@ -93,7 +93,7 @@ pub fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
Ok(())
}
/// Write a list of elements, with length-prefixing,.
/// Write a list of elements, with length-prefixing.
pub fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
@@ -115,17 +115,17 @@ pub fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
Ok(read_bytes::<_, 1>(r)?[0])
}
/// Read a u16, little-endian encoded,.
/// Read a u16, little-endian encoded.
pub fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
read_bytes(r).map(u16::from_le_bytes)
}
/// Read a u32, little-endian encoded,.
/// Read a u32, little-endian encoded.
pub fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
read_bytes(r).map(u32::from_le_bytes)
}
/// Read a u64, little-endian encoded,.
/// Read a u64, little-endian encoded.
pub fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
read_bytes(r).map(u64::from_le_bytes)
}


@@ -222,9 +222,12 @@ fn core(
/// The CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Clsag {
D: EdwardsPoint,
s: Vec<Scalar>,
c1: Scalar,
/// The difference of the commitment randomnesses, scaling the key image generator.
pub D: EdwardsPoint,
/// The responses for each ring member.
pub s: Vec<Scalar>,
/// The first challenge in the ring.
pub c1: Scalar,
}
struct ClsagSignCore {
@@ -383,7 +386,7 @@ impl Clsag {
Ok(())
}
/// The weight a CLSAG will take within a Monero transaction.
/// The length a CLSAG will take once serialized.
pub fn fee_weight(ring_len: usize) -> usize {
(ring_len * 32) + 32 + 32
}
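For reference, a quick check of what this works out to (assuming the 16-member rings current as of hard fork v16; `s` holds one 32-byte scalar per ring member, and `c1` and `D` are 32 bytes each, per the struct above):

```rust
assert_eq!(Clsag::fee_weight(16), (16 * 32) + 32 + 32); // 576 bytes
```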


@@ -745,6 +745,8 @@ pub trait Rpc: Sync + Clone + Debug {
/// Get the currently estimated fee rate from the node.
///
/// This may be manipulated to unsafe levels and MUST be sanity checked.
///
/// This MUST NOT be expected to be deterministic in any way.
// TODO: Take a sanity check argument
async fn get_fee_rate(&self, priority: FeePriority) -> Result<FeeRate, RpcError> {
#[derive(Deserialize, Debug)]
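Until that sanity-check argument exists, the check has to live with the caller. Below is a minimal sketch of such a guard, assuming `FeeRate::calculate_fee_from_weight` (used elsewhere in this commit) is reachable by the caller; the cap and "typical weight" constants are illustrative, not values from this crate:

```rust
use monero_rpc::{Rpc, RpcError};
// (FeeRate / FeePriority imports omitted; where they live shifts in this rewrite)

const FEE_CAP_FOR_TYPICAL_TX: u64 = 100_000_000; // piconero; an arbitrary illustrative cap
const TYPICAL_TX_WEIGHT: usize = 2_000;

async fn checked_fee_rate(rpc: &impl Rpc, priority: FeePriority) -> Result<FeeRate, RpcError> {
  let fee_rate = rpc.get_fee_rate(priority).await?;
  // Refuse a fee rate which would spend more than our cap on a typically-sized transaction
  if fee_rate.calculate_fee_from_weight(TYPICAL_TX_WEIGHT) > FEE_CAP_FOR_TYPICAL_TX {
    Err(RpcError::InvalidNode("node's fee rate exceeds the configured cap".to_string()))?;
  }
  Ok(fee_rate)
}
```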


@@ -74,6 +74,7 @@ impl AddressType {
matches!(self, AddressType::Featured { subaddress: true, .. })
}
// TODO: wallet-core PaymentId? TX extra crate imported here?
pub fn payment_id(&self) -> Option<[u8; 8]> {
if let AddressType::Integrated(id) = self {
Some(*id)


@@ -11,7 +11,7 @@ use curve25519_dalek::edwards::EdwardsPoint;
use monero_serai::{DEFAULT_LOCK_WINDOW, COINBASE_LOCK_WINDOW, BLOCK_TIME};
use monero_rpc::{RpcError, Rpc};
use crate::SpendableOutput;
use crate::scan::SpendableOutput;
const RECENT_WINDOW: usize = 15;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
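As a worked value for the constant above (assuming BLOCK_TIME is Monero's two-minute target, i.e. 120 seconds):

```rust
// 365 * 24 * 60 * 60 = 31_536_000 seconds per year, / 120 seconds per block = 262_800 blocks
assert_eq!(365 * 24 * 60 * 60 / 120, 262_800);
```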


@@ -4,23 +4,15 @@
#![cfg_attr(not(feature = "std"), no_std)]
use core::ops::Deref;
use std_shims::{
io as stdio,
collections::{HashSet, HashMap},
};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
use monero_serai::{
io::{read_byte, read_u16, write_varint},
io::write_varint,
primitives::{Commitment, keccak256, keccak256_to_scalar},
ringct::{RctType, EncryptedAmount},
ringct::EncryptedAmount,
transaction::Input,
};
@@ -29,13 +21,12 @@ pub use monero_serai::*;
pub use monero_rpc as rpc;
pub mod extra;
pub(crate) use extra::{PaymentId, ExtraField, Extra};
pub(crate) use extra::{PaymentId, Extra};
pub use monero_address as address;
use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, MoneroAddress};
mod scan;
pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};
pub mod scan;
#[cfg(feature = "std")]
pub mod decoys;
@@ -46,246 +37,12 @@ pub mod decoys {
}
pub use decoys::{DecoySelection, Decoys};
mod send;
pub use send::{FeePriority, FeeRate, TransactionError, Change, SignableTransaction, Eventuality};
#[cfg(feature = "std")]
pub use send::SignableTransactionBuilder;
#[cfg(feature = "multisig")]
pub(crate) use send::InternalPayment;
#[cfg(feature = "multisig")]
pub use send::TransactionMachine;
pub mod send;
/* TODO
#[cfg(test)]
mod tests;
/// Monero protocol version.
///
/// v15 is omitted as v15 was simply v14 and v16 being active at the same time, with regards to the
/// transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
v14,
v16,
Custom {
ring_len: usize,
bp_plus: bool,
optimal_rct_type: RctType,
view_tags: bool,
v16_fee: bool,
},
}
impl TryFrom<u8> for Protocol {
type Error = ();
fn try_from(version: u8) -> Result<Self, ()> {
Ok(match version {
14 => Protocol::v14, // TODO: 13 | 14?
15 | 16 => Protocol::v16,
_ => Err(())?,
})
}
}
impl Protocol {
/// Amount of ring members under this protocol version.
pub fn ring_len(&self) -> usize {
match self {
Protocol::v14 => 11,
Protocol::v16 => 16,
Protocol::Custom { ring_len, .. } => *ring_len,
}
}
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
///
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
pub fn bp_plus(&self) -> bool {
match self {
Protocol::v14 => false,
Protocol::v16 => true,
Protocol::Custom { bp_plus, .. } => *bp_plus,
}
}
// TODO: Make this an Option when we support pre-RCT protocols
pub fn optimal_rct_type(&self) -> RctType {
match self {
Protocol::v14 => RctType::ClsagBulletproof,
Protocol::v16 => RctType::ClsagBulletproofPlus,
Protocol::Custom { optimal_rct_type, .. } => *optimal_rct_type,
}
}
/// Whether or not the specified version uses view tags.
pub fn view_tags(&self) -> bool {
match self {
Protocol::v14 => false,
Protocol::v16 => true,
Protocol::Custom { view_tags, .. } => *view_tags,
}
}
/// Whether or not the specified version uses the fee algorithm from Monero
/// hard fork version 16 (released in v18 binaries).
pub fn v16_fee(&self) -> bool {
match self {
Protocol::v14 => false,
Protocol::v16 => true,
Protocol::Custom { v16_fee, .. } => *v16_fee,
}
}
pub fn write<W: stdio::Write>(&self, w: &mut W) -> stdio::Result<()> {
match self {
Protocol::v14 => w.write_all(&[0, 14]),
Protocol::v16 => w.write_all(&[0, 16]),
Protocol::Custom { ring_len, bp_plus, optimal_rct_type, view_tags, v16_fee } => {
// Custom, version 0
w.write_all(&[1, 0])?;
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
w.write_all(&[u8::from(*bp_plus)])?;
w.write_all(&[u8::from(*optimal_rct_type)])?;
w.write_all(&[u8::from(*view_tags)])?;
w.write_all(&[u8::from(*v16_fee)])
}
}
}
pub fn read<R: stdio::Read>(r: &mut R) -> stdio::Result<Protocol> {
Ok(match read_byte(r)? {
// Monero protocol
0 => match read_byte(r)? {
14 => Protocol::v14,
16 => Protocol::v16,
_ => Err(stdio::Error::other("unrecognized monero protocol"))?,
},
// Custom
1 => match read_byte(r)? {
0 => Protocol::Custom {
ring_len: read_u16(r)?.into(),
bp_plus: match read_byte(r)? {
0 => false,
1 => true,
_ => Err(stdio::Error::other("invalid bool serialization"))?,
},
optimal_rct_type: RctType::try_from(read_byte(r)?)
.map_err(|()| stdio::Error::other("invalid RctType serialization"))?,
view_tags: match read_byte(r)? {
0 => false,
1 => true,
_ => Err(stdio::Error::other("invalid bool serialization"))?,
},
v16_fee: match read_byte(r)? {
0 => false,
1 => true,
_ => Err(stdio::Error::other("invalid bool serialization"))?,
},
},
_ => Err(stdio::Error::other("unrecognized custom protocol serialization"))?,
},
_ => Err(stdio::Error::other("unrecognized protocol serialization"))?,
})
}
}
fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}
// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
let mut u = b"uniqueness".to_vec();
for input in inputs {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
write_varint(height, &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
}
keccak256(u)
}
// Hs("view_tag" || 8Ra || o), Hs(8Ra || o), and H(8Ra || 0x8d) with uniqueness inclusion in the
// Scalar as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(
uniqueness: Option<[u8; 32]>,
ecdh: EdwardsPoint,
o: usize,
) -> (u8, Scalar, [u8; 8]) {
// 8Ra
let mut output_derivation = ecdh.mul_by_cofactor().compress().to_bytes().to_vec();
let mut payment_id_xor = [0; 8];
payment_id_xor
.copy_from_slice(&keccak256([output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
// || o
write_varint(&o, &mut output_derivation).unwrap();
let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
// uniqueness ||
let shared_key = if let Some(uniqueness) = uniqueness {
[uniqueness.as_ref(), &output_derivation].concat()
} else {
output_derivation
};
(view_tag, keccak256_to_scalar(shared_key), payment_id_xor)
}
pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
let mut mask = b"commitment_mask".to_vec();
mask.extend(shared_key.to_bytes());
keccak256_to_scalar(mask)
}
pub(crate) fn compact_amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
(amount ^ u64::from_le_bytes(keccak256(amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
}
pub trait EncryptedAmountExt {
/// Decrypt an EncryptedAmount into the Commitment it encrypts.
///
/// The caller must verify the decrypted Commitment matches with the actual Commitment used
/// within in the Monero protocol.
fn decrypt(&self, key: Scalar) -> Commitment;
}
impl EncryptedAmountExt for EncryptedAmount {
/// Decrypt an EncryptedAmount into the Commitment it encrypts.
///
/// The caller must verify the decrypted Commitment matches with the actual Commitment used
/// within in the Monero protocol.
fn decrypt(&self, key: Scalar) -> Commitment {
match self {
// TODO: Add a test vector for this
EncryptedAmount::Original { mask, amount } => {
let mask_shared_sec = keccak256(key.as_bytes());
let mask =
Scalar::from_bytes_mod_order(*mask) - Scalar::from_bytes_mod_order(mask_shared_sec);
let amount_shared_sec = keccak256(mask_shared_sec);
let amount_scalar =
Scalar::from_bytes_mod_order(*amount) - Scalar::from_bytes_mod_order(amount_shared_sec);
// d2b from rctTypes.cpp
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
Commitment::new(mask, amount)
}
EncryptedAmount::Compact { amount } => Commitment::new(
commitment_mask(key),
u64::from_le_bytes(compact_amount_encryption(u64::from_le_bytes(*amount), key)),
),
}
}
}
*/
/// The private view key and public spend key, enabling scanning transactions.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
@@ -356,67 +113,112 @@ impl ViewPair {
}
}
/// Transaction scanner.
/// This scanner is capable of generating subaddresses, additionally scanning for them once they've
/// been explicitly generated. If the burning bug is attempted, any secondary outputs will be
/// ignored.
#[derive(Clone)]
pub struct Scanner {
pair: ViewPair,
// Also contains the spend key as None
pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
pub(crate) fn compact_amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
(amount ^ u64::from_le_bytes(keccak256(amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
}
impl Zeroize for Scanner {
fn zeroize(&mut self) {
self.pair.zeroize();
// These may not be effective, unfortunately
for (mut key, mut value) in self.subaddresses.drain() {
key.zeroize();
value.zeroize();
}
if let Some(ref mut burning_bug) = self.burning_bug.take() {
for mut output in burning_bug.drain() {
output.zeroize();
}
}
}
#[derive(Clone, PartialEq, Eq, Zeroize)]
struct SharedKeyDerivations {
// Hs("view_tag" || 8Ra || o)
view_tag: u8,
// Hs(uniqueness || 8Ra || o) where uniqueness may be empty
shared_key: Scalar,
}
impl Drop for Scanner {
fn drop(&mut self) {
self.zeroize();
impl SharedKeyDerivations {
// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
fn uniqueness(inputs: &[Input]) -> [u8; 32] {
let mut u = b"uniqueness".to_vec();
for input in inputs {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
write_varint(height, &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
}
keccak256(u)
}
impl ZeroizeOnDrop for Scanner {}
#[allow(clippy::needless_pass_by_value)]
fn output_derivations(
uniqueness: Option<[u8; 32]>,
ecdh: Zeroizing<EdwardsPoint>,
o: usize,
) -> Zeroizing<SharedKeyDerivations> {
// 8Ra
let mut output_derivation = Zeroizing::new(
Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(),
);
impl Scanner {
/// Create a Scanner from a ViewPair.
///
/// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
///
/// When an output is successfully scanned, the output key MUST be saved to disk.
///
/// When a new scanner is created, ALL saved output keys must be passed in to be secure.
///
/// If None is passed, a modified shared key derivation is used which is immune to the burning
/// bug (specifically the Guaranteed feature from Featured Addresses).
pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
let mut subaddresses = HashMap::new();
subaddresses.insert(pair.spend.compress(), None);
Scanner { pair, subaddresses, burning_bug }
// || o
{
let output_derivation: &mut Vec<u8> = output_derivation.as_mut();
write_varint(&o, output_derivation).unwrap();
}
/// Register a subaddress.
// There used to be an address function here, yet it wasn't safe. It could generate addresses
// incompatible with the Scanner. While we could return None for that, then we have the issue
// of runtime failures to generate an address.
// Removing that API was the simplest option.
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
let (spend, _) = self.pair.subaddress_keys(subaddress);
self.subaddresses.insert(spend.compress(), Some(subaddress));
let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
// uniqueness ||
let output_derivation = if let Some(uniqueness) = uniqueness {
Zeroizing::new([uniqueness.as_ref(), &output_derivation].concat())
} else {
output_derivation
};
Zeroizing::new(SharedKeyDerivations {
view_tag,
shared_key: keccak256_to_scalar(&output_derivation),
})
}
// H(8Ra || 0x8d)
// TODO: Make this itself a PaymentId
#[allow(clippy::needless_pass_by_value)]
fn payment_id_xor(ecdh: Zeroizing<EdwardsPoint>) -> [u8; 8] {
// 8Ra
let output_derivation = Zeroizing::new(
Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(),
);
let mut payment_id_xor = [0; 8];
payment_id_xor
.copy_from_slice(&keccak256([output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
payment_id_xor
}
fn commitment_mask(&self) -> Scalar {
let mut mask = b"commitment_mask".to_vec();
mask.extend(self.shared_key.as_bytes());
let res = keccak256_to_scalar(&mask);
mask.zeroize();
res
}
fn decrypt(&self, enc_amount: &EncryptedAmount) -> Commitment {
match enc_amount {
// TODO: Add a test vector for this
EncryptedAmount::Original { mask, amount } => {
let mask_shared_sec = keccak256(self.shared_key.as_bytes());
let mask =
Scalar::from_bytes_mod_order(*mask) - Scalar::from_bytes_mod_order(mask_shared_sec);
let amount_shared_sec = keccak256(mask_shared_sec);
let amount_scalar =
Scalar::from_bytes_mod_order(*amount) - Scalar::from_bytes_mod_order(amount_shared_sec);
// d2b from rctTypes.cpp
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
Commitment::new(mask, amount)
}
EncryptedAmount::Compact { amount } => Commitment::new(
self.commitment_mask(),
u64::from_le_bytes(compact_amount_encryption(u64::from_le_bytes(*amount), self.shared_key)),
),
}
}
}
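Because the compact amount encryption above is just a XOR against a mask derived from the shared key, the same function also decrypts, which is why `decrypt` feeds the ciphertext straight back through `compact_amount_encryption`. A small sketch of that round trip:

```rust
// Round-trip property of the XOR-based compact amount encoding (illustrative sketch).
fn compact_amount_round_trips(amount: u64, key: Scalar) -> bool {
  let encrypted: [u8; 8] = compact_amount_encryption(amount, key);
  let decrypted =
    u64::from_le_bytes(compact_amount_encryption(u64::from_le_bytes(encrypted), key));
  decrypted == amount
}
```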


@@ -3,13 +3,13 @@ use std::sync::{Arc, RwLock};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use crate::{
Protocol, address::MoneroAddress, FeeRate, SpendableOutput, Change, Decoys, SignableTransaction,
WalletProtocol, address::MoneroAddress, FeeRate, SpendableOutput, Change, Decoys, SignableTransaction,
TransactionError, extra::MAX_ARBITRARY_DATA_SIZE,
};
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
struct SignableTransactionBuilderInternal {
protocol: Protocol,
protocol: WalletProtocol,
fee_rate: FeeRate,
r_seed: Option<Zeroizing<[u8; 32]>>,
@@ -22,7 +22,7 @@ struct SignableTransactionBuilderInternal {
impl SignableTransactionBuilderInternal {
// Takes in the change address so users don't miss that they have to manually set one
// If they don't, all leftover funds will become part of the fee
fn new(protocol: Protocol, fee_rate: FeeRate, change_address: Change) -> Self {
fn new(protocol: WalletProtocol, fee_rate: FeeRate, change_address: Change) -> Self {
Self {
protocol,
fee_rate,
@@ -87,7 +87,7 @@ impl SignableTransactionBuilder {
Self(self.0.clone())
}
pub fn new(protocol: Protocol, fee_rate: FeeRate, change_address: Change) -> Self {
pub fn new(protocol: WalletProtocol, fee_rate: FeeRate, change_address: Change) -> Self {
Self(Arc::new(RwLock::new(SignableTransactionBuilderInternal::new(
protocol,
fee_rate,


@@ -1,13 +1,18 @@
use core::ops::Deref;
use std_shims::{
io::{self, Read, Write},
vec::Vec,
string::ToString,
io::{self, Read, Write},
collections::{HashSet, HashMap},
};
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use monero_rpc::{RpcError, Rpc};
use monero_serai::{
@@ -16,15 +21,13 @@ use monero_serai::{
transaction::{Input, Timelock, Transaction},
block::Block,
};
use crate::{
PaymentId, Extra, address::SubaddressIndex, Scanner, EncryptedAmountExt, uniqueness, shared_key,
};
use crate::{address::SubaddressIndex, ViewPair, PaymentId, Extra, SharedKeyDerivations};
/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
pub tx: [u8; 32],
pub o: u8,
pub o: u32,
}
impl core::fmt::Debug for AbsoluteId {
@@ -36,17 +39,17 @@ impl core::fmt::Debug for AbsoluteId {
impl AbsoluteId {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.tx)?;
w.write_all(&[self.o])
w.write_all(&self.o.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(32 + 1);
let mut serialized = Vec::with_capacity(32 + 4);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
Ok(AbsoluteId { tx: read_bytes(r)?, o: read_byte(r)? })
Ok(AbsoluteId { tx: read_bytes(r)?, o: read_u32(r)? })
}
}
@@ -244,7 +247,10 @@ impl SpendableOutput {
self.global_index = *rpc
.get_o_indexes(self.output.absolute.tx)
.await?
.get(usize::from(self.output.absolute.o))
.get(
usize::try_from(self.output.absolute.o)
.map_err(|_| RpcError::InternalError("output's index didn't fit within a usize"))?,
)
.ok_or(RpcError::InvalidNode(
"node returned output indexes didn't include an index for this output".to_string(),
))?;
@@ -330,6 +336,72 @@ impl<O: Clone + Zeroize> Timelocked<O> {
}
}
/// Transaction scanner.
///
/// This scanner is capable of generating subaddresses, additionally scanning for them once they've
/// been explicitly generated. If the burning bug is attempted, any secondary outputs will be
/// ignored.
#[derive(Clone)]
pub struct Scanner {
pair: ViewPair,
// Also contains the spend key as None
pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
}
impl Zeroize for Scanner {
fn zeroize(&mut self) {
self.pair.zeroize();
// These may not be effective, unfortunately
for (mut key, mut value) in self.subaddresses.drain() {
key.zeroize();
value.zeroize();
}
if let Some(ref mut burning_bug) = self.burning_bug.take() {
for mut output in burning_bug.drain() {
output.zeroize();
}
}
}
}
impl Drop for Scanner {
fn drop(&mut self) {
self.zeroize();
}
}
impl ZeroizeOnDrop for Scanner {}
impl Scanner {
/// Create a Scanner from a ViewPair.
///
/// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
///
/// When an output is successfully scanned, the output key MUST be saved to disk.
///
/// When a new scanner is created, ALL saved output keys must be passed in to be secure.
///
/// If None is passed, a modified shared key derivation is used which is immune to the burning
/// bug (specifically the Guaranteed feature from Featured Addresses).
pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
let mut subaddresses = HashMap::new();
subaddresses.insert(pair.spend.compress(), None);
Scanner { pair, subaddresses, burning_bug }
}
/// Register a subaddress.
// There used to be an address function here, yet it wasn't safe. It could generate addresses
// incompatible with the Scanner. While we could return None for that, then we have the issue
// of runtime failures to generate an address.
// Removing that API was the simplest option.
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
let (spend, _) = self.pair.subaddress_keys(subaddress);
self.subaddresses.insert(spend.compress(), Some(subaddress));
}
}
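A short usage sketch of the scanner API above (how the ViewPair and Transaction are obtained is assumed, and `Timelocked::not_locked` is the accessor carried over from the prior scan code):

```rust
use std::collections::HashSet;

// Sketch: scan a single transaction with the burning bug protection enabled.
fn scan_one(pair: ViewPair, tx: &Transaction) -> Vec<ReceivedOutput> {
  // Passing Some(..) enables the burning bug protection; for this to be sound, ALL output
  // keys previously scanned (and saved to disk) must be loaded into the set.
  let mut scanner = Scanner::from_view(pair, Some(HashSet::new()));
  // Subaddresses must be registered before they'll be scanned for.
  if let Some(subaddress) = SubaddressIndex::new(0, 1) {
    scanner.register_subaddress(subaddress);
  }
  scanner.scan_transaction(tx).not_locked()
}
```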
impl Scanner {
/// Scan a transaction to discover the received outputs.
pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
@@ -379,23 +451,29 @@ impl Scanner {
break;
}
};
let (view_tag, shared_key, payment_id_xor) = shared_key(
if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix().inputs)) } else { None },
self.pair.view.deref() * key,
let ecdh = Zeroizing::new(self.pair.view.deref() * key);
let output_derivations = SharedKeyDerivations::output_derivations(
if self.burning_bug.is_none() {
Some(SharedKeyDerivations::uniqueness(&tx.prefix().inputs))
} else {
None
},
ecdh.clone(),
o,
);
let payment_id = payment_id.map(|id| id ^ payment_id_xor);
let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh));
if let Some(actual_view_tag) = output.view_tag {
if actual_view_tag != view_tag {
if actual_view_tag != output_derivations.view_tag {
continue;
}
}
// P - shared == spend
let subaddress =
self.subaddresses.get(&(output_key - (&shared_key * ED25519_BASEPOINT_TABLE)).compress());
let subaddress = self.subaddresses.get(
&(output_key - (&output_derivations.shared_key * ED25519_BASEPOINT_TABLE)).compress(),
);
if subaddress.is_none() {
continue;
}
@@ -407,7 +485,7 @@ impl Scanner {
// If we did though, it'd enable bypassing the included burning bug protection
assert!(output_key.is_torsion_free());
let mut key_offset = shared_key;
let mut key_offset = output_derivations.shared_key;
if let Some(subaddress) = subaddress {
key_offset += self.pair.subaddress_derivation(subaddress);
}
@@ -424,7 +502,7 @@ impl Scanner {
};
commitment = match proofs.base.encrypted_amounts.get(o) {
Some(amount) => amount.decrypt(shared_key),
Some(amount) => output_derivations.decrypt(amount),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => break,


@@ -0,0 +1,178 @@
use zeroize::Zeroize;
use crate::{
ringct::RctProofs,
transaction::{Input, Timelock, Transaction},
send::SignableTransaction,
};
/// The eventual output of a SignableTransaction.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Eventuality(SignableTransaction);
impl From<SignableTransaction> for Eventuality {
fn from(tx: SignableTransaction) -> Eventuality {
Eventuality(tx)
}
}
impl Eventuality {
/// Return the extra any TX following this intent would use.
///
/// This enables building a HashMap of Extra -> Eventuality for efficiently checking if an
/// on-chain transaction may match one of several eventualities.
///
/// This extra is cryptographically bound to the set of outputs intended to be spent as inputs.
/// This means two SignableTransactions for the same set of payments will have distinct extras.
/// This does not guarantee the matched transaction actually spent the intended outputs.
pub fn extra(&self) -> Vec<u8> {
self.0.extra()
}
/// Return if this TX matches the SignableTransaction this was created from.
///
/// Matching the SignableTransaction means this transaction created the expected outputs, they're
/// scannable, they're not locked, and this transaction claims to use the intended inputs (though
/// this is not guaranteed). This 'claim' is evaluated by this transaction using the transaction
/// keys derived from the intended inputs. This ensures two SignableTransactions with the same
/// intended payments don't match for each other's `Eventuality`s (as they'll have distinct
/// inputs intended).
#[must_use]
pub fn matches(&self, tx: &Transaction) -> bool {
// Verify extra
if self.0.extra() != tx.prefix().extra {
return false;
}
// Also ensure no timelock was set
if tx.prefix().timelock != Timelock::None {
return false;
}
// Check the amount of inputs aligns
if tx.prefix().inputs.len() != self.0.inputs.len() {
return false;
}
// Collect the key images used by this transaction
let Ok(key_images) = tx
.prefix()
.inputs
.iter()
.map(|input| match input {
Input::Gen(_) => Err(()),
Input::ToKey { key_image, .. } => Ok(*key_image),
})
.collect::<Result<Vec<_>, _>>()
else {
return false;
};
// Check the outputs
if self.0.outputs(&key_images) != tx.prefix().outputs {
return false;
}
// Check the encrypted amounts and commitments
let commitments_and_encrypted_amounts = self.0.commitments_and_encrypted_amounts(&key_images);
let Transaction::V2 { proofs: Some(RctProofs { ref base, .. }), .. } = tx else {
return false;
};
if base.commitments !=
commitments_and_encrypted_amounts
.iter()
.map(|(commitment, _)| commitment.calculate())
.collect::<Vec<_>>()
{
return false;
}
if base.encrypted_amounts !=
commitments_and_encrypted_amounts.into_iter().map(|(_, amount)| amount).collect::<Vec<_>>()
{
return false;
}
true
}
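A sketch of the HashMap-based matching flow the `extra` documentation above describes: key pending eventualities by their extra so any on-chain transaction is only run through the full `matches` check against at most one candidate (the helper names are illustrative):

```rust
use std::collections::HashMap;

// Index pending eventualities by the extra any matching transaction must carry.
fn index_eventualities(pending: Vec<Eventuality>) -> HashMap<Vec<u8>, Eventuality> {
  pending.into_iter().map(|eventuality| (eventuality.extra(), eventuality)).collect()
}

// Check an on-chain transaction against the index.
fn matched<'a>(
  index: &'a HashMap<Vec<u8>, Eventuality>,
  tx: &Transaction,
) -> Option<&'a Eventuality> {
  let candidate = index.get(&tx.prefix().extra)?;
  candidate.matches(tx).then_some(candidate)
}
```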
/*
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
self.protocol.write(w)?;
write_raw_vec(write_byte, self.r_seed.as_ref(), w)?;
write_vec(write_point, &self.inputs, w)?;
fn write_payment<W: io::Write>(payment: &InternalPayment, w: &mut W) -> io::Result<()> {
match payment {
InternalPayment::Payment(payment, need_dummy_payment_id) => {
w.write_all(&[0])?;
write_vec(write_byte, payment.0.to_string().as_bytes(), w)?;
w.write_all(&payment.1.to_le_bytes())?;
if *need_dummy_payment_id {
w.write_all(&[1])
} else {
w.write_all(&[0])
}
}
InternalPayment::Change(change, change_view) => {
w.write_all(&[1])?;
write_vec(write_byte, change.0.to_string().as_bytes(), w)?;
w.write_all(&change.1.to_le_bytes())?;
if let Some(view) = change_view.as_ref() {
w.write_all(&[1])?;
write_scalar(view, w)
} else {
w.write_all(&[0])
}
}
}
}
write_vec(write_payment, &self.payments, w)?;
write_vec(write_byte, &self.extra, w)
}
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(128);
self.write(&mut buf).unwrap();
buf
}
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Eventuality> {
fn read_address<R: io::Read>(r: &mut R) -> io::Result<MoneroAddress> {
String::from_utf8(read_vec(read_byte, r)?)
.ok()
.and_then(|str| MoneroAddress::from_str_raw(&str).ok())
.ok_or_else(|| io::Error::other("invalid address"))
}
fn read_payment<R: io::Read>(r: &mut R) -> io::Result<InternalPayment> {
Ok(match read_byte(r)? {
0 => InternalPayment::Payment(
(read_address(r)?, read_u64(r)?),
match read_byte(r)? {
0 => false,
1 => true,
_ => Err(io::Error::other("invalid need additional"))?,
},
),
1 => InternalPayment::Change(
(read_address(r)?, read_u64(r)?),
match read_byte(r)? {
0 => None,
1 => Some(Zeroizing::new(read_scalar(r)?)),
_ => Err(io::Error::other("invalid change view"))?,
},
),
_ => Err(io::Error::other("invalid payment"))?,
})
}
Ok(Eventuality {
protocol: RctType::read(r)?,
r_seed: Zeroizing::new(read_bytes::<_, 32>(r)?),
inputs: read_vec(read_point, r)?,
payments: read_vec(read_payment, r)?,
extra: read_vec(read_byte, r)?,
})
}
*/
}

File diff suppressed because it is too large.


@@ -1,516 +0,0 @@
use core::ops::Deref;
use std_shims::{
vec::Vec,
string::ToString,
io::{self, Read, Write},
};
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero_rpc::{RpcError, Rpc};
use monero_serai::{
io::*,
primitives::Commitment,
transaction::{Input, Timelock, Transaction},
block::Block,
};
use crate::{
PaymentId, Extra, address::SubaddressIndex, Scanner, EncryptedAmountExt, uniqueness, shared_key,
};
/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
pub tx: [u8; 32],
pub o: u8,
}
impl core::fmt::Debug for AbsoluteId {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt.debug_struct("AbsoluteId").field("tx", &hex::encode(self.tx)).field("o", &self.o).finish()
}
}
impl AbsoluteId {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.tx)?;
w.write_all(&[self.o])
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(32 + 1);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
Ok(AbsoluteId { tx: read_bytes(r)?, o: read_byte(r)? })
}
}
/// The data contained with an output.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct OutputData {
pub key: EdwardsPoint,
/// Absolute difference between the spend key and the key in this output
pub key_offset: Scalar,
pub commitment: Commitment,
}
impl core::fmt::Debug for OutputData {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("OutputData")
.field("key", &hex::encode(self.key.compress().0))
.field("key_offset", &hex::encode(self.key_offset.to_bytes()))
.field("commitment", &self.commitment)
.finish()
}
}
impl OutputData {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.key.compress().to_bytes())?;
w.write_all(&self.key_offset.to_bytes())?;
w.write_all(&self.commitment.mask.to_bytes())?;
w.write_all(&self.commitment.amount.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(32 + 32 + 32 + 8);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<OutputData> {
Ok(OutputData {
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(read_scalar(r)?, read_u64(r)?),
})
}
}
/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Metadata {
/// The subaddress this output was sent to.
pub subaddress: Option<SubaddressIndex>,
/// The payment ID included with this output.
/// There are 2 circumstances in which the reference wallet2 ignores the payment ID
/// but the payment ID will be returned here anyway:
///
/// 1) If the payment ID is tied to an output received by a subaddress account
/// that spent Monero in the transaction (the received output is considered
/// "change" and is not considered a "payment" in this case). If there are multiple
/// spending subaddress accounts in a transaction, the highest index spent key image
/// is used to determine the spending subaddress account.
///
/// 2) If the payment ID is the unencrypted variant and the block's hf version is
/// v12 or higher (https://github.com/serai-dex/serai/issues/512)
pub payment_id: Option<PaymentId>,
/// Arbitrary data encoded in TX extra.
pub arbitrary_data: Vec<Vec<u8>>,
}
impl core::fmt::Debug for Metadata {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("Metadata")
.field("subaddress", &self.subaddress)
.field("payment_id", &self.payment_id)
.field("arbitrary_data", &self.arbitrary_data.iter().map(hex::encode).collect::<Vec<_>>())
.finish()
}
}
impl Metadata {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
if let Some(subaddress) = self.subaddress {
w.write_all(&[1])?;
w.write_all(&subaddress.account().to_le_bytes())?;
w.write_all(&subaddress.address().to_le_bytes())?;
} else {
w.write_all(&[0])?;
}
if let Some(payment_id) = self.payment_id {
w.write_all(&[1])?;
payment_id.write(w)?;
} else {
w.write_all(&[0])?;
}
w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
for part in &self.arbitrary_data {
w.write_all(&[u8::try_from(part.len()).unwrap()])?;
w.write_all(part)?;
}
Ok(())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(1 + 8 + 1);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Metadata> {
let subaddress = if read_byte(r)? == 1 {
Some(
SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
.ok_or_else(|| io::Error::other("invalid subaddress in metadata"))?,
)
} else {
None
};
Ok(Metadata {
subaddress,
payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
arbitrary_data: {
let mut data = vec![];
for _ in 0 .. read_u32(r)? {
let len = read_byte(r)?;
data.push(read_raw_vec(read_byte, usize::from(len), r)?);
}
data
},
})
}
}
/// A received output, defined as its absolute ID, data, and metadara.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ReceivedOutput {
pub absolute: AbsoluteId,
pub data: OutputData,
pub metadata: Metadata,
}
impl ReceivedOutput {
pub fn key(&self) -> EdwardsPoint {
self.data.key
}
pub fn key_offset(&self) -> Scalar {
self.data.key_offset
}
pub fn commitment(&self) -> Commitment {
self.data.commitment.clone()
}
pub fn arbitrary_data(&self) -> &[Vec<u8>] {
&self.metadata.arbitrary_data
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.absolute.write(w)?;
self.data.write(w)?;
self.metadata.write(w)
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
Ok(ReceivedOutput {
absolute: AbsoluteId::read(r)?,
data: OutputData::read(r)?,
metadata: Metadata::read(r)?,
})
}
}
/// A spendable output, defined as a received output and its index on the Monero blockchain.
/// This index is dependent on the Monero blockchain and will only be known once the output is
/// included within a block. This may change if there's a reorganization.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SpendableOutput {
pub output: ReceivedOutput,
pub global_index: u64,
}
impl SpendableOutput {
/// Update the spendable output's global index. This is intended to be called if a
/// re-organization occurred.
pub async fn refresh_global_index(&mut self, rpc: &impl Rpc) -> Result<(), RpcError> {
self.global_index = *rpc
.get_o_indexes(self.output.absolute.tx)
.await?
.get(usize::from(self.output.absolute.o))
.ok_or(RpcError::InvalidNode(
"node returned output indexes didn't include an index for this output".to_string(),
))?;
Ok(())
}
pub async fn from(rpc: &impl Rpc, output: ReceivedOutput) -> Result<SpendableOutput, RpcError> {
let mut output = SpendableOutput { output, global_index: 0 };
output.refresh_global_index(rpc).await?;
Ok(output)
}
pub fn key(&self) -> EdwardsPoint {
self.output.key()
}
pub fn key_offset(&self) -> Scalar {
self.output.key_offset()
}
pub fn commitment(&self) -> Commitment {
self.output.commitment()
}
pub fn arbitrary_data(&self) -> &[Vec<u8>] {
self.output.arbitrary_data()
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.output.write(w)?;
w.write_all(&self.global_index.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<SpendableOutput> {
Ok(SpendableOutput { output: ReceivedOutput::read(r)?, global_index: read_u64(r)? })
}
}
/// A collection of timelocked outputs, either received or spendable.
#[derive(Zeroize)]
pub struct Timelocked<O: Clone + Zeroize>(Timelock, Vec<O>);
impl<O: Clone + Zeroize> Drop for Timelocked<O> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<O: Clone + Zeroize> ZeroizeOnDrop for Timelocked<O> {}
impl<O: Clone + Zeroize> Timelocked<O> {
pub fn timelock(&self) -> Timelock {
self.0
}
/// Return the outputs if they're not timelocked, or an empty vector if they are.
#[must_use]
pub fn not_locked(&self) -> Vec<O> {
if self.0 == Timelock::None {
return self.1.clone();
}
vec![]
}
/// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
#[must_use]
pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
// If the Timelocks are comparable, return the outputs if they're now unlocked
if self.0 <= timelock {
Some(self.1.clone())
} else {
None
}
}
#[must_use]
pub fn ignore_timelock(&self) -> Vec<O> {
self.1.clone()
}
}
impl Scanner {
/// Scan a transaction to discover the received outputs.
pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
// Only scan RCT TXs since we can only spend RCT outputs
if tx.version() != 2 {
return Timelocked(tx.prefix().timelock, vec![]);
}
let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()) else {
return Timelocked(tx.prefix().timelock, vec![]);
};
let Some((tx_keys, additional)) = extra.keys() else {
return Timelocked(tx.prefix().timelock, vec![]);
};
let payment_id = extra.payment_id();
let mut res = vec![];
for (o, output) in tx.prefix().outputs.iter().enumerate() {
// https://github.com/serai-dex/serai/issues/106
if let Some(burning_bug) = self.burning_bug.as_ref() {
if burning_bug.contains(&output.key) {
continue;
}
}
let output_key = decompress_point(output.key.to_bytes());
if output_key.is_none() {
continue;
}
let output_key = output_key.unwrap();
let additional = additional.as_ref().map(|additional| additional.get(o));
for key in tx_keys.iter().map(|key| Some(Some(key))).chain(core::iter::once(additional)) {
let key = match key {
Some(Some(key)) => key,
Some(None) => {
// This is non-standard. There were additional keys, yet not one for this output
// https://github.com/monero-project/monero/
// blob/04a1e2875d6e35e27bb21497988a6c822d319c28/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L1062
continue;
}
None => {
break;
}
};
let (view_tag, shared_key, payment_id_xor) = shared_key(
if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix().inputs)) } else { None },
self.pair.view.deref() * key,
o,
);
let payment_id = payment_id.map(|id| id ^ payment_id_xor);
if let Some(actual_view_tag) = output.view_tag {
if actual_view_tag != view_tag {
continue;
}
}
// P - shared == spend
let subaddress =
self.subaddresses.get(&(output_key - (&shared_key * ED25519_BASEPOINT_TABLE)).compress());
if subaddress.is_none() {
continue;
}
let subaddress = *subaddress.unwrap();
// If it has torsion, it'll subtract the non-torsioned shared key to a torsioned key
// We will not have a torsioned key in our HashMap of keys, so we wouldn't identify it as
// ours
// If we did though, it'd enable bypassing the included burning bug protection
assert!(output_key.is_torsion_free());
let mut key_offset = shared_key;
if let Some(subaddress) = subaddress {
key_offset += self.pair.subaddress_derivation(subaddress);
}
// Since we've found an output to us, get its amount
let mut commitment = Commitment::zero();
// Miner transaction
if let Some(amount) = output.amount {
commitment.amount = amount;
// Regular transaction
} else {
let proofs = match &tx {
Transaction::V2 { proofs: Some(proofs), .. } => &proofs,
_ => return Timelocked(tx.prefix().timelock, vec![]),
};
commitment = match proofs.base.encrypted_amounts.get(o) {
Some(amount) => amount.decrypt(shared_key),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => break,
};
// If this is a malicious commitment, move to the next output
// Any other R value will calculate to a different spend key and are therefore ignorable
if Some(&commitment.calculate()) != proofs.base.commitments.get(o) {
break;
}
}
if commitment.amount != 0 {
res.push(ReceivedOutput {
absolute: AbsoluteId { tx: tx.hash(), o: o.try_into().unwrap() },
data: OutputData { key: output_key, key_offset, commitment },
metadata: Metadata { subaddress, payment_id, arbitrary_data: extra.data() },
});
if let Some(burning_bug) = self.burning_bug.as_mut() {
burning_bug.insert(output.key);
}
}
// Break to prevent public keys from being included multiple times, triggering multiple
// inclusions of the same output
break;
}
}
Timelocked(tx.prefix().timelock, res)
}
/// Scan a block to obtain its spendable outputs. Its the presence in a block giving these
/// transactions their global index, and this must be batched as asking for the index of specific
/// transactions is a dead giveaway for which transactions you successfully scanned. This
/// function obtains the output indexes for the miner transaction, incrementing from there
/// instead.
pub async fn scan(
&mut self,
rpc: &impl Rpc,
block: &Block,
) -> Result<Vec<Timelocked<SpendableOutput>>, RpcError> {
let mut index = rpc.get_o_indexes(block.miner_tx.hash()).await?[0];
let mut txs = vec![block.miner_tx.clone()];
txs.extend(rpc.get_transactions(&block.txs).await?);
let map = |mut timelock: Timelocked<ReceivedOutput>, index| {
if timelock.1.is_empty() {
None
} else {
Some(Timelocked(
timelock.0,
timelock
.1
.drain(..)
.map(|output| SpendableOutput {
global_index: index + u64::from(output.absolute.o),
output,
})
.collect(),
))
}
};
let mut res = vec![];
for tx in txs {
if let Some(timelock) = map(self.scan_transaction(&tx), index) {
res.push(timelock);
}
index += u64::try_from(
tx.prefix()
.outputs
.iter()
// Filter to v2 miner TX outputs/RCT outputs since we're tracking the RCT output index
.filter(|output| {
let is_v2_miner_tx =
(tx.version() == 2) && matches!(tx.prefix().inputs.first(), Some(Input::Gen(..)));
is_v2_miner_tx || output.amount.is_none()
})
.count(),
)
.unwrap()
}
Ok(res)
}
}


@@ -0,0 +1,258 @@
use rand_core::SeedableRng;
use rand_chacha::ChaCha20Rng;
use curve25519_dalek::{
constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE},
Scalar, EdwardsPoint,
};
use crate::{
io::varint_len,
primitives::Commitment,
ringct::{
clsag::Clsag, bulletproofs::Bulletproof, EncryptedAmount, RctType, RctBase, RctPrunable,
RctProofs,
},
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
extra::{ARBITRARY_DATA_MARKER, PaymentId, ExtraField, Extra},
send::{InternalPayment, SignableTransaction, SignableTransactionWithKeyImages},
};
impl SignableTransaction {
// Output the inputs for this transaction.
pub(crate) fn inputs(&self, key_images: &[EdwardsPoint]) -> Vec<Input> {
debug_assert_eq!(self.inputs.len(), key_images.len());
let mut res = Vec::with_capacity(self.inputs.len());
for ((_, decoys), key_image) in self.inputs.iter().zip(key_images) {
res.push(Input::ToKey {
amount: None,
key_offsets: decoys.offsets().to_vec(),
key_image: *key_image,
});
}
res
}
// Output the outputs for this transaction.
pub(crate) fn outputs(&self, key_images: &[EdwardsPoint]) -> Vec<Output> {
let shared_key_derivations = self.shared_key_derivations(key_images);
debug_assert_eq!(self.payments.len(), shared_key_derivations.len());
let mut res = Vec::with_capacity(self.payments.len());
for (payment, shared_key_derivations) in self.payments.iter().zip(&shared_key_derivations) {
let key =
(&shared_key_derivations.shared_key * ED25519_BASEPOINT_TABLE) + payment.address().spend;
res.push(Output {
key: key.compress(),
amount: None,
view_tag: (match self.rct_type {
RctType::ClsagBulletproof => false,
RctType::ClsagBulletproofPlus => true,
_ => panic!("unsupported RctType"),
})
.then_some(shared_key_derivations.view_tag),
});
}
res
}
// Calculate the TX extra for this transaction.
pub(crate) fn extra(&self) -> Vec<u8> {
let (tx_key, additional_keys) = self.transaction_keys_pub();
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
let payment_id_xors = self.payment_id_xors();
debug_assert_eq!(self.payments.len(), payment_id_xors.len());
let amount_of_keys = 1 + additional_keys.len();
let mut extra = Extra::new(tx_key, additional_keys);
if let Some((id, id_xor)) =
self.payments.iter().zip(&payment_id_xors).find_map(|(payment, payment_id_xor)| {
payment.address().payment_id().map(|id| (id, payment_id_xor))
})
{
let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes();
let mut id_vec = Vec::with_capacity(1 + 8);
PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
extra.push(ExtraField::Nonce(id_vec));
} else {
// If there's no payment ID, we push a dummy (as wallet2 does) if there's only one payment
if (self.payments.len() == 2) &&
self.payments.iter().any(|payment| matches!(payment, InternalPayment::Change(_, _)))
{
let (_, payment_id_xor) = self
.payments
.iter()
.zip(&payment_id_xors)
.find(|(payment, _)| matches!(payment, InternalPayment::Payment(_, _)))
.expect("multiple change outputs?");
let mut id_vec = Vec::with_capacity(1 + 8);
// The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask
PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap();
extra.push(ExtraField::Nonce(id_vec));
}
}
// Include data if present
for part in &self.data {
let mut arb = vec![ARBITRARY_DATA_MARKER];
arb.extend(part);
extra.push(ExtraField::Nonce(arb));
}
let mut serialized = Vec::with_capacity(32 * amount_of_keys);
extra.write(&mut serialized).unwrap();
serialized
}
pub(crate) fn weight_and_fee(&self) -> (usize, u64) {
/*
This transaction is variable length to:
- The decoy offsets (fixed)
- The TX extra (variable to key images, requiring an interactive protocol)
Thankfully, the TX extra *length* is fixed. Accordingly, we can calculate the inevitable TX's
weight at this time with a shimmed transaction.
*/
let base_weight = {
let mut key_images = Vec::with_capacity(self.inputs.len());
let mut clsags = Vec::with_capacity(self.inputs.len());
let mut pseudo_outs = Vec::with_capacity(self.inputs.len());
for _ in &self.inputs {
key_images.push(ED25519_BASEPOINT_POINT);
clsags.push(Clsag {
D: ED25519_BASEPOINT_POINT,
s: vec![Scalar::ZERO; 16],
c1: Scalar::ZERO,
});
pseudo_outs.push(ED25519_BASEPOINT_POINT);
}
let mut encrypted_amounts = Vec::with_capacity(self.payments.len());
let mut bp_commitments = Vec::with_capacity(self.payments.len());
let mut commitments = Vec::with_capacity(self.payments.len());
for _ in &self.payments {
encrypted_amounts.push(EncryptedAmount::Compact { amount: [0; 8] });
bp_commitments.push(Commitment::zero());
commitments.push(ED25519_BASEPOINT_POINT);
}
// TODO: Remove this. Deserialize an empty BP?
let bulletproof = (match self.rct_type {
RctType::ClsagBulletproof => {
Bulletproof::prove(&mut ChaCha20Rng::from_seed([0; 32]), &bp_commitments)
}
RctType::ClsagBulletproofPlus => {
Bulletproof::prove_plus(&mut ChaCha20Rng::from_seed([0; 32]), bp_commitments)
}
_ => panic!("unsupported RctType"),
})
.expect("couldn't prove BP(+)s for this many payments despite checking in constructor?");
// `- 1` to remove the one byte for the 0 fee
Transaction::V2 {
prefix: TransactionPrefix {
timelock: Timelock::None,
inputs: self.inputs(&key_images),
outputs: self.outputs(&key_images),
extra: self.extra(),
},
proofs: Some(RctProofs {
base: RctBase { fee: 0, encrypted_amounts, pseudo_outs: vec![], commitments },
prunable: RctPrunable::Clsag { bulletproof, clsags, pseudo_outs },
}),
}
.weight() -
1
};
// If we don't have a change output, the difference is the fee
if !self.payments.iter().any(|payment| matches!(payment, InternalPayment::Change(_, _))) {
let inputs = self.inputs.iter().map(|input| input.0.commitment().amount).sum::<u64>();
let payments = self
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(amount),
InternalPayment::Change(_, _) => None,
})
.sum::<u64>();
// Safe since the constructor checks inputs > payments before any calls to weight_and_fee
let fee = inputs - payments;
return (base_weight + varint_len(fee), fee);
}
// We now have the base weight, without the fee encoded
// The fee itself will impact the weight as its encoding is [1, 9] bytes long
let mut possible_weights = Vec::with_capacity(9);
for i in 1 ..= 9 {
possible_weights.push(base_weight + i);
}
debug_assert_eq!(possible_weights.len(), 9);
// We now calculate the fee which would be used for each weight
let mut possible_fees = Vec::with_capacity(9);
for weight in possible_weights {
possible_fees.push(self.fee_rate.calculate_fee_from_weight(weight));
}
// We now look for the fee whose length matches the length used to derive it
let mut weight_and_fee = None;
for (len, possible_fee) in possible_fees.into_iter().enumerate() {
let len = 1 + len;
debug_assert!(1 <= len);
debug_assert!(len <= 9);
// We use the first fee whose encoded length is not larger than the length used within this
// weight
// This should be because the lengths are equal, yet means if somehow none are equal, this
// will still terminate successfully
if varint_len(possible_fee) <= len {
weight_and_fee = Some((base_weight + len, possible_fee));
break;
}
}
weight_and_fee.unwrap()
}
}
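A concrete illustration of the fee/length search above, with the numbers assumed purely for the example (a linear fee rate of 40 piconero per weight unit and a base weight of 2000):

```rust
// len = 1 -> weight 2001 -> fee 80_040, whose VarInt encoding is 3 bytes (3 > 1): rejected
// len = 2 -> weight 2002 -> fee 80_080, still 3 bytes (3 > 2): rejected
// len = 3 -> weight 2003 -> fee 80_120, 3 bytes (3 <= 3): accepted
// so weight_and_fee() returns (2003, 80_120) under this assumed fee rate.
```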
impl SignableTransactionWithKeyImages {
pub(crate) fn transaction_without_clsags_and_pseudo_outs(&self) -> Transaction {
let commitments_and_encrypted_amounts =
self.intent.commitments_and_encrypted_amounts(&self.key_images);
let mut commitments = Vec::with_capacity(self.intent.payments.len());
let mut bp_commitments = Vec::with_capacity(self.intent.payments.len());
let mut encrypted_amounts = Vec::with_capacity(self.intent.payments.len());
for (commitment, encrypted_amount) in commitments_and_encrypted_amounts {
commitments.push(commitment.calculate());
bp_commitments.push(commitment);
encrypted_amounts.push(encrypted_amount);
}
let bulletproof = {
let mut bp_rng = self.intent.seeded_rng(b"bulletproof");
(match self.intent.rct_type {
RctType::ClsagBulletproof => Bulletproof::prove(&mut bp_rng, &bp_commitments),
RctType::ClsagBulletproofPlus => Bulletproof::prove_plus(&mut bp_rng, bp_commitments),
_ => panic!("unsupported RctType"),
})
.expect("couldn't prove BP(+)s for this many payments despite checking in constructor?")
};
Transaction::V2 {
prefix: TransactionPrefix {
timelock: Timelock::None,
inputs: self.intent.inputs(&self.key_images),
outputs: self.intent.outputs(&self.key_images),
extra: self.intent.extra(),
},
proofs: Some(RctProofs {
base: RctBase {
fee: self.intent.weight_and_fee().1,
encrypted_amounts,
pseudo_outs: vec![],
commitments,
},
prunable: RctPrunable::Clsag { bulletproof, clsags: vec![], pseudo_outs: vec![] },
}),
}
}
}


@@ -0,0 +1,231 @@
use core::ops::Deref;
use zeroize::Zeroizing;
use rand_core::SeedableRng;
use rand_chacha::ChaCha20Rng;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
use crate::{
primitives::{keccak256, Commitment},
ringct::EncryptedAmount,
SharedKeyDerivations, compact_amount_encryption,
send::{InternalPayment, SignableTransaction},
};
fn seeded_rng(
dst: &'static [u8],
view_key: &Zeroizing<Scalar>,
output_keys: impl Iterator<Item = EdwardsPoint>,
) -> ChaCha20Rng {
// Apply the DST
let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]);
transcript.extend(dst);
// Bind to the private view key to prevent foreign entities from rebuilding the transcript
transcript.extend(view_key.to_bytes());
// Ensure uniqueness across transactions by binding to a use-once object
// The output key is also binding to the output's key image, making this use-once
for key in output_keys {
transcript.extend(key.compress().to_bytes());
}
ChaCha20Rng::from_seed(keccak256(&transcript))
}
impl SignableTransaction {
pub(crate) fn seeded_rng(&self, dst: &'static [u8]) -> ChaCha20Rng {
seeded_rng(dst, &self.sender_view_key, self.inputs.iter().map(|(input, _)| input.output.key()))
}
fn has_payments_to_subaddresses(&self) -> bool {
self.payments.iter().any(|payment| match payment {
InternalPayment::Payment(addr, _) => addr.is_subaddress(),
InternalPayment::Change(addr, view) => {
if view.is_some() {
// It should not be possible to construct a change specification to a subaddress with a
// view key
// TODO
debug_assert!(!addr.is_subaddress());
}
addr.is_subaddress()
}
})
}
fn should_use_additional_keys(&self) -> bool {
let has_payments_to_subaddresses = self.has_payments_to_subaddresses();
if !has_payments_to_subaddresses {
return false;
}
let has_change_view = self.payments.iter().any(|payment| match payment {
InternalPayment::Payment(_, _) => false,
InternalPayment::Change(_, view) => view.is_some(),
});
/*
If sending to a subaddress, the shared key is not `rG` yet `rB`. Because of this, a
per-subaddress shared key is necessary, causing the usage of additional keys.
The one exception is if we're sending to a subaddress in a 2-output transaction. The second
output, the change output, will attempt scanning the singular key `rB` with `v rB`. While we
cannot calculate `r vB` with just `r` (as that'd require `vB` when we presumably only have
`vG` when sending), since we do in fact have `v` (due to it being our own view key for our
change output), we can still calculate the shared secret.
*/
has_payments_to_subaddresses && !((self.payments.len() == 2) && has_change_view)
}
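Restating that exception as equations (notation assumed here: r is the transaction key, B the destination subaddress's spend key, so the single transaction key is R = rB; v is our private view key; cofactor clearing omitted): the change output is scanned as v·R = v·r·B. A sender without v could only compute r·(vG), which does not equal v·r·B, hence the general need for additional keys; because the change output is our own, v is known and v·R is directly computable, so the 2-output case needs no additional keys.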
// Calculate the transaction keys used as randomness.
fn transaction_keys(&self) -> (Zeroizing<Scalar>, Vec<Zeroizing<Scalar>>) {
let mut rng = self.seeded_rng(b"transaction_keys");
let tx_key = Zeroizing::new(Scalar::random(&mut rng));
let mut additional_keys = vec![];
if self.should_use_additional_keys() {
for _ in 0 .. self.payments.len() {
additional_keys.push(Zeroizing::new(Scalar::random(&mut rng)));
}
}
(tx_key, additional_keys)
}
fn ecdhs(&self) -> Vec<Zeroizing<EdwardsPoint>> {
let (tx_key, additional_keys) = self.transaction_keys();
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
let (tx_key_pub, additional_keys_pub) = self.transaction_keys_pub();
debug_assert_eq!(additional_keys_pub.len(), additional_keys.len());
let mut res = Vec::with_capacity(self.payments.len());
for (i, payment) in self.payments.iter().enumerate() {
let addr = payment.address();
let key_to_use =
if addr.is_subaddress() { additional_keys.get(i).unwrap_or(&tx_key) } else { &tx_key };
let ecdh = match payment {
// If we don't have the view key, use the key dedicated for this address (r A)
InternalPayment::Payment(_, _) | InternalPayment::Change(_, None) => {
Zeroizing::new(key_to_use.deref() * addr.view)
}
// If we do have the view key, use the commitment to the key (a R)
InternalPayment::Change(_, Some(view)) => Zeroizing::new(view.deref() * tx_key_pub),
};
res.push(ecdh);
}
res
}
// Calculate the shared keys and the necessary derivations.
pub(crate) fn shared_key_derivations(
&self,
key_images: &[EdwardsPoint],
) -> Vec<Zeroizing<SharedKeyDerivations>> {
let ecdhs = self.ecdhs();
let uniqueness = SharedKeyDerivations::uniqueness(&self.inputs(key_images));
let mut res = Vec::with_capacity(self.payments.len());
for (i, (payment, ecdh)) in self.payments.iter().zip(ecdhs).enumerate() {
let addr = payment.address();
res.push(SharedKeyDerivations::output_derivations(
addr.is_guaranteed().then_some(uniqueness),
ecdh,
i,
));
}
res
}
// Calculate the payment ID XOR masks.
pub(crate) fn payment_id_xors(&self) -> Vec<[u8; 8]> {
let mut res = Vec::with_capacity(self.payments.len());
for ecdh in self.ecdhs() {
res.push(SharedKeyDerivations::payment_id_xor(ecdh));
}
res
}
// Calculate the transaction_keys' commitments.
//
// These depend on the payments. Commitments for payments to subaddresses use the spend key for
// the generator.
pub(crate) fn transaction_keys_pub(&self) -> (EdwardsPoint, Vec<EdwardsPoint>) {
let (tx_key, additional_keys) = self.transaction_keys();
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
// The single transaction key uses the subaddress's spend key as its generator
let has_payments_to_subaddresses = self.has_payments_to_subaddresses();
let should_use_additional_keys = self.should_use_additional_keys();
if has_payments_to_subaddresses && (!should_use_additional_keys) {
debug_assert_eq!(additional_keys.len(), 0);
let InternalPayment::Payment(addr, _) = self
.payments
.iter()
.find(|payment| matches!(payment, InternalPayment::Payment(_, _)))
.expect("payment to subaddress yet no payment")
else {
panic!("filtered payment wasn't a payment")
};
// TODO: Support subaddresses as change?
debug_assert!(addr.is_subaddress());
return (tx_key.deref() * addr.spend, vec![]);
}
if should_use_additional_keys {
let mut additional_keys_pub = vec![];
for (additional_key, payment) in additional_keys.into_iter().zip(&self.payments) {
let addr = payment.address();
// TODO: Double check this against wallet2
if addr.is_subaddress() {
additional_keys_pub.push(additional_key.deref() * addr.spend);
} else {
additional_keys_pub.push(additional_key.deref() * ED25519_BASEPOINT_TABLE)
}
}
return (tx_key.deref() * ED25519_BASEPOINT_TABLE, additional_keys_pub);
}
debug_assert!(!has_payments_to_subaddresses);
debug_assert!(!should_use_additional_keys);
(tx_key.deref() * ED25519_BASEPOINT_TABLE, vec![])
}
pub(crate) fn commitments_and_encrypted_amounts(
&self,
key_images: &[EdwardsPoint],
) -> Vec<(Commitment, EncryptedAmount)> {
let shared_key_derivations = self.shared_key_derivations(key_images);
let mut res = Vec::with_capacity(self.payments.len());
for (payment, shared_key_derivations) in self.payments.iter().zip(shared_key_derivations) {
let amount = match payment {
InternalPayment::Payment(_, amount) => *amount,
InternalPayment::Change(_, _) => {
let inputs = self.inputs.iter().map(|input| input.0.commitment().amount).sum::<u64>();
let payments = self
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(amount),
InternalPayment::Change(_, _) => None,
})
.sum::<u64>();
let fee = self.weight_and_fee().1;
// Safe since the constructor checked this
inputs - (payments + fee)
}
};
let commitment = Commitment::new(shared_key_derivations.commitment_mask(), amount);
let encrypted_amount = EncryptedAmount::Compact {
amount: compact_amount_encryption(amount, shared_key_derivations.shared_key),
};
res.push((commitment, encrypted_amount));
}
res
}
}