Rename the coins folder to networks (#583)

* Rename the coins folder to networks

Ethereum isn't a coin. It's a network.

Resolves #357.

* More renames of coins -> networks in orchestration

* Correct paths in tests/

* cargo fmt
Luke Parker
2024-07-18 12:16:45 -07:00
committed by GitHub
parent 40cc180853
commit 7d2d739042
244 changed files with 102 additions and 99 deletions

View File

@@ -0,0 +1,75 @@
[package]
name = "monero-wallet"
version = "0.1.0"
description = "Wallet functionality for the Monero protocol, built around monero-serai"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }
# Used to send transactions
rand = { version = "0.8", default-features = false }
rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "group"] }
# Multisig dependencies
transcript = { package = "flexible-transcript", path = "../../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
group = { version = "0.13", default-features = false, optional = true }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false, optional = true }
frost = { package = "modular-frost", path = "../../../crypto/frost", default-features = false, features = ["ed25519"], optional = true }
hex = { version = "0.4", default-features = false, features = ["alloc"] }
monero-serai = { path = "..", default-features = false }
monero-rpc = { path = "../rpc", default-features = false }
monero-address = { path = "./address", default-features = false }
[dev-dependencies]
serde = { version = "1", default-features = false, features = ["derive", "alloc", "std"] }
serde_json = { version = "1", default-features = false, features = ["alloc", "std"] }
frost = { package = "modular-frost", path = "../../../crypto/frost", default-features = false, features = ["ed25519", "tests"] }
tokio = { version = "1", features = ["sync", "macros"] }
monero-simple-request-rpc = { path = "../rpc/simple-request", default-features = false }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"rand_core/std",
"rand/std",
"rand_chacha/std",
"rand_distr/std",
"monero-serai/std",
"monero-rpc/std",
"monero-address/std",
]
compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-serai/compile-time-generators"]
multisig = ["transcript", "group", "dalek-ff-group", "frost", "monero-serai/multisig", "std"]
default = ["std", "compile-time-generators"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,58 @@
# Monero Wallet
Wallet functionality for the Monero protocol, built around monero-serai. This
library prides itself on resolving common pitfalls developers may face.
monero-wallet also offers a FROST-inspired multisignature protocol orders of
magnitude more performant than Monero's own.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Features
- Scanning Monero transactions
- Sending Monero transactions
- Sending Monero transactions with a FROST-inspired threshold multisignature
protocol, orders of magnitude more performant than Monero's own
### Caveats
This library DOES attempt to do the following:
- Create on-chain transactions identical to how wallet2 would (unless told not
to)
- Not be detectable as monero-serai when scanning outputs
- Not reveal spent outputs to the connected RPC node
This library DOES NOT attempt to do the following:
- Have identical RPC behavior when creating transactions
- Be a wallet
This means that monero-serai shouldn't be fingerprintable on-chain. It also
shouldn't be fingerprintable if a targeted attack occurs to detect if the
receiving wallet is monero-serai or wallet2. It also should be generally safe
for usage with remote nodes.
It won't hide from remote nodes that it's monero-serai, however, potentially
allowing a remote node to profile you. The implications of this are left to the
user to consider.
It also won't act as a wallet, just as a wallet functionality library. wallet2
has several *non-transaction-level* policies, such as always attempting to use
two inputs to create transactions. These are considered out of scope for
monero-serai.
Finally, this library only supports producing transactions with CLSAG
signatures. That means this library cannot spend non-RingCT outputs.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `compile-time-generators` (on by default): Derives the generators at
compile-time so they don't need to be derived at runtime. This is recommended
if program size doesn't need to be kept minimal.
- `multisig`: Adds support for creation of transactions using a threshold
multisignature wallet.
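As a rough sketch of how a downstream crate might select these features, under the
assumption it depends on this crate by name (the version and source below are
illustrative placeholders, not published coordinates):

```toml
# Hypothetical dependency declarations; adjust the version/source to however the crate is obtained.
# Default features: `std` and `compile-time-generators` are enabled.
monero-wallet = { version = "0.1" }

# For a no-std build, disable default features. Note that enabling `multisig` pulls `std` back in,
# per the feature definitions in this crate's Cargo.toml.
# monero-wallet = { version = "0.1", default-features = false }
```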

View File

@@ -0,0 +1,49 @@
[package]
name = "monero-address"
version = "0.1.0"
description = "Rust implementation of Monero addresses"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/address"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
monero-io = { path = "../../io", default-features = false }
monero-primitives = { path = "../../primitives", default-features = false }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["std"] }
hex-literal = { version = "0.4", default-features = false }
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"monero-io/std",
]
default = ["std"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,11 @@
# Monero Address
Rust implementation of Monero addresses.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
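As a minimal usage sketch (the `monero_address` crate name, `main` wrapper, and `unwrap`
calls are illustrative assumptions; the address string is the mainnet legacy vector from
this crate's tests):

```rust
use monero_address::{Network, AddressType, MoneroAddress};

fn main() {
  // Mainnet legacy address from the test vectors.
  let s = "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
  // `from_str` takes the expected network and errors if the address is for a different one.
  let addr = MoneroAddress::from_str(Network::Mainnet, s).unwrap();
  assert_eq!(addr.kind(), &AddressType::Legacy);
  assert!(!addr.is_subaddress());
  assert_eq!(addr.payment_id(), None);
  // The `Display` implementation round-trips back to the original string.
  assert_eq!(addr.to_string(), s);
}
```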

View File

@@ -0,0 +1,106 @@
use std_shims::{vec::Vec, string::String};
use monero_primitives::keccak256;
const ALPHABET_LEN: u64 = 58;
const ALPHABET: &[u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
pub(crate) const BLOCK_LEN: usize = 8;
const ENCODED_BLOCK_LEN: usize = 11;
const CHECKSUM_LEN: usize = 4;
// The maximum possible length of an encoding of this many bytes
//
// This is used for determining padding/how many bytes an encoding actually uses
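// For example, a single byte has a maximum value of 255 = (4 * 58) + 23, so it requires two
// base58 digits and encoded_len_for_bytes(1) == 2. A full 8-byte block always requires
// ENCODED_BLOCK_LEN (11) digits.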
pub(crate) fn encoded_len_for_bytes(bytes: usize) -> usize {
let bits = u64::try_from(bytes).expect("length exceeded 2**64") * 8;
let mut max = if bits == 64 { u64::MAX } else { (1 << bits) - 1 };
let mut i = 0;
while max != 0 {
max /= ALPHABET_LEN;
i += 1;
}
i
}
// Encode an arbitrary-length stream of data
pub(crate) fn encode(bytes: &[u8]) -> String {
let mut res = String::with_capacity(bytes.len().div_ceil(BLOCK_LEN) * ENCODED_BLOCK_LEN);
for chunk in bytes.chunks(BLOCK_LEN) {
// Convert to a u64
let mut fixed_len_chunk = [0; BLOCK_LEN];
fixed_len_chunk[(BLOCK_LEN - chunk.len()) ..].copy_from_slice(chunk);
let mut val = u64::from_be_bytes(fixed_len_chunk);
// Convert to the base58 encoding
let mut chunk_str = [char::from(ALPHABET[0]); ENCODED_BLOCK_LEN];
let mut i = 0;
while val > 0 {
chunk_str[i] = ALPHABET[usize::try_from(val % ALPHABET_LEN)
.expect("ALPHABET_LEN exceeds usize despite being a usize")]
.into();
i += 1;
val /= ALPHABET_LEN;
}
// Only take used bytes, and since we put the LSBs in the first byte, reverse the byte order
for c in chunk_str.into_iter().take(encoded_len_for_bytes(chunk.len())).rev() {
res.push(c);
}
}
res
}
// Decode an arbitrary-length stream of data
pub(crate) fn decode(data: &str) -> Option<Vec<u8>> {
let mut res = Vec::with_capacity((data.len() / ENCODED_BLOCK_LEN) * BLOCK_LEN);
for chunk in data.as_bytes().chunks(ENCODED_BLOCK_LEN) {
// Convert the chunk back to a u64
let mut sum = 0u64;
for this_char in chunk {
sum = sum.checked_mul(ALPHABET_LEN)?;
sum += u64::try_from(ALPHABET.iter().position(|a| a == this_char)?)
.expect("alphabet len exceeded 2**64");
}
// From the size of the encoding, determine the size of the bytes
let mut used_bytes = None;
for i in 1 ..= BLOCK_LEN {
if encoded_len_for_bytes(i) == chunk.len() {
used_bytes = Some(i);
break;
}
}
// Only push on the used bytes
res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes.unwrap()) ..]);
}
Some(res)
}
// Encode an arbitrary-length stream of data, with a checksum
pub(crate) fn encode_check(mut data: Vec<u8>) -> String {
let checksum = keccak256(&data);
data.extend(&checksum[.. CHECKSUM_LEN]);
encode(&data)
}
// Decode an arbitrary-length stream of data, with a checksum
pub(crate) fn decode_check(data: &str) -> Option<Vec<u8>> {
if data.len() < CHECKSUM_LEN {
None?;
}
let mut res = decode(data)?;
let checksum_pos = res.len() - CHECKSUM_LEN;
if keccak256(&res[.. checksum_pos])[.. CHECKSUM_LEN] != res[checksum_pos ..] {
None?;
}
res.truncate(checksum_pos);
Some(res)
}

View File

@@ -0,0 +1,507 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::{self, Write};
use std_shims::{
vec,
string::{String, ToString},
};
use zeroize::Zeroize;
use curve25519_dalek::EdwardsPoint;
use monero_io::*;
mod base58check;
use base58check::{encode_check, decode_check};
#[cfg(test)]
mod tests;
/// The address type.
///
/// The officially specified addresses are supported, along with
/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789).
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressType {
/// A legacy address type.
Legacy,
/// A legacy address with a payment ID embedded.
LegacyIntegrated([u8; 8]),
/// A subaddress.
///
/// This is what SHOULD be used if specific functionality isn't needed.
Subaddress,
/// A featured address.
///
/// Featured Addresses are an unofficial address specification which is meant to be extensible
/// and support a variety of functionality. This functionality includes being a subaddress AND
/// having a payment ID, along with being immune to the burning bug.
///
/// At this time, support for featured addresses is limited to this crate. There should be no
/// expectation of interoperability.
Featured {
/// If this address is a subaddress.
subaddress: bool,
/// The payment ID associated with this address.
payment_id: Option<[u8; 8]>,
/// If this address is guaranteed.
///
/// A guaranteed address is one where any outputs scanned to it are guaranteed to be spendable
/// under the hardness of various cryptographic problems (which are assumed hard). This is via
/// a modified shared-key derivation which eliminates the burning bug.
guaranteed: bool,
},
}
impl AddressType {
/// If this address is a subaddress.
pub fn is_subaddress(&self) -> bool {
matches!(self, AddressType::Subaddress) ||
matches!(self, AddressType::Featured { subaddress: true, .. })
}
/// The payment ID within this address.
pub fn payment_id(&self) -> Option<[u8; 8]> {
if let AddressType::LegacyIntegrated(id) = self {
Some(*id)
} else if let AddressType::Featured { payment_id, .. } = self {
*payment_id
} else {
None
}
}
/// If this address is guaranteed.
///
/// A guaranteed address is one where any outputs scanned to it are guaranteed to be spendable
/// under the hardness of various cryptographic problems (which are assumed hard). This is via
/// a modified shared-key derivation which eliminates the burning bug.
pub fn is_guaranteed(&self) -> bool {
matches!(self, AddressType::Featured { guaranteed: true, .. })
}
}
/// A subaddress index.
///
/// Subaddresses are derived from a root using a `(account, address)` tuple as an index.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct SubaddressIndex {
account: u32,
address: u32,
}
impl SubaddressIndex {
/// Create a new SubaddressIndex.
pub const fn new(account: u32, address: u32) -> Option<SubaddressIndex> {
if (account == 0) && (address == 0) {
return None;
}
Some(SubaddressIndex { account, address })
}
/// Get the account this subaddress index is under.
pub const fn account(&self) -> u32 {
self.account
}
/// Get the address this subaddress index is for, within its account.
pub const fn address(&self) -> u32 {
self.address
}
}
/// Bytes used as prefixes when encoding addresses.
///
/// These distinguish the address's type.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct AddressBytes {
legacy: u8,
legacy_integrated: u8,
subaddress: u8,
featured: u8,
}
impl AddressBytes {
/// Create a new set of address bytes, one for each address type.
pub const fn new(
legacy: u8,
legacy_integrated: u8,
subaddress: u8,
featured: u8,
) -> Option<Self> {
if (legacy == legacy_integrated) || (legacy == subaddress) || (legacy == featured) {
return None;
}
if (legacy_integrated == subaddress) || (legacy_integrated == featured) {
return None;
}
if subaddress == featured {
return None;
}
Some(AddressBytes { legacy, legacy_integrated, subaddress, featured })
}
const fn to_const_generic(self) -> u32 {
((self.legacy as u32) << 24) +
((self.legacy_integrated as u32) << 16) +
((self.subaddress as u32) << 8) +
(self.featured as u32)
}
#[allow(clippy::cast_possible_truncation)]
const fn from_const_generic(const_generic: u32) -> Self {
let legacy = (const_generic >> 24) as u8;
let legacy_integrated = ((const_generic >> 16) & (u8::MAX as u32)) as u8;
let subaddress = ((const_generic >> 8) & (u8::MAX as u32)) as u8;
let featured = (const_generic & (u8::MAX as u32)) as u8;
AddressBytes { legacy, legacy_integrated, subaddress, featured }
}
}
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_config.h#L216-L225
// https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789 for featured
const MONERO_MAINNET_BYTES: AddressBytes = match AddressBytes::new(18, 19, 42, 70) {
Some(bytes) => bytes,
None => panic!("mainnet byte constants conflicted"),
};
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_config.h#L277-L281
const MONERO_STAGENET_BYTES: AddressBytes = match AddressBytes::new(24, 25, 36, 86) {
Some(bytes) => bytes,
None => panic!("stagenet byte constants conflicted"),
};
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_config.h#L262-L266
const MONERO_TESTNET_BYTES: AddressBytes = match AddressBytes::new(53, 54, 63, 111) {
Some(bytes) => bytes,
None => panic!("testnet byte constants conflicted"),
};
/// The network this address is for.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Network {
/// A mainnet address.
Mainnet,
/// A stagenet address.
///
/// Stagenet maintains parity with mainnet and is useful for testing integrations accordingly.
Stagenet,
/// A testnet address.
///
/// Testnet is used to test new consensus rules and functionality.
Testnet,
}
/// Errors when decoding an address.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum AddressError {
/// The address had an invalid (network, type) byte.
#[cfg_attr(feature = "std", error("invalid byte for the address's network/type ({0})"))]
InvalidTypeByte(u8),
/// The address wasn't a valid Base58Check (as defined by Monero) string.
#[cfg_attr(feature = "std", error("invalid address encoding"))]
InvalidEncoding,
/// The data encoded wasn't the proper length.
#[cfg_attr(feature = "std", error("invalid length"))]
InvalidLength,
/// The address had an invalid key.
#[cfg_attr(feature = "std", error("invalid key"))]
InvalidKey,
/// The address was featured with unrecognized features.
#[cfg_attr(feature = "std", error("unknown features"))]
UnknownFeatures(u64),
/// The address was for a different network than expected.
#[cfg_attr(
feature = "std",
error("different network ({actual:?}) than expected ({expected:?})")
)]
DifferentNetwork {
/// The Network expected.
expected: Network,
/// The Network embedded within the Address.
actual: Network,
},
/// The view key was of small order despite being in a guaranteed address.
#[cfg_attr(feature = "std", error("small-order view key in guaranteed address"))]
SmallOrderView,
}
/// Bytes used as prefixes when encoding addresses, variable to the network instance.
///
/// These distinguish the address's network and type.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct NetworkedAddressBytes {
mainnet: AddressBytes,
stagenet: AddressBytes,
testnet: AddressBytes,
}
impl NetworkedAddressBytes {
/// Create a new set of address bytes, one for each network.
pub const fn new(
mainnet: AddressBytes,
stagenet: AddressBytes,
testnet: AddressBytes,
) -> Option<Self> {
let res = NetworkedAddressBytes { mainnet, stagenet, testnet };
let all_bytes = res.to_const_generic();
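// Check all 12 bytes (4 address types across the 3 networks) are pairwise distinct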
let mut i = 0;
while i < 12 {
let this_byte = (all_bytes >> (32 + (i * 8))) & (u8::MAX as u128);
let mut j = 0;
while j < 12 {
if i == j {
j += 1;
continue;
}
let other_byte = (all_bytes >> (32 + (j * 8))) & (u8::MAX as u128);
if this_byte == other_byte {
return None;
}
j += 1;
}
i += 1;
}
Some(res)
}
/// Convert this set of address bytes to its representation as a u128.
///
/// We cannot use this struct directly as a const generic unfortunately.
pub const fn to_const_generic(self) -> u128 {
((self.mainnet.to_const_generic() as u128) << 96) +
((self.stagenet.to_const_generic() as u128) << 64) +
((self.testnet.to_const_generic() as u128) << 32)
}
#[allow(clippy::cast_possible_truncation)]
const fn from_const_generic(const_generic: u128) -> Self {
let mainnet = AddressBytes::from_const_generic((const_generic >> 96) as u32);
let stagenet =
AddressBytes::from_const_generic(((const_generic >> 64) & (u32::MAX as u128)) as u32);
let testnet =
AddressBytes::from_const_generic(((const_generic >> 32) & (u32::MAX as u128)) as u32);
NetworkedAddressBytes { mainnet, stagenet, testnet }
}
fn network(&self, network: Network) -> &AddressBytes {
match network {
Network::Mainnet => &self.mainnet,
Network::Stagenet => &self.stagenet,
Network::Testnet => &self.testnet,
}
}
fn byte(&self, network: Network, kind: AddressType) -> u8 {
let address_bytes = self.network(network);
match kind {
AddressType::Legacy => address_bytes.legacy,
AddressType::LegacyIntegrated(_) => address_bytes.legacy_integrated,
AddressType::Subaddress => address_bytes.subaddress,
AddressType::Featured { .. } => address_bytes.featured,
}
}
// This will return an incomplete AddressType for LegacyIntegrated/Featured.
fn metadata_from_byte(&self, byte: u8) -> Result<(Network, AddressType), AddressError> {
let mut meta = None;
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
let address_bytes = self.network(network);
if let Some(kind) = match byte {
_ if byte == address_bytes.legacy => Some(AddressType::Legacy),
_ if byte == address_bytes.legacy_integrated => Some(AddressType::LegacyIntegrated([0; 8])),
_ if byte == address_bytes.subaddress => Some(AddressType::Subaddress),
_ if byte == address_bytes.featured => {
Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
}
_ => None,
} {
meta = Some((network, kind));
break;
}
}
meta.ok_or(AddressError::InvalidTypeByte(byte))
}
}
/// The bytes used for distinguishing Monero addresses.
pub const MONERO_BYTES: NetworkedAddressBytes = match NetworkedAddressBytes::new(
MONERO_MAINNET_BYTES,
MONERO_STAGENET_BYTES,
MONERO_TESTNET_BYTES,
) {
Some(bytes) => bytes,
None => panic!("Monero network byte constants conflicted"),
};
/// A Monero address.
#[derive(Clone, Copy, PartialEq, Eq, Zeroize)]
pub struct Address<const ADDRESS_BYTES: u128> {
network: Network,
kind: AddressType,
spend: EdwardsPoint,
view: EdwardsPoint,
}
impl<const ADDRESS_BYTES: u128> fmt::Debug for Address<ADDRESS_BYTES> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let hex = |bytes: &[u8]| -> String {
let mut res = String::with_capacity(2 + (2 * bytes.len()));
res.push_str("0x");
for b in bytes {
write!(&mut res, "{b:02x}").unwrap();
}
res
};
fmt
.debug_struct("Address")
.field("network", &self.network)
.field("kind", &self.kind)
.field("spend", &hex(&self.spend.compress().to_bytes()))
.field("view", &hex(&self.view.compress().to_bytes()))
// This is not a real field yet is the most valuable thing to know when debugging
.field("(address)", &self.to_string())
.finish()
}
}
impl<const ADDRESS_BYTES: u128> fmt::Display for Address<ADDRESS_BYTES> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let address_bytes: NetworkedAddressBytes =
NetworkedAddressBytes::from_const_generic(ADDRESS_BYTES);
let mut data = vec![address_bytes.byte(self.network, self.kind)];
data.extend(self.spend.compress().to_bytes());
data.extend(self.view.compress().to_bytes());
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind {
let features_uint =
(u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress);
write_varint(&features_uint, &mut data).unwrap();
}
if let Some(id) = self.kind.payment_id() {
data.extend(id);
}
write!(f, "{}", encode_check(data))
}
}
impl<const ADDRESS_BYTES: u128> Address<ADDRESS_BYTES> {
/// Create a new address.
pub fn new(network: Network, kind: AddressType, spend: EdwardsPoint, view: EdwardsPoint) -> Self {
Address { network, kind, spend, view }
}
/// Parse an address from a String, accepting whichever network it is for.
pub fn from_str_with_unchecked_network(s: &str) -> Result<Self, AddressError> {
let raw = decode_check(s).ok_or(AddressError::InvalidEncoding)?;
let mut raw = raw.as_slice();
let address_bytes: NetworkedAddressBytes =
NetworkedAddressBytes::from_const_generic(ADDRESS_BYTES);
let (network, mut kind) = address_bytes
.metadata_from_byte(read_byte(&mut raw).map_err(|_| AddressError::InvalidLength)?)?;
let spend = read_point(&mut raw).map_err(|_| AddressError::InvalidKey)?;
let view = read_point(&mut raw).map_err(|_| AddressError::InvalidKey)?;
if matches!(kind, AddressType::Featured { .. }) {
let features = read_varint::<_, u64>(&mut raw).map_err(|_| AddressError::InvalidLength)?;
if (features >> 3) != 0 {
Err(AddressError::UnknownFeatures(features))?;
}
let subaddress = (features & 1) == 1;
let integrated = ((features >> 1) & 1) == 1;
let guaranteed = ((features >> 2) & 1) == 1;
kind =
AddressType::Featured { subaddress, payment_id: integrated.then_some([0; 8]), guaranteed };
}
// Read the payment ID, if there should be one
match kind {
AddressType::LegacyIntegrated(ref mut id) |
AddressType::Featured { payment_id: Some(ref mut id), .. } => {
*id = read_bytes(&mut raw).map_err(|_| AddressError::InvalidLength)?;
}
_ => {}
};
if !raw.is_empty() {
Err(AddressError::InvalidLength)?;
}
Ok(Address { network, kind, spend, view })
}
/// Create a new address from a `&str`.
///
/// This takes in an argument for the expected network, erroring if a distinct network was used.
/// It also errors if the address is invalid (as expected).
pub fn from_str(network: Network, s: &str) -> Result<Self, AddressError> {
Self::from_str_with_unchecked_network(s).and_then(|addr| {
if addr.network == network {
Ok(addr)
} else {
Err(AddressError::DifferentNetwork { actual: addr.network, expected: network })?
}
})
}
/// The network this address is intended for use on.
pub fn network(&self) -> Network {
self.network
}
/// The type of address this is.
pub fn kind(&self) -> &AddressType {
&self.kind
}
/// If this is a subaddress.
pub fn is_subaddress(&self) -> bool {
self.kind.is_subaddress()
}
/// The payment ID for this address.
pub fn payment_id(&self) -> Option<[u8; 8]> {
self.kind.payment_id()
}
/// If this address is guaranteed.
///
/// A guaranteed address is one where any outputs scanned to it are guaranteed to be spendable
/// under the hardness of various cryptographic problems (which are assumed hard). This is via
/// a modified shared-key derivation which eliminates the burning bug.
pub fn is_guaranteed(&self) -> bool {
self.kind.is_guaranteed()
}
/// The public spend key for this address.
pub fn spend(&self) -> EdwardsPoint {
self.spend
}
/// The public view key for this address.
pub fn view(&self) -> EdwardsPoint {
self.view
}
}
/// Instantiation of the Address type with Monero's network bytes.
pub type MoneroAddress = Address<{ MONERO_BYTES.to_const_generic() }>;

View File

@@ -0,0 +1,205 @@
use hex_literal::hex;
use rand_core::{RngCore, OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
use monero_io::decompress_point;
use crate::{Network, AddressType, MoneroAddress};
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
const STANDARD: &str =
"4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
const INTEGRATED: &str =
"4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
pXSn88oBX35";
const SUB_SPEND: [u8; 32] =
hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
const SUBADDRESS: &str =
"8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");
#[test]
fn test_encoded_len_for_bytes() {
// For an encoding of length `l`, we prune to the amount of bytes which encodes with length `l`
// This assumes length `l` -> amount of bytes has a singular answer, which is tested here
use crate::base58check::*;
let mut set = std::collections::HashSet::new();
for i in 0 .. BLOCK_LEN {
set.insert(encoded_len_for_bytes(i));
}
assert_eq!(set.len(), BLOCK_LEN);
}
#[test]
fn base58check() {
use crate::base58check::*;
assert_eq!(encode(&[]), String::new());
assert!(decode("").unwrap().is_empty());
let full_block = &[1, 2, 3, 4, 5, 6, 7, 8];
assert_eq!(&decode(&encode(full_block)).unwrap(), full_block);
let partial_block = &[1, 2, 3];
assert_eq!(&decode(&encode(partial_block)).unwrap(), partial_block);
let max_encoded_block = &[u8::MAX; 8];
assert_eq!(&decode(&encode(max_encoded_block)).unwrap(), max_encoded_block);
let max_decoded_block = "zzzzzzzzzzz";
assert!(decode(max_decoded_block).is_none());
let full_and_partial_block = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11];
assert_eq!(&decode(&encode(full_and_partial_block)).unwrap(), full_and_partial_block);
}
#[test]
fn standard_address() {
let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
assert_eq!(addr.network(), Network::Mainnet);
assert_eq!(addr.kind(), &AddressType::Legacy);
assert!(!addr.is_subaddress());
assert_eq!(addr.payment_id(), None);
assert!(!addr.is_guaranteed());
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
assert_eq!(addr.view.compress().to_bytes(), VIEW);
assert_eq!(addr.to_string(), STANDARD);
}
#[test]
fn integrated_address() {
let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
assert_eq!(addr.network(), Network::Mainnet);
assert_eq!(addr.kind(), &AddressType::LegacyIntegrated(PAYMENT_ID));
assert!(!addr.is_subaddress());
assert_eq!(addr.payment_id(), Some(PAYMENT_ID));
assert!(!addr.is_guaranteed());
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
assert_eq!(addr.view.compress().to_bytes(), VIEW);
assert_eq!(addr.to_string(), INTEGRATED);
}
#[test]
fn subaddress() {
let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
assert_eq!(addr.network(), Network::Mainnet);
assert_eq!(addr.kind(), &AddressType::Subaddress);
assert!(addr.is_subaddress());
assert_eq!(addr.payment_id(), None);
assert!(!addr.is_guaranteed());
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
assert_eq!(addr.to_string(), SUBADDRESS);
}
#[test]
fn featured() {
for (network, first) in
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
{
for _ in 0 .. 100 {
let spend = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE;
let view = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE;
for features in 0 .. (1 << 3) {
const SUBADDRESS_FEATURE_BIT: u8 = 1;
const INTEGRATED_FEATURE_BIT: u8 = 1 << 1;
const GUARANTEED_FEATURE_BIT: u8 = 1 << 2;
let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT;
let mut payment_id = [0; 8];
OsRng.fill_bytes(&mut payment_id);
let payment_id = Some(payment_id)
.filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);
let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;
let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
let addr = MoneroAddress::new(network, kind, spend, view);
assert_eq!(addr.to_string().chars().next().unwrap(), first);
assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);
assert_eq!(addr.spend, spend);
assert_eq!(addr.view, view);
assert_eq!(addr.is_subaddress(), subaddress);
assert_eq!(addr.payment_id(), payment_id);
assert_eq!(addr.is_guaranteed(), guaranteed);
}
}
}
}
#[test]
fn featured_vectors() {
#[derive(serde::Deserialize)]
struct Vector {
address: String,
network: String,
spend: String,
view: String,
subaddress: bool,
integrated: bool,
payment_id: Option<[u8; 8]>,
guaranteed: bool,
}
let vectors = serde_json::from_str::<Vec<Vector>>(FEATURED_JSON).unwrap();
for vector in vectors {
let first = vector.address.chars().next().unwrap();
let network = match vector.network.as_str() {
"Mainnet" => {
assert_eq!(first, 'C');
Network::Mainnet
}
"Testnet" => {
assert_eq!(first, 'K');
Network::Testnet
}
"Stagenet" => {
assert_eq!(first, 'F');
Network::Stagenet
}
_ => panic!("Unknown network"),
};
let spend = decompress_point(hex::decode(vector.spend).unwrap().try_into().unwrap()).unwrap();
let view = decompress_point(hex::decode(vector.view).unwrap().try_into().unwrap()).unwrap();
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
assert_eq!(addr.spend, spend);
assert_eq!(addr.view, view);
assert_eq!(addr.is_subaddress(), vector.subaddress);
assert_eq!(vector.integrated, vector.payment_id.is_some());
assert_eq!(addr.payment_id(), vector.payment_id);
assert_eq!(addr.is_guaranteed(), vector.guaranteed);
assert_eq!(
MoneroAddress::new(
network,
AddressType::Featured {
subaddress: vector.subaddress,
payment_id: vector.payment_id,
guaranteed: vector.guaranteed
},
spend,
view
)
.to_string(),
vector.address
);
}
}

View File

@@ -0,0 +1,230 @@
[
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3pYyUDn",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3wfMHCy",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJTo4p5ayvj36PStM5AX",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": true,
"payment_id": [46, 48, 134, 34, 245, 148, 243, 195],
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJWv5WqMCNE2hRs9rJfy",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": true,
"payment_id": [153, 176, 98, 204, 151, 27, 197, 168],
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4DwqwH1",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4Pyz8bD",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJcwt7hykou237MqZZDA",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": true,
"payment_id": [88, 37, 149, 111, 171, 108, 120, 181],
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJfTrFAp69u2MYbf5YeN",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": true,
"payment_id": [125, 69, 155, 152, 140, 160, 157, 186],
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712U9w7ScYA",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UA2gCrT1",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc1DbPKwJu81cxJjqBkS",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": true,
"payment_id": [92, 225, 118, 220, 39, 3, 72, 51],
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc2o1rPMaXN31Fe5J6dn",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": true,
"payment_id": [20, 120, 47, 89, 72, 165, 233, 115],
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAQHCRZ4",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAUzqaii",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcAsfQc3gJQ2gHLd5DiQ",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": true,
"payment_id": [193, 149, 123, 214, 180, 205, 195, 91],
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcDBAD5jbZQ3AMHFyvQB",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": true,
"payment_id": [205, 170, 65, 0, 51, 175, 251, 184],
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPJnBtTP",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPUrwMvP",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY5ECEhP5Nr1aCRPXdxk",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": true,
"payment_id": [173, 149, 78, 64, 215, 211, 66, 170],
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY882kTUS1D2LttnPvTR",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": true,
"payment_id": [254, 159, 186, 162, 1, 8, 156, 108],
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPpBBo8F",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPuUJX3b",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYCZPxVAoDu21DryMoto",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": true,
"payment_id": [3, 115, 230, 129, 172, 108, 116, 235],
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYFYCqKQAWL18KkpBQ8R",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": true,
"payment_id": [94, 122, 63, 167, 209, 225, 14, 180],
"guaranteed": true
}
]

View File

@@ -0,0 +1,46 @@
[package]
name = "polyseed"
version = "0.1.0"
description = "Rust implementation of Polyseed"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/polyseed"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
subtle = { version = "^2.4", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }
sha3 = { version = "0.10", default-features = false }
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
[features]
std = [
"std-shims/std",
"thiserror",
"subtle/std",
"zeroize/std",
"rand_core/std",
"sha3/std",
"pbkdf2/std",
]
default = ["std"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,11 @@
# Polyseed
Rust implementation of [Polyseed](https://github.com/tevador/polyseed).
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
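As a minimal usage sketch (assuming the crate is used as `polyseed`, with `zeroize` and
`rand_core` (providing `OsRng`) also available as dependencies):

```rust
use zeroize::Zeroizing;
use rand_core::OsRng;
use polyseed::{Language, Polyseed};

fn main() {
  // Generate a fresh seed; under `std`, the birthday is taken from the system clock.
  let seed = Polyseed::new(&mut OsRng, Language::English);
  // The 16-word mnemonic representation.
  let words: Zeroizing<String> = seed.to_string();
  // Parsing the mnemonic back recovers an equal seed.
  let recovered = Polyseed::from_string(Language::English, words).unwrap();
  assert_eq!(seed, recovered);
  // The key is derived from the seed's entropy via PBKDF2-HMAC-SHA3-256.
  let _key: Zeroizing<[u8; 32]> = recovered.key();
}
```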

View File

@@ -0,0 +1,477 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt;
use std_shims::{sync::OnceLock, string::String, collections::HashMap};
#[cfg(feature = "std")]
use std::time::{SystemTime, UNIX_EPOCH};
use subtle::ConstantTimeEq;
use zeroize::{Zeroize, Zeroizing, ZeroizeOnDrop};
use rand_core::{RngCore, CryptoRng};
use sha3::Sha3_256;
use pbkdf2::pbkdf2_hmac;
#[cfg(test)]
mod tests;
// Features
const FEATURE_BITS: u8 = 5;
#[allow(dead_code)]
const INTERNAL_FEATURES: u8 = 2;
const USER_FEATURES: u8 = 3;
const USER_FEATURES_MASK: u8 = (1 << USER_FEATURES) - 1;
const ENCRYPTED_MASK: u8 = 1 << 4;
const RESERVED_FEATURES_MASK: u8 = ((1 << FEATURE_BITS) - 1) ^ ENCRYPTED_MASK;
fn user_features(features: u8) -> u8 {
features & USER_FEATURES_MASK
}
fn polyseed_features_supported(features: u8) -> bool {
(features & RESERVED_FEATURES_MASK) == 0
}
// Dates
const DATE_BITS: u8 = 10;
const DATE_MASK: u16 = (1u16 << DATE_BITS) - 1;
const POLYSEED_EPOCH: u64 = 1635768000; // 1st November 2021 12:00 UTC
const TIME_STEP: u64 = 2629746; // 30.436875 days = 1/12 of the Gregorian year
// After ~85 years, this will roll over.
fn birthday_encode(time: u64) -> u16 {
u16::try_from((time.saturating_sub(POLYSEED_EPOCH) / TIME_STEP) & u64::from(DATE_MASK))
.expect("value masked by 2**10 - 1 didn't fit into a u16")
}
fn birthday_decode(birthday: u16) -> u64 {
POLYSEED_EPOCH + (u64::from(birthday) * TIME_STEP)
}
// Polyseed parameters
const SECRET_BITS: usize = 150;
const BITS_PER_BYTE: usize = 8;
const SECRET_SIZE: usize = SECRET_BITS.div_ceil(BITS_PER_BYTE); // 19
const CLEAR_BITS: usize = (SECRET_SIZE * BITS_PER_BYTE) - SECRET_BITS; // 2
// Polyseed calls this CLEAR_MASK and has a very complicated formula for this fundamental
// equivalency
#[allow(clippy::cast_possible_truncation)]
const LAST_BYTE_SECRET_BITS_MASK: u8 = ((1 << (BITS_PER_BYTE - CLEAR_BITS)) - 1) as u8;
const SECRET_BITS_PER_WORD: usize = 10;
// The amount of words in a seed.
const POLYSEED_LENGTH: usize = 16;
// Amount of characters each word must have if trimmed
pub(crate) const PREFIX_LEN: usize = 4;
const POLY_NUM_CHECK_DIGITS: usize = 1;
const DATA_WORDS: usize = POLYSEED_LENGTH - POLY_NUM_CHECK_DIGITS;
// Polynomial
const GF_BITS: usize = 11;
const POLYSEED_MUL2_TABLE: [u16; 8] = [5, 7, 1, 3, 13, 15, 9, 11];
type Poly = [u16; POLYSEED_LENGTH];
fn elem_mul2(x: u16) -> u16 {
if x < 1024 {
return 2 * x;
}
POLYSEED_MUL2_TABLE[usize::from(x % 8)] + (16 * ((x - 1024) / 8))
}
fn poly_eval(poly: &Poly) -> u16 {
// Horner's method at x = 2
let mut result = poly[POLYSEED_LENGTH - 1];
for i in (0 .. (POLYSEED_LENGTH - 1)).rev() {
result = elem_mul2(result) ^ poly[i];
}
result
}
// Key gen parameters
const POLYSEED_SALT: &[u8] = b"POLYSEED key";
const POLYSEED_KEYGEN_ITERATIONS: u32 = 10000;
// Polyseed technically supports multiple coins, and the value for Monero is 0
// See: https://github.com/tevador/polyseed/blob/dfb05d8edb682b0e8f743b1b70c9131712ff4157
// /include/polyseed.h#L57
const COIN: u16 = 0;
/// An error when working with a Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum PolyseedError {
/// The seed was invalid.
#[cfg_attr(feature = "std", error("invalid seed"))]
InvalidSeed,
/// The entropy was invalid.
#[cfg_attr(feature = "std", error("invalid entropy"))]
InvalidEntropy,
/// The checksum did not match the data.
#[cfg_attr(feature = "std", error("invalid checksum"))]
InvalidChecksum,
/// Unsupported feature bits were set.
#[cfg_attr(feature = "std", error("unsupported features"))]
UnsupportedFeatures,
}
/// Language options for Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Zeroize)]
pub enum Language {
/// English language option.
English,
/// Spanish language option.
Spanish,
/// French language option.
French,
/// Italian language option.
Italian,
/// Japanese language option.
Japanese,
/// Korean language option.
Korean,
/// Czech language option.
Czech,
/// Portuguese language option.
Portuguese,
/// Simplified Chinese language option.
ChineseSimplified,
/// Traditional Chinese language option.
ChineseTraditional,
}
struct WordList {
words: &'static [&'static str],
has_prefix: bool,
has_accent: bool,
}
impl WordList {
fn new(words: &'static [&'static str], has_prefix: bool, has_accent: bool) -> WordList {
let res = WordList { words, has_prefix, has_accent };
// This is needed for a later unwrap to not fail
assert!(words.len() < usize::from(u16::MAX));
res
}
}
static LANGUAGES_CELL: OnceLock<HashMap<Language, WordList>> = OnceLock::new();
#[allow(non_snake_case)]
fn LANGUAGES() -> &'static HashMap<Language, WordList> {
LANGUAGES_CELL.get_or_init(|| {
HashMap::from([
(Language::Czech, WordList::new(include!("./words/cs.rs"), true, false)),
(Language::French, WordList::new(include!("./words/fr.rs"), true, true)),
(Language::Korean, WordList::new(include!("./words/ko.rs"), false, false)),
(Language::English, WordList::new(include!("./words/en.rs"), true, false)),
(Language::Italian, WordList::new(include!("./words/it.rs"), true, false)),
(Language::Spanish, WordList::new(include!("./words/es.rs"), true, true)),
(Language::Japanese, WordList::new(include!("./words/ja.rs"), false, false)),
(Language::Portuguese, WordList::new(include!("./words/pt.rs"), true, false)),
(
Language::ChineseSimplified,
WordList::new(include!("./words/zh_simplified.rs"), false, false),
),
(
Language::ChineseTraditional,
WordList::new(include!("./words/zh_traditional.rs"), false, false),
),
])
})
}
/// A Polyseed.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Polyseed {
language: Language,
features: u8,
birthday: u16,
entropy: Zeroizing<[u8; 32]>,
checksum: u16,
}
impl fmt::Debug for Polyseed {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Polyseed").finish_non_exhaustive()
}
}
fn valid_entropy(entropy: &Zeroizing<[u8; 32]>) -> bool {
// Last byte of the entropy should only use certain bits
let mut res =
entropy[SECRET_SIZE - 1].ct_eq(&(entropy[SECRET_SIZE - 1] & LAST_BYTE_SECRET_BITS_MASK));
// Last 13 bytes of the buffer should be unused
for b in SECRET_SIZE .. entropy.len() {
res &= entropy[b].ct_eq(&0);
}
res.into()
}
impl Polyseed {
// TODO: Clean this
fn to_poly(&self) -> Poly {
let mut extra_bits = u32::from(FEATURE_BITS + DATE_BITS);
let extra_val = (u16::from(self.features) << DATE_BITS) | self.birthday;
let mut entropy_idx = 0;
let mut secret_bits = BITS_PER_BYTE;
let mut seed_rem_bits = SECRET_BITS - BITS_PER_BYTE;
let mut poly = [0; POLYSEED_LENGTH];
for i in 0 .. DATA_WORDS {
extra_bits -= 1;
let mut word_bits = 0;
let mut word_val = 0;
while word_bits < SECRET_BITS_PER_WORD {
if secret_bits == 0 {
entropy_idx += 1;
secret_bits = seed_rem_bits.min(BITS_PER_BYTE);
seed_rem_bits -= secret_bits;
}
let chunk_bits = secret_bits.min(SECRET_BITS_PER_WORD - word_bits);
secret_bits -= chunk_bits;
word_bits += chunk_bits;
word_val <<= chunk_bits;
word_val |=
(u16::from(self.entropy[entropy_idx]) >> secret_bits) & ((1u16 << chunk_bits) - 1);
}
word_val <<= 1;
word_val |= (extra_val >> extra_bits) & 1;
poly[POLY_NUM_CHECK_DIGITS + i] = word_val;
}
poly
}
fn from_internal(
language: Language,
masked_features: u8,
encoded_birthday: u16,
entropy: Zeroizing<[u8; 32]>,
) -> Result<Polyseed, PolyseedError> {
if !polyseed_features_supported(masked_features) {
Err(PolyseedError::UnsupportedFeatures)?;
}
if !valid_entropy(&entropy) {
Err(PolyseedError::InvalidEntropy)?;
}
let mut res = Polyseed {
language,
birthday: encoded_birthday,
features: masked_features,
entropy,
checksum: 0,
};
res.checksum = poly_eval(&res.to_poly());
Ok(res)
}
/// Create a new `Polyseed` with specific internals.
///
/// `birthday` is defined in seconds since the epoch.
pub fn from(
language: Language,
features: u8,
birthday: u64,
entropy: Zeroizing<[u8; 32]>,
) -> Result<Polyseed, PolyseedError> {
Self::from_internal(language, user_features(features), birthday_encode(birthday), entropy)
}
/// Create a new `Polyseed`.
///
/// This uses the system's time for the birthday, if available, else 0.
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, language: Language) -> Polyseed {
// Get the birthday
#[cfg(feature = "std")]
let birthday =
SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or(core::time::Duration::ZERO).as_secs();
#[cfg(not(feature = "std"))]
let birthday = 0;
// Derive entropy
let mut entropy = Zeroizing::new([0; 32]);
rng.fill_bytes(entropy.as_mut());
entropy[SECRET_SIZE ..].fill(0);
entropy[SECRET_SIZE - 1] &= LAST_BYTE_SECRET_BITS_MASK;
Self::from(language, 0, birthday, entropy).unwrap()
}
/// Create a new `Polyseed` from a String.
#[allow(clippy::needless_pass_by_value)]
pub fn from_string(lang: Language, seed: Zeroizing<String>) -> Result<Polyseed, PolyseedError> {
// Decode the seed into its polynomial coefficients
let mut poly = [0; POLYSEED_LENGTH];
// Validate words are in the lang word list
let lang_word_list: &WordList = &LANGUAGES()[&lang];
for (i, word) in seed.split_whitespace().enumerate() {
// Find the word's index
fn check_if_matches<S: AsRef<str>, I: Iterator<Item = S>>(
has_prefix: bool,
mut lang_words: I,
word: &str,
) -> Option<usize> {
if has_prefix {
// Get the position of the word within the iterator
// Doesn't use starts_with, as some words are substrings of others, which would lead to
// false positives
let mut get_position = || {
lang_words.position(|lang_word| {
let mut lang_word = lang_word.as_ref().chars();
let mut word = word.chars();
let mut res = true;
for _ in 0 .. PREFIX_LEN {
res &= lang_word.next() == word.next();
}
res
})
};
let res = get_position();
// If another word has this prefix, don't call it a match
if get_position().is_some() {
return None;
}
res
} else {
lang_words.position(|lang_word| lang_word.as_ref() == word)
}
}
let Some(coeff) = (if lang_word_list.has_accent {
let ascii = |word: &str| word.chars().filter(char::is_ascii).collect::<String>();
check_if_matches(
lang_word_list.has_prefix,
lang_word_list.words.iter().map(|lang_word| ascii(lang_word)),
&ascii(word),
)
} else {
check_if_matches(lang_word_list.has_prefix, lang_word_list.words.iter(), word)
}) else {
Err(PolyseedError::InvalidSeed)?
};
// WordList asserts the word list length is less than u16::MAX
poly[i] = u16::try_from(coeff).expect("coeff exceeded u16");
}
// xor out the coin
poly[POLY_NUM_CHECK_DIGITS] ^= COIN;
// Validate the checksum
if poly_eval(&poly) != 0 {
Err(PolyseedError::InvalidChecksum)?;
}
// Convert the polynomial into entropy
let mut entropy = Zeroizing::new([0; 32]);
let mut extra = 0;
let mut entropy_idx = 0;
let mut entropy_bits = 0;
let checksum = poly[0];
for mut word_val in poly.into_iter().skip(POLY_NUM_CHECK_DIGITS) {
// Parse the bottom bit, which is one of the bits of extra
// This iterates for less than 16 iters, meaning this won't drop any bits
extra <<= 1;
extra |= word_val & 1;
word_val >>= 1;
// 10 bits per word creates a [8, 2], [6, 4], [4, 6], [2, 8] cycle
// 15 % 4 is 3, leaving 2 bits off, and 152 (19 * 8) - 2 is 150, the amount of bits in the
// secret
let mut word_bits = GF_BITS - 1;
while word_bits > 0 {
if entropy_bits == BITS_PER_BYTE {
entropy_idx += 1;
entropy_bits = 0;
}
let chunk_bits = word_bits.min(BITS_PER_BYTE - entropy_bits);
word_bits -= chunk_bits;
let chunk_mask = (1u16 << chunk_bits) - 1;
if chunk_bits < BITS_PER_BYTE {
entropy[entropy_idx] <<= chunk_bits;
}
entropy[entropy_idx] |=
u8::try_from((word_val >> word_bits) & chunk_mask).expect("chunk exceeded u8");
entropy_bits += chunk_bits;
}
}
let birthday = extra & DATE_MASK;
// extra is contained to u16, and DATE_BITS > 8
let features =
u8::try_from(extra >> DATE_BITS).expect("couldn't convert extra >> DATE_BITS to u8");
let res = Self::from_internal(lang, features, birthday, entropy);
if let Ok(res) = res.as_ref() {
debug_assert_eq!(res.checksum, checksum);
}
res
}
/// When this seed was created, defined in seconds since the epoch.
pub fn birthday(&self) -> u64 {
birthday_decode(self.birthday)
}
/// This seed's features.
pub fn features(&self) -> u8 {
self.features
}
/// This seed's entropy.
pub fn entropy(&self) -> &Zeroizing<[u8; 32]> {
&self.entropy
}
/// The key derived from this seed.
pub fn key(&self) -> Zeroizing<[u8; 32]> {
let mut key = Zeroizing::new([0; 32]);
pbkdf2_hmac::<Sha3_256>(
self.entropy.as_slice(),
POLYSEED_SALT,
POLYSEED_KEYGEN_ITERATIONS,
key.as_mut(),
);
key
}
/// The String representation of this seed.
pub fn to_string(&self) -> Zeroizing<String> {
// Encode the polynomial with the existing checksum
let mut poly = self.to_poly();
poly[0] = self.checksum;
// Embed the coin
poly[POLY_NUM_CHECK_DIGITS] ^= COIN;
// Output words
let mut seed = Zeroizing::new(String::new());
let words = &LANGUAGES()[&self.language].words;
for i in 0 .. poly.len() {
seed.push_str(words[usize::from(poly[i])]);
if i < poly.len() - 1 {
seed.push(' ');
}
}
seed
}
}
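// An illustrative usage sketch of the API above: generate a seed, round-trip it
// through its mnemonic form, and derive the key. This mirrors the tests and
// assumes `rand_core::OsRng` is available (as it is for this crate's tests); the
// module and function names here are purely illustrative.
#[cfg(test)]
mod usage_sketch {
  use rand_core::OsRng;
  use super::*;
  #[test]
  fn polyseed_round_trip() {
    // Create a fresh seed for the English word list
    let seed = Polyseed::new(&mut OsRng, Language::English);
    // Serialize to its mnemonic string, then parse it back
    let recovered = Polyseed::from_string(Language::English, seed.to_string()).unwrap();
    assert_eq!(seed, recovered);
    // Derive the 32-byte key via PBKDF2, as implemented in `key`
    let _key = recovered.key();
  }
}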

View File

@@ -0,0 +1,218 @@
use zeroize::Zeroizing;
use rand_core::OsRng;
use crate::*;
#[test]
fn test_polyseed() {
struct Vector {
language: Language,
seed: String,
entropy: String,
birthday: u64,
has_prefix: bool,
has_accent: bool,
}
let vectors = [
Vector {
language: Language::English,
seed: "raven tail swear infant grief assist regular lamp \
duck valid someone little harsh puppy airport language"
.into(),
entropy: "dd76e7359a0ded37cd0ff0f3c829a5ae01673300000000000000000000000000".into(),
birthday: 1638446400,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Spanish,
seed: "eje fin parte célebre tabú pestaña lienzo puma \
prisión hora regalo lengua existir lápiz lote sonoro"
.into(),
entropy: "5a2b02df7db21fcbe6ec6df137d54c7b20fd2b00000000000000000000000000".into(),
birthday: 3118651200,
has_prefix: true,
has_accent: true,
},
Vector {
language: Language::French,
seed: "valable arracher décaler jeudi amusant dresser mener épaissir risible \
prouesse réserve ampleur ajuster muter caméra enchère"
.into(),
entropy: "11cfd870324b26657342c37360c424a14a050b00000000000000000000000000".into(),
birthday: 1679314966,
has_prefix: true,
has_accent: true,
},
Vector {
language: Language::Italian,
seed: "caduco midollo copione meninge isotopo illogico riflesso tartaruga fermento \
olandese normale tristezza episodio voragine forbito achille"
.into(),
entropy: "7ecc57c9b4652d4e31428f62bec91cfd55500600000000000000000000000000".into(),
birthday: 1679316358,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Portuguese,
seed: "caverna custear azedo adeus senador apertada sedoso omitir \
sujeito aurora videira molho cartaz gesso dentista tapar"
.into(),
entropy: "45473063711376cae38f1b3eba18c874124e1d00000000000000000000000000".into(),
birthday: 1679316657,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Czech,
seed: "usmrtit nora dotaz komunita zavalit funkce mzda sotva akce \
vesta kabel herna stodola uvolnit ustrnout email"
.into(),
entropy: "7ac8a4efd62d9c3c4c02e350d32326df37821c00000000000000000000000000".into(),
birthday: 1679316898,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Korean,
seed: "전망 선풍기 국제 무궁화 설사 기름 이론적 해안 절망 예선 \
지우개 보관 절망 말기 시각 귀신"
.into(),
entropy: "684663fda420298f42ed94b2c512ed38ddf12b00000000000000000000000000".into(),
birthday: 1679317073,
has_prefix: false,
has_accent: false,
},
Vector {
language: Language::Japanese,
seed: "うちあわせ ちつじょ つごう しはい けんこう とおる てみやげ はんとし たんとう \
といれ おさない おさえる むかう ぬぐう なふだ せまる"
.into(),
entropy: "94e6665518a6286c6e3ba508a2279eb62b771f00000000000000000000000000".into(),
birthday: 1679318722,
has_prefix: false,
has_accent: false,
},
Vector {
language: Language::ChineseTraditional,
seed: "亂 挖 斤 柄 代 圈 枝 轄 魯 論 函 開 勘 番 榮 壁".into(),
entropy: "b1594f585987ab0fd5a31da1f0d377dae5283f00000000000000000000000000".into(),
birthday: 1679426433,
has_prefix: false,
has_accent: false,
},
Vector {
language: Language::ChineseSimplified,
seed: "啊 百 族 府 票 划 伪 仓 叶 虾 借 溜 晨 左 等 鬼".into(),
entropy: "21cdd366f337b89b8d1bc1df9fe73047c22b0300000000000000000000000000".into(),
birthday: 1679426817,
has_prefix: false,
has_accent: false,
},
// The following seed requires the language specification in order to calculate
// a single valid checksum
Vector {
language: Language::Spanish,
seed: "impo sort usua cabi venu nobl oliv clim \
cont barr marc auto prod vaca torn fati"
.into(),
entropy: "dbfce25fe09b68a340e01c62417eeef43ad51800000000000000000000000000".into(),
birthday: 1701511650,
has_prefix: true,
has_accent: true,
},
];
for vector in vectors {
let add_whitespace = |mut seed: String| {
seed.push(' ');
seed
};
let seed_without_accents = |seed: &str| {
seed
.split_whitespace()
.map(|w| w.chars().filter(char::is_ascii).collect::<String>())
.collect::<Vec<_>>()
.join(" ")
};
let trim_seed = |seed: &str| {
let seed_to_trim =
if vector.has_accent { seed_without_accents(seed) } else { seed.to_string() };
seed_to_trim
.split_whitespace()
.map(|w| {
let mut ascii = 0;
let mut to_take = w.len();
for (i, char) in w.chars().enumerate() {
if char.is_ascii() {
ascii += 1;
}
if ascii == PREFIX_LEN {
// +1 to include this character, which puts us at the prefix length
to_take = i + 1;
break;
}
}
w.chars().take(to_take).collect::<String>()
})
.collect::<Vec<_>>()
.join(" ")
};
// String -> Seed
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
let seed = Polyseed::from_string(vector.language, Zeroizing::new(vector.seed.clone())).unwrap();
let trim = trim_seed(&vector.seed);
let add_whitespace = add_whitespace(vector.seed.clone());
let seed_without_accents = seed_without_accents(&vector.seed);
// Make sure a version with added whitespace still works
let whitespaced_seed =
Polyseed::from_string(vector.language, Zeroizing::new(add_whitespace)).unwrap();
assert_eq!(seed, whitespaced_seed);
// Check trimmed versions works
if vector.has_prefix {
let trimmed_seed = Polyseed::from_string(vector.language, Zeroizing::new(trim)).unwrap();
assert_eq!(seed, trimmed_seed);
}
// Check versions without accents work
if vector.has_accent {
let seed_without_accents =
Polyseed::from_string(vector.language, Zeroizing::new(seed_without_accents)).unwrap();
assert_eq!(seed, seed_without_accents);
}
let entropy = Zeroizing::new(hex::decode(vector.entropy).unwrap().try_into().unwrap());
assert_eq!(*seed.entropy(), entropy);
assert!(seed.birthday().abs_diff(vector.birthday) < TIME_STEP);
// Entropy -> Seed
let from_entropy = Polyseed::from(vector.language, 0, seed.birthday(), entropy).unwrap();
assert_eq!(seed.to_string(), from_entropy.to_string());
// Check against ourselves
{
let seed = Polyseed::new(&mut OsRng, vector.language);
println!("{}. seed: {}", line!(), *seed.to_string());
assert_eq!(seed, Polyseed::from_string(vector.language, seed.to_string()).unwrap());
assert_eq!(
seed,
Polyseed::from(vector.language, 0, seed.birthday(), seed.entropy().clone()).unwrap()
);
}
}
}
#[test]
fn test_invalid_polyseed() {
// This seed includes unsupported features bits and should error on decode
let seed = "include domain claim resemble urban hire lunch bird \
crucial fire best wife ring warm ignore model"
.into();
let res = Polyseed::from_string(Language::English, Zeroizing::new(seed));
assert_eq!(res, Err(PolyseedError::UnsupportedFeatures));
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,41 @@
[package]
name = "monero-seed"
version = "0.1.0"
description = "Rust implementation of Monero's seed algorithm"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/seed"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
monero-primitives = { path = "../../primitives", default-features = false, features = ["std"] }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"rand_core/std",
]
default = ["std"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,11 @@
# Monero Seeds
Rust implementation of Monero's seed algorithm.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
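### Example
A hedged sketch of deriving a seed from entropy and round-tripping it through
its mnemonic form (the entropy value below is purely illustrative):
```rust
use zeroize::Zeroizing;
use monero_seed::{Seed, Language};
// 32 bytes of entropy; for these seeds, the entropy is directly the spend key
let entropy = Zeroizing::new([1; 32]);
let seed = Seed::from_entropy(Language::English, entropy.clone()).unwrap();
// Parsing the mnemonic string recovers the same seed and entropy
assert_eq!(seed, Seed::from_string(Language::English, seed.to_string()).unwrap());
assert_eq!(*seed.entropy(), *entropy);
```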

View File

@@ -0,0 +1,357 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use core::{ops::Deref, fmt};
use std_shims::{
sync::OnceLock,
vec,
vec::Vec,
string::{String, ToString},
collections::HashMap,
};
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::scalar::Scalar;
#[cfg(test)]
mod tests;
// The amount of words in a seed without a checksum.
const SEED_LENGTH: usize = 24;
// The amount of words in a seed with a checksum.
const SEED_LENGTH_WITH_CHECKSUM: usize = 25;
/// An error when working with a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum SeedError {
#[cfg_attr(feature = "std", error("invalid seed"))]
/// The seed was invalid.
InvalidSeed,
/// The checksum did not match the data.
#[cfg_attr(feature = "std", error("invalid checksum"))]
InvalidChecksum,
/// The deprecated English language option was used with a checksum.
///
/// The deprecated English language option did not include a checksum.
#[cfg_attr(feature = "std", error("deprecated English language option included a checksum"))]
DeprecatedEnglishWithChecksum,
}
/// Language options.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Zeroize)]
pub enum Language {
/// Chinese language option.
Chinese,
/// English language option.
English,
/// Dutch language option.
Dutch,
/// French language option.
French,
/// Spanish language option.
Spanish,
/// German language option.
German,
/// Italian language option.
Italian,
/// Portuguese language option.
Portuguese,
/// Japanese language option.
Japanese,
/// Russian language option.
Russian,
/// Esperanto language option.
Esperanto,
/// Lojban language option.
Lojban,
/// The original, and deprecated, English language.
DeprecatedEnglish,
}
fn trim(word: &str, len: usize) -> Zeroizing<String> {
Zeroizing::new(word.chars().take(len).collect())
}
struct WordList {
word_list: &'static [&'static str],
word_map: HashMap<&'static str, usize>,
trimmed_word_map: HashMap<String, usize>,
unique_prefix_length: usize,
}
impl WordList {
fn new(word_list: &'static [&'static str], prefix_length: usize) -> WordList {
let mut lang = WordList {
word_list,
word_map: HashMap::new(),
trimmed_word_map: HashMap::new(),
unique_prefix_length: prefix_length,
};
for (i, word) in lang.word_list.iter().enumerate() {
lang.word_map.insert(word, i);
lang.trimmed_word_map.insert(trim(word, lang.unique_prefix_length).deref().clone(), i);
}
lang
}
}
static LANGUAGES_CELL: OnceLock<HashMap<Language, WordList>> = OnceLock::new();
#[allow(non_snake_case)]
fn LANGUAGES() -> &'static HashMap<Language, WordList> {
LANGUAGES_CELL.get_or_init(|| {
HashMap::from([
(Language::Chinese, WordList::new(include!("./words/zh.rs"), 1)),
(Language::English, WordList::new(include!("./words/en.rs"), 3)),
(Language::Dutch, WordList::new(include!("./words/nl.rs"), 4)),
(Language::French, WordList::new(include!("./words/fr.rs"), 4)),
(Language::Spanish, WordList::new(include!("./words/es.rs"), 4)),
(Language::German, WordList::new(include!("./words/de.rs"), 4)),
(Language::Italian, WordList::new(include!("./words/it.rs"), 4)),
(Language::Portuguese, WordList::new(include!("./words/pt.rs"), 4)),
(Language::Japanese, WordList::new(include!("./words/ja.rs"), 3)),
(Language::Russian, WordList::new(include!("./words/ru.rs"), 4)),
(Language::Esperanto, WordList::new(include!("./words/eo.rs"), 4)),
(Language::Lojban, WordList::new(include!("./words/jbo.rs"), 4)),
(Language::DeprecatedEnglish, WordList::new(include!("./words/ang.rs"), 4)),
])
})
}
fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
let mut trimmed_words = Zeroizing::new(String::new());
for w in words {
*trimmed_words += &trim(w, lang.unique_prefix_length);
}
const fn crc32_table() -> [u32; 256] {
let poly = 0xedb88320u32;
let mut res = [0; 256];
let mut i = 0;
while i < 256 {
let mut entry = i;
let mut b = 0;
while b < 8 {
let trigger = entry & 1;
entry >>= 1;
if trigger == 1 {
entry ^= poly;
}
b += 1;
}
res[i as usize] = entry;
i += 1;
}
res
}
const CRC32_TABLE: [u32; 256] = crc32_table();
let trimmed_words = trimmed_words.as_bytes();
let mut checksum = u32::MAX;
for i in 0 .. trimmed_words.len() {
checksum = CRC32_TABLE[usize::from(u8::try_from(checksum % 256).unwrap() ^ trimmed_words[i])] ^
(checksum >> 8);
}
usize::try_from(!checksum).unwrap() % words.len()
}
// Convert a private key to a seed
#[allow(clippy::needless_pass_by_value)]
fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> Seed {
let bytes = Zeroizing::new(key.to_bytes());
// get the language words
let words = &LANGUAGES()[&lang].word_list;
let list_len = u64::try_from(words.len()).unwrap();
// To store the found words & add the checksum word later.
let mut seed = Vec::with_capacity(25);
// convert to words
// 4 bytes -> 3 words. 8 digits base 16 -> 3 digits base 1626
let mut segment = [0; 4];
let mut indices = [0; 4];
for i in 0 .. 8 {
// convert the first 4 bytes to a u32 & get the word indices
let start = i * 4;
// convert 4 bytes to a u32
segment.copy_from_slice(&bytes[start .. (start + 4)]);
// Actually convert to a u64 so we can add without overflowing
indices[0] = u64::from(u32::from_le_bytes(segment));
indices[1] = indices[0];
indices[0] /= list_len;
indices[2] = indices[0] + indices[1];
indices[0] /= list_len;
indices[3] = indices[0] + indices[2];
// append words to seed
for i in indices.iter().skip(1) {
let word = usize::try_from(i % list_len).unwrap();
seed.push(Zeroizing::new(words[word].to_string()));
}
}
segment.zeroize();
indices.zeroize();
// create a checksum word for all languages except old english
if lang != Language::DeprecatedEnglish {
let checksum = seed[checksum_index(&seed, &LANGUAGES()[&lang])].clone();
seed.push(checksum);
}
let mut res = Zeroizing::new(String::new());
for (i, word) in seed.iter().enumerate() {
if i != 0 {
*res += " ";
}
*res += word;
}
Seed(lang, res)
}
// Convert a seed to bytes
fn seed_to_bytes(lang: Language, words: &str) -> Result<Zeroizing<[u8; 32]>, SeedError> {
// get seed words
let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
if (words.len() != SEED_LENGTH) && (words.len() != SEED_LENGTH_WITH_CHECKSUM) {
Err(SeedError::InvalidSeed)?;
}
let has_checksum = words.len() == SEED_LENGTH_WITH_CHECKSUM;
if has_checksum && lang == Language::DeprecatedEnglish {
Err(SeedError::DeprecatedEnglishWithChecksum)?;
}
// Validate words are in the language word list
let lang_word_list: &WordList = &LANGUAGES()[&lang];
let matched_indices = (|| {
let has_checksum = words.len() == SEED_LENGTH_WITH_CHECKSUM;
let mut matched_indices = Zeroizing::new(vec![]);
// Iterate through all the words and see if they're all present
for word in &words {
let trimmed = trim(word, lang_word_list.unique_prefix_length);
let word = if has_checksum { &trimmed } else { word };
if let Some(index) = if has_checksum {
lang_word_list.trimmed_word_map.get(word.deref())
} else {
lang_word_list.word_map.get(&word.as_str())
} {
matched_indices.push(*index);
} else {
Err(SeedError::InvalidSeed)?;
}
}
if has_checksum {
// exclude the last word when calculating a checksum.
let last_word = words.last().unwrap().clone();
let checksum = words[checksum_index(&words[.. words.len() - 1], lang_word_list)].clone();
// check the trimmed checksum and trimmed last word line up
if trim(&checksum, lang_word_list.unique_prefix_length) !=
trim(&last_word, lang_word_list.unique_prefix_length)
{
Err(SeedError::InvalidChecksum)?;
}
}
Ok(matched_indices)
})()?;
// convert to bytes
let mut res = Zeroizing::new([0; 32]);
let mut indices = Zeroizing::new([0; 4]);
for i in 0 .. 8 {
// read 3 indices at a time
let i3 = i * 3;
indices[1] = matched_indices[i3];
indices[2] = matched_indices[i3 + 1];
indices[3] = matched_indices[i3 + 2];
let inner = |i| {
let mut base = (lang_word_list.word_list.len() - indices[i] + indices[i + 1]) %
lang_word_list.word_list.len();
// Shift the index over
for _ in 0 .. i {
base *= lang_word_list.word_list.len();
}
base
};
// set the last index
indices[0] = indices[1] + inner(1) + inner(2);
if (indices[0] % lang_word_list.word_list.len()) != indices[1] {
Err(SeedError::InvalidSeed)?;
}
let pos = i * 4;
let mut bytes = u32::try_from(indices[0]).unwrap().to_le_bytes();
res[pos .. (pos + 4)].copy_from_slice(&bytes);
bytes.zeroize();
}
Ok(res)
}
/// A Monero seed.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct Seed(Language, Zeroizing<String>);
impl fmt::Debug for Seed {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Seed").finish_non_exhaustive()
}
}
impl Seed {
/// Create a new seed.
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Seed {
let mut scalar_bytes = Zeroizing::new([0; 64]);
rng.fill_bytes(scalar_bytes.as_mut());
key_to_seed(lang, Zeroizing::new(Scalar::from_bytes_mod_order_wide(scalar_bytes.deref())))
}
/// Parse a seed from a string.
#[allow(clippy::needless_pass_by_value)]
pub fn from_string(lang: Language, words: Zeroizing<String>) -> Result<Seed, SeedError> {
let entropy = seed_to_bytes(lang, &words)?;
// Make sure this is a valid scalar
let scalar = Scalar::from_canonical_bytes(*entropy);
if scalar.is_none().into() {
Err(SeedError::InvalidSeed)?;
}
let mut scalar = scalar.unwrap();
scalar.zeroize();
// Call from_entropy so a trimmed seed becomes a full seed
Ok(Self::from_entropy(lang, entropy).unwrap())
}
/// Create a seed from entropy.
#[allow(clippy::needless_pass_by_value)]
pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Seed> {
Option::from(Scalar::from_canonical_bytes(*entropy))
.map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
}
/// Convert a seed to a string.
pub fn to_string(&self) -> Zeroizing<String> {
self.1.clone()
}
/// Return the entropy underlying this seed.
pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
seed_to_bytes(self.0, &self.1).unwrap()
}
}
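// An illustrative helper sketch (names here are not part of the original file):
// for these original-style seeds, the 32 bytes of entropy are directly the spend
// key, so recovering it as a Scalar is just a canonical decode, as the tests also
// rely on.
#[cfg(test)]
#[allow(dead_code)]
fn spend_key_from_seed(seed: &Seed) -> Option<Scalar> {
  // Construction already guaranteed canonicity, so this is expected to be Some
  Option::<Scalar>::from(Scalar::from_canonical_bytes(*seed.entropy()))
}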

View File

@@ -0,0 +1,234 @@
use zeroize::Zeroizing;
use rand_core::OsRng;
use curve25519_dalek::scalar::Scalar;
use monero_primitives::keccak256;
use crate::*;
#[test]
fn test_original_seed() {
struct Vector {
language: Language,
seed: String,
spend: String,
view: String,
}
let vectors = [
Vector {
language: Language::Chinese,
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
},
Vector {
language: Language::English,
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
abnormal memoir nylon mostly building shrugged online ember northern \
ruby woes dauntless boil family illness inroads northern"
.into(),
spend: "c0af65c0dd837e666b9d0dfed62745f4df35aed7ea619b2798a709f0fe545403".into(),
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
},
Vector {
language: Language::Dutch,
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
.into(),
spend: "e2d2873085c447c2bc7664222ac8f7d240df3aeac137f5ff2022eaa629e5b10a".into(),
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
},
Vector {
language: Language::French,
seed: "poids vaseux tarte bazar poivre effet entier nuance \
sensuel ennui pacte osselet poudre battre alibi mouton \
stade paquet pliage gibier type question position projet pliage"
.into(),
spend: "2dd39ff1a4628a94b5c2ec3e42fb3dfe15c2b2f010154dc3b3de6791e805b904".into(),
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
},
Vector {
language: Language::Spanish,
seed: "minero ocupar mirar evadir octubre cal logro miope \
opaco disco ancla litio clase cuello nasal clase \
fiar avance deseo mente grumo negro cordón croqueta clase"
.into(),
spend: "ae2c9bebdddac067d73ec0180147fc92bdf9ac7337f1bcafbbe57dd13558eb02".into(),
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
},
Vector {
language: Language::German,
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
.into(),
spend: "79801b7a1b9796856e2397d862a113862e1fdc289a205e79d8d70995b276db06".into(),
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
},
Vector {
language: Language::Italian,
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
forzare meritare litigare lezione segreto evasione votare buio \
licenza cliente dorso natale crescere vento tutelare vetta evasione"
.into(),
spend: "5e7fd774eb00fa5877e2a8b4dc9c7ffe111008a3891220b56a6e49ac816d650a".into(),
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
},
Vector {
language: Language::Portuguese,
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
.into(),
spend: "13b3115f37e35c6aa1db97428b897e584698670c1b27854568d678e729200c0f".into(),
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
},
Vector {
language: Language::Japanese,
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
.into(),
spend: "c56e895cdb13007eda8399222974cdbab493640663804b93cbef3d8c3df80b0b".into(),
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
},
Vector {
language: Language::Russian,
seed: "шатер икра нация ехать получать инерция доза реальный \
рыжий таможня лопата душа веселый клетка атлас лекция \
обгонять паек наивный лыжный дурак стать ежик задача паек"
.into(),
spend: "7cb5492df5eb2db4c84af20766391cd3e3662ab1a241c70fc881f3d02c381f05".into(),
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
},
Vector {
language: Language::Esperanto,
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
.into(),
spend: "82ebf0336d3b152701964ed41df6b6e9a035e57fc98b84039ed0bd4611c58904".into(),
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
},
Vector {
language: Language::Lojban,
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
blabi darno dembi janli blabi fenki bukpu burcu blabi"
.into(),
spend: "e4f8c6819ab6cf792cebb858caabac9307fd646901d72123e0367ebc0a79c200".into(),
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
},
Vector {
language: Language::DeprecatedEnglish,
seed: "glorious especially puff son moment add youth nowhere \
throw glide grip wrong rhythm consume very swear \
bitter heavy eventually begin reason flirt type unable"
.into(),
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
},
// The following seeds require the language specification in order to calculate
// a single valid checksum
Vector {
language: Language::Spanish,
seed: "pluma laico atraer pintor peor cerca balde buscar \
lancha batir nulo reloj resto gemelo nevera poder columna gol \
oveja latir amplio bolero feliz fuerza nevera"
.into(),
spend: "30303983fc8d215dd020cc6b8223793318d55c466a86e4390954f373fdc7200a".into(),
view: "97c649143f3c147ba59aa5506cc09c7992c5c219bb26964442142bf97980800e".into(),
},
Vector {
language: Language::Spanish,
seed: "pluma pluma pluma pluma pluma pluma pluma pluma \
pluma pluma pluma pluma pluma pluma pluma pluma \
pluma pluma pluma pluma pluma pluma pluma pluma pluma"
.into(),
spend: "b4050000b4050000b4050000b4050000b4050000b4050000b4050000b4050000".into(),
view: "d73534f7912b395eb70ef911791a2814eb6df7ce56528eaaa83ff2b72d9f5e0f".into(),
},
Vector {
language: Language::English,
seed: "plus plus plus plus plus plus plus plus \
plus plus plus plus plus plus plus plus \
plus plus plus plus plus plus plus plus plus"
.into(),
spend: "3b0400003b0400003b0400003b0400003b0400003b0400003b0400003b040000".into(),
view: "43a8a7715eed11eff145a2024ddcc39740255156da7bbd736ee66a0838053a02".into(),
},
Vector {
language: Language::Spanish,
seed: "audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio audio"
.into(),
spend: "ba000000ba000000ba000000ba000000ba000000ba000000ba000000ba000000".into(),
view: "1437256da2c85d029b293d8c6b1d625d9374969301869b12f37186e3f906c708".into(),
},
Vector {
language: Language::English,
seed: "audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio audio"
.into(),
spend: "7900000079000000790000007900000079000000790000007900000079000000".into(),
view: "20bec797ab96780ae6a045dd816676ca7ed1d7c6773f7022d03ad234b581d600".into(),
},
];
for vector in vectors {
fn trim_by_lang(word: &str, lang: Language) -> String {
if lang != Language::DeprecatedEnglish {
word.chars().take(LANGUAGES()[&lang].unique_prefix_length).collect()
} else {
word.to_string()
}
}
let trim_seed = |seed: &str| {
seed
.split_whitespace()
.map(|word| trim_by_lang(word, vector.language))
.collect::<Vec<_>>()
.join(" ")
};
// Test against Monero
{
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
let seed = Seed::from_string(vector.language, Zeroizing::new(vector.seed.clone())).unwrap();
let trim = trim_seed(&vector.seed);
assert_eq!(seed, Seed::from_string(vector.language, Zeroizing::new(trim)).unwrap());
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
// For original seeds, Monero directly uses the entropy as a spend key
assert_eq!(
Option::<Scalar>::from(Scalar::from_canonical_bytes(*seed.entropy())),
Option::<Scalar>::from(Scalar::from_canonical_bytes(spend)),
);
let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
// Monero then derives the view key as H(spend)
assert_eq!(
Scalar::from_bytes_mod_order(keccak256(spend)),
Scalar::from_canonical_bytes(view).unwrap()
);
assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
}
// Test against ourselves
{
let seed = Seed::new(&mut OsRng, vector.language);
println!("{}. seed: {}", line!(), *seed.to_string());
let trim = trim_seed(&seed.to_string());
assert_eq!(seed, Seed::from_string(vector.language, Zeroizing::new(trim)).unwrap());
assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
assert_eq!(seed, Seed::from_string(vector.language, seed.to_string()).unwrap());
}
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,311 @@
use std_shims::{io, vec::Vec, string::ToString, collections::HashSet};
use zeroize::{Zeroize, ZeroizeOnDrop};
use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};
#[cfg(not(feature = "std"))]
use rand_distr::num_traits::Float;
use curve25519_dalek::{Scalar, EdwardsPoint};
use crate::{
DEFAULT_LOCK_WINDOW, COINBASE_LOCK_WINDOW, BLOCK_TIME,
primitives::{Commitment, Decoys},
rpc::{RpcError, DecoyRpc},
output::OutputData,
WalletOutput,
};
const RECENT_WINDOW: usize = 15;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
#[allow(clippy::cast_precision_loss)]
const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64;
async fn select_n(
rng: &mut (impl RngCore + CryptoRng),
rpc: &impl DecoyRpc,
height: usize,
real_output: u64,
ring_len: usize,
fingerprintable_deterministic: bool,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
if height < DEFAULT_LOCK_WINDOW {
Err(RpcError::InternalError("not enough blocks to select decoys".to_string()))?;
}
if height > rpc.get_output_distribution_end_height().await? {
Err(RpcError::InternalError(
"decoys being requested from blocks this node doesn't have".to_string(),
))?;
}
// Get the distribution
let distribution = rpc.get_output_distribution(.. height).await?;
if distribution.len() < DEFAULT_LOCK_WINDOW {
Err(RpcError::InternalError("not enough blocks to select decoys".to_string()))?;
}
let highest_output_exclusive_bound = distribution[distribution.len() - DEFAULT_LOCK_WINDOW];
// This assumes that each miner TX had one output (as sane) and checks we have sufficient
// outputs even when excluding them (due to their own timelock requirements)
// Considering this a temporal error for very new chains, it's sufficiently sane to have
if highest_output_exclusive_bound.saturating_sub(u64::try_from(COINBASE_LOCK_WINDOW).unwrap()) <
u64::try_from(ring_len).unwrap()
{
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
// Determine the outputs per second
#[allow(clippy::cast_precision_loss)]
let per_second = {
let blocks = distribution.len().min(BLOCKS_PER_YEAR);
let initial = distribution[distribution.len().saturating_sub(blocks + 1)];
let outputs = distribution[distribution.len() - 1].saturating_sub(initial);
(outputs as f64) / ((blocks * BLOCK_TIME) as f64)
};
// Don't select the real output
let mut do_not_select = HashSet::new();
do_not_select.insert(real_output);
let decoy_count = ring_len - 1;
let mut res = Vec::with_capacity(decoy_count);
let mut iters = 0;
// Iterates until we have enough decoys
// If an iteration only returns a partial set of decoys, the remainder will be obvious as decoys
// to the RPC
// The length of that remainder is expected to be minimal
while res.len() != decoy_count {
iters += 1;
#[cfg(not(test))]
const MAX_ITERS: usize = 10;
// When testing on fresh chains, increased iterations can be useful and we don't require
// reasonable performance
#[cfg(test)]
const MAX_ITERS: usize = 100;
// Ensure this isn't infinitely looping
// We check both that we aren't at the maximum amount of iterations and that the not-yet
// selected candidates exceed the amount of candidates necessary to trigger the next iteration
if (iters == MAX_ITERS) ||
((highest_output_exclusive_bound - u64::try_from(do_not_select.len()).unwrap()) <
u64::try_from(ring_len).unwrap())
{
Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?;
}
let remaining = decoy_count - res.len();
let mut candidates = Vec::with_capacity(remaining);
while candidates.len() != remaining {
// Use a gamma distribution, as Monero does
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
// /src/wallet/wallet2.cpp#L142-L143
let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
#[allow(clippy::cast_precision_loss)]
if age > TIP_APPLICATION {
age -= TIP_APPLICATION;
} else {
// f64 does not have try_from available, which is why these are written with `as`
age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
}
#[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
let o = (age * per_second) as u64;
if o < highest_output_exclusive_bound {
// Find which block this points to
let i = distribution.partition_point(|s| *s < (highest_output_exclusive_bound - 1 - o));
let prev = i.saturating_sub(1);
let n = distribution[i] - distribution[prev];
if n != 0 {
// Select an output from within this block
let o = distribution[prev] + (rng.next_u64() % n);
if !do_not_select.contains(&o) {
candidates.push(o);
// This output will either be used or is unusable
// In either case, we should not try it again
do_not_select.insert(o);
}
}
}
}
// If this is the first time we're requesting these outputs, include the real one as well
// Prevents the node we're connected to from having a list of known decoys and then seeing a
// TX which uses all of them, with one additional output (the true spend)
let real_index = if iters == 1 {
candidates.push(real_output);
// Sort candidates so the real spends aren't the ones at the end
candidates.sort();
Some(candidates.binary_search(&real_output).unwrap())
} else {
None
};
for (i, output) in rpc
.get_unlocked_outputs(&candidates, height, fingerprintable_deterministic)
.await?
.iter_mut()
.enumerate()
{
// We could check the returned info is equivalent to our expectations, yet that'd allow the
// node to malleate the returned info to see if they can cause this error (allowing them to
// figure out the output being spent)
//
// Some degree of this attack (forcing resampling/trying to observe errors) is likely
// always possible
if real_index == Some(i) {
continue;
}
// If this is an unlocked output, push it to the result
if let Some(output) = output.take() {
res.push((candidates[i], output));
}
}
}
Ok(res)
}
async fn select_decoys<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &impl DecoyRpc,
ring_len: usize,
height: usize,
input: &WalletOutput,
fingerprintable_deterministic: bool,
) -> Result<Decoys, RpcError> {
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
let decoys = select_n(
rng,
rpc,
height,
input.relative_id.index_on_blockchain,
ring_len,
fingerprintable_deterministic,
)
.await?;
// Form the complete ring
let mut ring = decoys;
ring.push((input.relative_id.index_on_blockchain, [input.key(), input.commitment().calculate()]));
ring.sort_by(|a, b| a.0.cmp(&b.0));
/*
Monero does have sanity checks which it applies to the selected ring.
They're statistically unlikely to be hit and only occur when the transaction is published over
the RPC (so they are not a relay rule). The RPC allows disabling them, which monero-rpc does to
ensure they don't pose a problem.
They aren't worth the complexity to implement here, especially since they're non-deterministic.
*/
// We need to convert our positional indexes to offset indexes
let mut offsets = Vec::with_capacity(ring.len());
{
offsets.push(ring[0].0);
for m in 1 .. ring.len() {
offsets.push(ring[m].0 - ring[m - 1].0);
}
}
Ok(
Decoys::new(
offsets,
// Binary searches for the real spend since we don't know where it sorted to
u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)).unwrap(),
ring.into_iter().map(|output| output.1).collect(),
)
.unwrap(),
)
}
/// An output with decoys selected.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct OutputWithDecoys {
output: OutputData,
decoys: Decoys,
}
impl OutputWithDecoys {
/// Select decoys for this output.
pub async fn new(
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
rpc: &impl DecoyRpc,
ring_len: usize,
height: usize,
output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
let decoys = select_decoys(rng, rpc, ring_len, height, &output, false).await?;
Ok(OutputWithDecoys { output: output.data.clone(), decoys })
}
/// Select a set of decoys for this output with a deterministic process.
///
/// This function will always output the same set of decoys when called with the same arguments.
/// This makes it very useful in multisignature contexts, where instead of having one participant
/// select the decoys, everyone can locally select the decoys while coming to the same result.
///
/// The set of decoys selected may be fingerprintable as having been produced by this
/// methodology.
pub async fn fingerprintable_deterministic_new(
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
rpc: &impl DecoyRpc,
ring_len: usize,
height: usize,
output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
let decoys = select_decoys(rng, rpc, ring_len, height, &output, true).await?;
Ok(OutputWithDecoys { output: output.data.clone(), decoys })
}
/// The key this output may be spent by.
pub fn key(&self) -> EdwardsPoint {
self.output.key()
}
/// The scalar to add to the private spend key for it to be the discrete logarithm of this
/// output's key.
pub fn key_offset(&self) -> Scalar {
self.output.key_offset
}
/// The commitment this output created.
pub fn commitment(&self) -> &Commitment {
&self.output.commitment
}
/// The decoys this output selected.
pub fn decoys(&self) -> &Decoys {
&self.decoys
}
/// Write the OutputWithDecoys.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
self.output.write(w)?;
self.decoys.write(w)
}
/// Serialize the OutputWithDecoys to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(128);
self.write(&mut serialized).unwrap();
serialized
}
/// Read an OutputWithDecoys.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
Ok(Self { output: OutputData::read(r)?, decoys: Decoys::read(r)? })
}
}
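// A hedged usage sketch: select decoys for a scanned output ahead of building a
// transaction. The RPC handle, RNG, and output are assumed to be supplied by the
// caller (e.g. an RPC client implementing `DecoyRpc` and a `Scanner` result), and
// the function name is purely illustrative. For multisig,
// `fingerprintable_deterministic_new` can be substituted so every participant
// derives the same decoys.
#[allow(dead_code)]
async fn select_decoys_for_spend(
  rng: &mut (impl Send + Sync + RngCore + CryptoRng),
  rpc: &impl DecoyRpc,
  height: usize,
  output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
  // 16 is the ring length used by Monero at the time of writing
  OutputWithDecoys::new(rng, rpc, 16, height, output).await
}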

View File

@@ -0,0 +1,309 @@
use core::ops::BitXor;
use std_shims::{
vec,
vec::Vec,
io::{self, Read, BufRead, Write},
};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use monero_serai::io::*;
pub(crate) const MAX_TX_EXTRA_PADDING_COUNT: usize = 255;
const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;
const PAYMENT_ID_MARKER: u8 = 0;
const ENCRYPTED_PAYMENT_ID_MARKER: u8 = 1;
// Used as it's the highest value not interpretable as a continued VarInt
pub(crate) const ARBITRARY_DATA_MARKER: u8 = 127;
/// The max amount of data which will fit within a blob of arbitrary data.
// 1 byte is used for the marker
pub const MAX_ARBITRARY_DATA_SIZE: usize = MAX_TX_EXTRA_NONCE_SIZE - 1;
/// A Payment ID.
///
/// This is a legacy method of identifying why Monero was sent to the receiver.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum PaymentId {
/// A deprecated form of payment ID which is no longer supported.
Unencrypted([u8; 32]),
/// An encrypted payment ID.
Encrypted([u8; 8]),
}
impl BitXor<[u8; 8]> for PaymentId {
type Output = PaymentId;
fn bitxor(self, bytes: [u8; 8]) -> PaymentId {
match self {
// Don't perform the xor since this isn't intended to be encrypted with xor
PaymentId::Unencrypted(_) => self,
PaymentId::Encrypted(id) => {
PaymentId::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes())
}
}
}
}
impl PaymentId {
/// Write the PaymentId.
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
match self {
PaymentId::Unencrypted(id) => {
w.write_all(&[PAYMENT_ID_MARKER])?;
w.write_all(id)?;
}
PaymentId::Encrypted(id) => {
w.write_all(&[ENCRYPTED_PAYMENT_ID_MARKER])?;
w.write_all(id)?;
}
}
Ok(())
}
/// Serialize the PaymentId to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1 + 8);
self.write(&mut res).unwrap();
res
}
/// Read a PaymentId.
pub fn read<R: Read>(r: &mut R) -> io::Result<PaymentId> {
Ok(match read_byte(r)? {
0 => PaymentId::Unencrypted(read_bytes(r)?),
1 => PaymentId::Encrypted(read_bytes(r)?),
_ => Err(io::Error::other("unknown payment ID type"))?,
})
}
}
/// A field within the TX extra.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub enum ExtraField {
/// Padding.
///
/// This is a block of zeroes within the TX extra.
Padding(usize),
/// The transaction key.
///
/// This is a commitment to the randomness used for deriving outputs.
PublicKey(EdwardsPoint),
/// The nonce field.
///
/// This is used for data, such as payment IDs.
Nonce(Vec<u8>),
/// The field for merge-mining.
///
/// This is used within miner transactions which are merge-mining Monero to specify the foreign
/// block they mined.
MergeMining(usize, [u8; 32]),
/// The additional transaction keys.
///
/// These are the per-output commitments to the randomness used for deriving outputs.
PublicKeys(Vec<EdwardsPoint>),
/// The 'mysterious' Minergate tag.
///
/// This was used by a closed source entity without documentation. Support for parsing it was
/// added to reduce extra which couldn't be decoded.
MysteriousMinergate(Vec<u8>),
}
impl ExtraField {
/// Write the ExtraField.
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
match self {
ExtraField::Padding(size) => {
w.write_all(&[0])?;
for _ in 1 .. *size {
write_byte(&0u8, w)?;
}
}
ExtraField::PublicKey(key) => {
w.write_all(&[1])?;
w.write_all(&key.compress().to_bytes())?;
}
ExtraField::Nonce(data) => {
w.write_all(&[2])?;
write_vec(write_byte, data, w)?;
}
ExtraField::MergeMining(height, merkle) => {
w.write_all(&[3])?;
write_varint(&u64::try_from(*height).unwrap(), w)?;
w.write_all(merkle)?;
}
ExtraField::PublicKeys(keys) => {
w.write_all(&[4])?;
write_vec(write_point, keys, w)?;
}
ExtraField::MysteriousMinergate(data) => {
w.write_all(&[0xDE])?;
write_vec(write_byte, data, w)?;
}
}
Ok(())
}
/// Serialize the ExtraField to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1 + 8);
self.write(&mut res).unwrap();
res
}
/// Read an ExtraField.
pub fn read<R: BufRead>(r: &mut R) -> io::Result<ExtraField> {
Ok(match read_byte(r)? {
0 => ExtraField::Padding({
// Read until either non-zero, max padding count, or end of buffer
let mut size: usize = 1;
loop {
let buf = r.fill_buf()?;
let mut n_consume = 0;
for v in buf {
if *v != 0u8 {
Err(io::Error::other("non-zero value after padding"))?
}
n_consume += 1;
size += 1;
if size > MAX_TX_EXTRA_PADDING_COUNT {
Err(io::Error::other("padding exceeded max count"))?
}
}
if n_consume == 0 {
break;
}
r.consume(n_consume);
}
size
}),
1 => ExtraField::PublicKey(read_point(r)?),
2 => ExtraField::Nonce({
let nonce = read_vec(read_byte, r)?;
if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
Err(io::Error::other("too long nonce"))?;
}
nonce
}),
3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?),
4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, r)?),
_ => Err(io::Error::other("unknown extra field"))?,
})
}
}
/// The result of decoding a transaction's extra field.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Extra(pub(crate) Vec<ExtraField>);
impl Extra {
/// The keys within this extra.
///
/// This returns all keys specified with `PublicKey` and the first set of keys specified with
/// `PublicKeys`, so long as they're well-formed.
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
// /src/wallet/wallet2.cpp#L2290-L2300
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/wallet/wallet2.cpp#L2337-L2340
pub fn keys(&self) -> Option<(Vec<EdwardsPoint>, Option<Vec<EdwardsPoint>>)> {
let mut keys = vec![];
let mut additional = None;
for field in &self.0 {
match field.clone() {
ExtraField::PublicKey(this_key) => keys.push(this_key),
ExtraField::PublicKeys(these_additional) => {
additional = additional.or(Some(these_additional))
}
_ => (),
}
}
// Don't return any keys if this was non-standard and didn't include the primary key
if keys.is_empty() {
None
} else {
Some((keys, additional))
}
}
/// The payment ID embedded within this extra.
// Monero finds the first nonce field and reads the payment ID from it:
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
// src/wallet/wallet2.cpp#L2709-L2752
pub fn payment_id(&self) -> Option<PaymentId> {
for field in &self.0 {
if let ExtraField::Nonce(data) = field {
return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
}
}
None
}
/// The arbitrary data within this extra.
///
/// This uses a marker custom to monero-wallet.
pub fn data(&self) -> Vec<Vec<u8>> {
let mut res = vec![];
for field in &self.0 {
if let ExtraField::Nonce(data) = field {
if data.first() == Some(&ARBITRARY_DATA_MARKER) {
res.push(data[1 ..].to_vec());
}
}
}
res
}
pub(crate) fn new(key: EdwardsPoint, additional: Vec<EdwardsPoint>) -> Extra {
let mut res = Extra(Vec::with_capacity(3));
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_basic/cryptonote_format_utils.cpp#L627-L633
// We only support pushing nonces which come after these in the sort order
res.0.push(ExtraField::PublicKey(key));
if !additional.is_empty() {
res.0.push(ExtraField::PublicKeys(additional));
}
res
}
pub(crate) fn push_nonce(&mut self, nonce: Vec<u8>) {
self.0.push(ExtraField::Nonce(nonce));
}
/// Write the Extra.
///
/// This is not of deterministic length nor length-prefixed. It should only be written to a
/// buffer which will be delimited.
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
for field in &self.0 {
field.write(w)?;
}
Ok(())
}
/// Serialize the Extra to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = vec![];
self.write(&mut buf).unwrap();
buf
}
/// Read an `Extra`.
///
/// This is not of deterministic length nor length-prefixed. It should only be read from a buffer
/// already delimited.
#[allow(clippy::unnecessary_wraps)]
pub fn read<R: BufRead>(r: &mut R) -> io::Result<Extra> {
let mut res = Extra(vec![]);
// Extra reads until EOF
// We take a BufRead so we can detect when the buffer is empty
// `fill_buf` returns the current buffer, filled if empty, only empty if the reader is
// exhausted
while !r.fill_buf()?.is_empty() {
res.0.push(ExtraField::read(r)?);
}
Ok(res)
}
}
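// A hedged usage sketch: decode a transaction's extra bytes and pull out the
// payment ID and any arbitrary data. The raw bytes are assumed to come from a
// parsed transaction's extra, and the function name is purely illustrative;
// `&[u8]` implements `BufRead` under the default `std` feature.
#[allow(dead_code)]
fn decode_extra(mut raw_extra: &[u8]) -> Option<(Option<PaymentId>, Vec<Vec<u8>>)> {
  // `Extra::read` consumes the reader until EOF
  let extra = Extra::read(&mut raw_extra).ok()?;
  // The payment ID, if any, comes from the first nonce field
  let payment_id = extra.payment_id();
  // Arbitrary data uses monero-wallet's custom marker
  Some((payment_id, extra.data()))
}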

View File

@@ -0,0 +1,160 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use std_shims::vec::Vec;
use zeroize::{Zeroize, Zeroizing};
use curve25519_dalek::{Scalar, EdwardsPoint};
use monero_serai::{
io::write_varint,
primitives::{Commitment, keccak256, keccak256_to_scalar},
ringct::EncryptedAmount,
transaction::Input,
};
pub use monero_serai::*;
pub use monero_rpc as rpc;
pub use monero_address as address;
mod view_pair;
pub use view_pair::{ViewPair, GuaranteedViewPair};
/// Structures and functionality for working with transactions' extra fields.
pub mod extra;
pub(crate) use extra::{PaymentId, Extra};
pub(crate) mod output;
pub use output::WalletOutput;
mod scan;
pub use scan::{Scanner, GuaranteedScanner};
mod decoys;
pub use decoys::OutputWithDecoys;
/// Structs and functionality for sending transactions.
pub mod send;
#[cfg(test)]
mod tests;
#[derive(Clone, PartialEq, Eq, Zeroize)]
struct SharedKeyDerivations {
// Hs("view_tag" || 8Ra || o)
view_tag: u8,
// Hs(uniqueness || 8Ra || o) where uniqueness may be empty
shared_key: Scalar,
}
impl SharedKeyDerivations {
// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
fn uniqueness(inputs: &[Input]) -> [u8; 32] {
let mut u = b"uniqueness".to_vec();
for input in inputs {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works, and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
write_varint(height, &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
}
keccak256(u)
}
#[allow(clippy::needless_pass_by_value)]
fn output_derivations(
uniqueness: Option<[u8; 32]>,
ecdh: Zeroizing<EdwardsPoint>,
o: usize,
) -> Zeroizing<SharedKeyDerivations> {
// 8Ra
let mut output_derivation = Zeroizing::new(
Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(),
);
// || o
{
let output_derivation: &mut Vec<u8> = output_derivation.as_mut();
write_varint(&o, output_derivation).unwrap();
}
let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
// uniqueness ||
let output_derivation = if let Some(uniqueness) = uniqueness {
Zeroizing::new([uniqueness.as_ref(), &output_derivation].concat())
} else {
output_derivation
};
Zeroizing::new(SharedKeyDerivations {
view_tag,
shared_key: keccak256_to_scalar(&output_derivation),
})
}
// H(8Ra || 0x8d)
#[allow(clippy::needless_pass_by_value)]
fn payment_id_xor(ecdh: Zeroizing<EdwardsPoint>) -> [u8; 8] {
// 8Ra
let output_derivation = Zeroizing::new(
Zeroizing::new(Zeroizing::new(ecdh.mul_by_cofactor()).compress().to_bytes()).to_vec(),
);
let mut payment_id_xor = [0; 8];
payment_id_xor
.copy_from_slice(&keccak256([output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
payment_id_xor
}
fn commitment_mask(&self) -> Scalar {
let mut mask = b"commitment_mask".to_vec();
mask.extend(self.shared_key.as_bytes());
let res = keccak256_to_scalar(&mask);
mask.zeroize();
res
}
fn compact_amount_encryption(&self, amount: u64) -> [u8; 8] {
let mut amount_mask = Zeroizing::new(b"amount".to_vec());
amount_mask.extend(self.shared_key.to_bytes());
let mut amount_mask = keccak256(&amount_mask);
let mut amount_mask_8 = [0; 8];
amount_mask_8.copy_from_slice(&amount_mask[.. 8]);
amount_mask.zeroize();
(amount ^ u64::from_le_bytes(amount_mask_8)).to_le_bytes()
}
fn decrypt(&self, enc_amount: &EncryptedAmount) -> Commitment {
match enc_amount {
// TODO: Add a test vector for this
EncryptedAmount::Original { mask, amount } => {
let mask_shared_sec = keccak256(self.shared_key.as_bytes());
let mask =
Scalar::from_bytes_mod_order(*mask) - Scalar::from_bytes_mod_order(mask_shared_sec);
let amount_shared_sec = keccak256(mask_shared_sec);
let amount_scalar =
Scalar::from_bytes_mod_order(*amount) - Scalar::from_bytes_mod_order(amount_shared_sec);
// d2b from rctTypes.cpp
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
Commitment::new(mask, amount)
}
EncryptedAmount::Compact { amount } => Commitment::new(
self.commitment_mask(),
u64::from_le_bytes(self.compact_amount_encryption(u64::from_le_bytes(*amount))),
),
}
}
}
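// A hedged self-check of the scheme above: `compact_amount_encryption` XORs the
// amount with a mask keyed by the shared key, so applying it twice recovers the
// amount. This is exactly what `decrypt` relies on for `EncryptedAmount::Compact`.
// The shared key and amount used here are arbitrary.
#[cfg(test)]
#[test]
fn compact_amount_encryption_is_an_involution() {
  let derivations = SharedKeyDerivations { view_tag: 0, shared_key: Scalar::from(123u64) };
  let amount = 1_000_000_000_000u64;
  let encrypted = derivations.compact_amount_encryption(amount);
  let decrypted =
    u64::from_le_bytes(derivations.compact_amount_encryption(u64::from_le_bytes(encrypted)));
  assert_eq!(amount, decrypted);
}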

View File

@@ -0,0 +1,352 @@
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{Scalar, edwards::EdwardsPoint};
use crate::{
io::*, primitives::Commitment, transaction::Timelock, address::SubaddressIndex, extra::PaymentId,
};
/// An absolute output ID, defined as its transaction hash and output index.
///
/// This is not the output's key as multiple outputs may share an output key.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub(crate) struct AbsoluteId {
pub(crate) transaction: [u8; 32],
pub(crate) index_in_transaction: u32,
}
impl core::fmt::Debug for AbsoluteId {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("AbsoluteId")
.field("transaction", &hex::encode(self.transaction))
.field("index_in_transaction", &self.index_in_transaction)
.finish()
}
}
impl AbsoluteId {
/// Write the AbsoluteId.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.transaction)?;
w.write_all(&self.index_in_transaction.to_le_bytes())
}
/// Read an AbsoluteId.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u32(r)? })
}
}
/// An output's relative ID.
///
/// This is defined as the output's index on the blockchain.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub(crate) struct RelativeId {
pub(crate) index_on_blockchain: u64,
}
impl core::fmt::Debug for RelativeId {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt.debug_struct("RelativeId").field("index_on_blockchain", &self.index_on_blockchain).finish()
}
}
impl RelativeId {
/// Write the RelativeId.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.index_on_blockchain.to_le_bytes())
}
/// Read a RelativeId.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(RelativeId { index_on_blockchain: read_u64(r)? })
}
}
/// The data within an output, as necessary to spend the output.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub(crate) struct OutputData {
pub(crate) key: EdwardsPoint,
pub(crate) key_offset: Scalar,
pub(crate) commitment: Commitment,
}
impl core::fmt::Debug for OutputData {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("OutputData")
.field("key", &hex::encode(self.key.compress().0))
.field("key_offset", &hex::encode(self.key_offset.to_bytes()))
.field("commitment", &self.commitment)
.finish()
}
}
impl OutputData {
/// The key this output may be spent by.
pub(crate) fn key(&self) -> EdwardsPoint {
self.key
}
/// The scalar to add to the private spend key for it to be the discrete logarithm of this
/// output's key.
pub(crate) fn key_offset(&self) -> Scalar {
self.key_offset
}
/// The commitment this output created.
pub(crate) fn commitment(&self) -> &Commitment {
&self.commitment
}
/// Write the OutputData.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub(crate) fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.key.compress().to_bytes())?;
w.write_all(&self.key_offset.to_bytes())?;
self.commitment.write(w)
}
/*
/// Serialize the OutputData to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 32 + 40);
self.write(&mut res).unwrap();
res
}
*/
/// Read an OutputData.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub(crate) fn read<R: Read>(r: &mut R) -> io::Result<OutputData> {
Ok(OutputData {
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::read(r)?,
})
}
}
/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub(crate) struct Metadata {
pub(crate) additional_timelock: Timelock,
pub(crate) subaddress: Option<SubaddressIndex>,
pub(crate) payment_id: Option<PaymentId>,
pub(crate) arbitrary_data: Vec<Vec<u8>>,
}
impl core::fmt::Debug for Metadata {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("Metadata")
.field("additional_timelock", &self.additional_timelock)
.field("subaddress", &self.subaddress)
.field("payment_id", &self.payment_id)
.field("arbitrary_data", &self.arbitrary_data.iter().map(hex::encode).collect::<Vec<_>>())
.finish()
}
}
impl Metadata {
/// Write the Metadata.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.additional_timelock.write(w)?;
if let Some(subaddress) = self.subaddress {
w.write_all(&[1])?;
w.write_all(&subaddress.account().to_le_bytes())?;
w.write_all(&subaddress.address().to_le_bytes())?;
} else {
w.write_all(&[0])?;
}
if let Some(payment_id) = self.payment_id {
w.write_all(&[1])?;
payment_id.write(w)?;
} else {
w.write_all(&[0])?;
}
w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
for part in &self.arbitrary_data {
w.write_all(&[u8::try_from(part.len()).unwrap()])?;
w.write_all(part)?;
}
Ok(())
}
/// Read a Metadata.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn read<R: Read>(r: &mut R) -> io::Result<Metadata> {
let additional_timelock = Timelock::read(r)?;
let subaddress = match read_byte(r)? {
0 => None,
1 => Some(
SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
.ok_or_else(|| io::Error::other("invalid subaddress in metadata"))?,
),
_ => Err(io::Error::other("invalid subaddress is_some boolean in metadata"))?,
};
Ok(Metadata {
additional_timelock,
subaddress,
payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
arbitrary_data: {
let mut data = vec![];
for _ in 0 .. read_u32(r)? {
let len = read_byte(r)?;
data.push(read_raw_vec(read_byte, usize::from(len), r)?);
}
data
},
})
}
}
/// A scanned output and all associated data.
///
/// This struct contains all data necessary to spend this output, or handle it as a payment.
///
/// This struct is bound to a specific instance of the blockchain. If the blockchain reorganizes
/// the block this struct is bound to, it MUST be discarded. If any outputs are mutual to both
/// blockchains, scanning the new blockchain will yield those outputs again.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct WalletOutput {
/// The absolute ID for this transaction.
pub(crate) absolute_id: AbsoluteId,
/// The ID for this transaction, relative to the blockchain.
pub(crate) relative_id: RelativeId,
/// The output's data.
pub(crate) data: OutputData,
/// Associated metadata relevant for handling it as a payment.
pub(crate) metadata: Metadata,
}
impl WalletOutput {
/// The hash of the transaction which created this output.
pub fn transaction(&self) -> [u8; 32] {
self.absolute_id.transaction
}
/// The index of the output within the transaction.
pub fn index_in_transaction(&self) -> u32 {
self.absolute_id.index_in_transaction
}
/// The index of the output on the blockchain.
pub fn index_on_blockchain(&self) -> u64 {
self.relative_id.index_on_blockchain
}
/// The key this output may be spent by.
pub fn key(&self) -> EdwardsPoint {
self.data.key()
}
/// The scalar to add to the private spend key for it to be the discrete logarithm of this
/// output's key.
pub fn key_offset(&self) -> Scalar {
self.data.key_offset()
}
/// The commitment this output created.
pub fn commitment(&self) -> &Commitment {
self.data.commitment()
}
/// The additional timelock this output is subject to.
///
/// All outputs are subject to the '10-block lock', a 10-block window after their inclusion
/// on-chain during which they cannot be spent. Outputs may be additionally timelocked. This
/// function only returns the additional timelock.
pub fn additional_timelock(&self) -> Timelock {
self.metadata.additional_timelock
}
/// The index of the subaddress this output was identified as sent to.
pub fn subaddress(&self) -> Option<SubaddressIndex> {
self.metadata.subaddress
}
/// The payment ID included with this output.
///
/// This field may be `Some` even if wallet2 would not return a payment ID. This will happen if
/// the scanned output belongs to the subaddress which spent Monero within the transaction which
/// created the output. If multiple subaddresses spent Monero within this transaction, the
/// subaddress associated with the key image at the highest index is considered the one
/// spending.
// TODO: Clarify and cite for point A ("highest index spent key image"??)
pub fn payment_id(&self) -> Option<PaymentId> {
self.metadata.payment_id
}
/// The arbitrary data from the `extra` field of the transaction which created this output.
pub fn arbitrary_data(&self) -> &[Vec<u8>] {
&self.metadata.arbitrary_data
}
/// Write the WalletOutput.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.absolute_id.write(w)?;
self.relative_id.write(w)?;
self.data.write(w)?;
self.metadata.write(w)
}
/// Serialize the WalletOutput to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(128);
self.write(&mut serialized).unwrap();
serialized
}
/// Read a WalletOutput.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read<R: Read>(r: &mut R) -> io::Result<WalletOutput> {
Ok(WalletOutput {
absolute_id: AbsoluteId::read(r)?,
relative_id: RelativeId::read(r)?,
data: OutputData::read(r)?,
metadata: Metadata::read(r)?,
})
}
}
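// A minimal sketch of round-tripping a scanned output through the custom serialization above.
// This is only a local persistence format, not a Monero protocol format. The helper name
// `persist_and_restore` is hypothetical; `output` is assumed to come from a `Scanner`.
fn persist_and_restore(output: &WalletOutput) -> io::Result<WalletOutput> {
  // Serialize the absolute ID, relative ID, output data, and metadata
  let bytes = output.serialize();
  // Read it back from the byte slice
  WalletOutput::read(&mut bytes.as_slice())
}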

View File

@@ -0,0 +1,419 @@
use core::ops::Deref;
use std_shims::{alloc::format, vec, vec::Vec, string::ToString, collections::HashMap};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
use monero_rpc::{RpcError, Rpc};
use monero_serai::{
io::*,
primitives::Commitment,
transaction::{Timelock, Pruned, Transaction},
block::Block,
};
use crate::{
address::SubaddressIndex, ViewPair, GuaranteedViewPair, output::*, PaymentId, Extra,
SharedKeyDerivations,
};
/// A collection of potentially additionally timelocked outputs.
#[derive(Zeroize, ZeroizeOnDrop)]
pub struct Timelocked(Vec<WalletOutput>);
impl Timelocked {
/// Return the outputs which aren't subject to an additional timelock.
#[must_use]
pub fn not_additionally_locked(self) -> Vec<WalletOutput> {
let mut res = vec![];
for output in &self.0 {
if output.additional_timelock() == Timelock::None {
res.push(output.clone());
}
}
res
}
/// Return the outputs whose additional timelock unlocks by the specified block/time.
///
/// Additional timelocks are almost never used outside of miner transactions, and are
/// increasingly planned for removal. Ignoring non-miner additionally-timelocked outputs is
/// recommended.
///
/// `block` is the block number of the block the additional timelock must be satisfied by.
///
/// `time` is represented in seconds since the epoch. Please note Monero uses an on-chain
/// deterministic clock for time which is subject to variance from real-world time. This time
/// argument will be evaluated against Monero's clock, not the local system's clock.
#[must_use]
pub fn additional_timelock_satisfied_by(self, block: usize, time: u64) -> Vec<WalletOutput> {
let mut res = vec![];
for output in &self.0 {
if (output.additional_timelock() <= Timelock::Block(block)) ||
(output.additional_timelock() <= Timelock::Time(time))
{
res.push(output.clone());
}
}
res
}
/// Ignore the timelocks and return all outputs within this container.
#[must_use]
pub fn ignore_additional_timelock(mut self) -> Vec<WalletOutput> {
let mut res = vec![];
core::mem::swap(&mut self.0, &mut res);
res
}
}
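// A minimal sketch of consuming a `Timelocked` container, assuming `timelocked` came from
// `Scanner::scan`. `block` and `time` are the height and on-chain clock the caller evaluates
// additional timelocks against. The helper name `spendable_now` is hypothetical.
fn spendable_now(timelocked: Timelocked, block: usize, time: u64) -> Vec<WalletOutput> {
  // Alternatives: `not_additionally_locked()` to keep only outputs without an additional
  // timelock, or `ignore_additional_timelock()` to keep everything regardless of timelocks
  timelocked.additional_timelock_satisfied_by(block, time)
}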
#[derive(Clone)]
struct InternalScanner {
pair: ViewPair,
guaranteed: bool,
subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
}
impl Zeroize for InternalScanner {
fn zeroize(&mut self) {
self.pair.zeroize();
self.guaranteed.zeroize();
// This may not be effective, unfortunately
for (mut key, mut value) in self.subaddresses.drain() {
key.zeroize();
value.zeroize();
}
}
}
impl Drop for InternalScanner {
fn drop(&mut self) {
self.zeroize();
}
}
impl ZeroizeOnDrop for InternalScanner {}
impl InternalScanner {
fn new(pair: ViewPair, guaranteed: bool) -> Self {
let mut subaddresses = HashMap::new();
subaddresses.insert(pair.spend().compress(), None);
Self { pair, guaranteed, subaddresses }
}
fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
let (spend, _) = self.pair.subaddress_keys(subaddress);
self.subaddresses.insert(spend.compress(), Some(subaddress));
}
fn scan_transaction(
&self,
tx_start_index_on_blockchain: u64,
tx_hash: [u8; 32],
tx: &Transaction<Pruned>,
) -> Result<Timelocked, RpcError> {
// Only scan TXs creating RingCT outputs
// For the full details on why this check is equivalent, please see the documentation in `scan`
if tx.version() != 2 {
return Ok(Timelocked(vec![]));
}
// Read the extra field
let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()) else {
return Ok(Timelocked(vec![]));
};
let Some((tx_keys, additional)) = extra.keys() else {
return Ok(Timelocked(vec![]));
};
let payment_id = extra.payment_id();
let mut res = vec![];
for (o, output) in tx.prefix().outputs.iter().enumerate() {
let Some(output_key) = decompress_point(output.key.to_bytes()) else { continue };
// Monero checks with each TX key and with the additional key for this output
// This will be None if there are no additional keys, Some(None) if there are additional keys
// yet not one for this output (which is non-standard), and Some(Some(_)) if there's an
// additional key for this output
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_basic/cryptonote_format_utils.cpp#L1060-L1070
let additional = additional.as_ref().map(|additional| additional.get(o));
#[allow(clippy::manual_let_else)]
for key in tx_keys.iter().map(|key| Some(Some(key))).chain(core::iter::once(additional)) {
// Get the key, or continue if there isn't one
let key = match key {
Some(Some(key)) => key,
Some(None) | None => continue,
};
// Calculate the ECDH
let ecdh = Zeroizing::new(self.pair.view.deref() * key);
let output_derivations = SharedKeyDerivations::output_derivations(
if self.guaranteed {
Some(SharedKeyDerivations::uniqueness(&tx.prefix().inputs))
} else {
None
},
ecdh.clone(),
o,
);
// Check the view tag matches, if there is a view tag
if let Some(actual_view_tag) = output.view_tag {
if actual_view_tag != output_derivations.view_tag {
continue;
}
}
// P - shared == spend
let Some(subaddress) = ({
// The output key may have a torsion component of order dividing 8
// Our subtracting of a prime-order element means any torsion will be preserved
// If someone wanted to malleate output keys with distinct torsions, only one will be
// scanned accordingly (the one whose torsion matches the spend key's)
let subaddress_spend_key =
output_key - (&output_derivations.shared_key * ED25519_BASEPOINT_TABLE);
self.subaddresses.get(&subaddress_spend_key.compress())
}) else {
continue;
};
let subaddress = *subaddress;
// The key offset is this shared key
let mut key_offset = output_derivations.shared_key;
if let Some(subaddress) = subaddress {
// And if this was to a subaddress, it's additionally the offset from subaddress spend
// key to the normal spend key
key_offset += self.pair.subaddress_derivation(subaddress);
}
// Since we've found an output to us, get its amount
let mut commitment = Commitment::zero();
// Miner transaction
if let Some(amount) = output.amount {
commitment.amount = amount;
// Regular transaction
} else {
let Transaction::V2 { proofs: Some(ref proofs), .. } = &tx else {
// Invalid transaction, as of consensus rules at the time of writing this code
Err(RpcError::InvalidNode("non-miner v2 transaction without RCT proofs".to_string()))?
};
commitment = match proofs.base.encrypted_amounts.get(o) {
Some(amount) => output_derivations.decrypt(amount),
// Invalid transaction, as of consensus rules at the time of writing this code
None => Err(RpcError::InvalidNode(
"RCT proofs without an encrypted amount per output".to_string(),
))?,
};
// Rebuild the commitment to verify it
if Some(&commitment.calculate()) != proofs.base.commitments.get(o) {
continue;
}
}
// Decrypt the payment ID
let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh));
res.push(WalletOutput {
absolute_id: AbsoluteId {
transaction: tx_hash,
index_in_transaction: o.try_into().unwrap(),
},
relative_id: RelativeId {
index_on_blockchain: tx_start_index_on_blockchain + u64::try_from(o).unwrap(),
},
data: OutputData { key: output_key, key_offset, commitment },
metadata: Metadata {
additional_timelock: tx.prefix().additional_timelock,
subaddress,
payment_id,
arbitrary_data: extra.data(),
},
});
// Break to prevent public keys from being included multiple times, triggering multiple
// inclusions of the same output
break;
}
}
Ok(Timelocked(res))
}
async fn scan(&mut self, rpc: &impl Rpc, block: &Block) -> Result<Timelocked, RpcError> {
if block.header.hardfork_version > 16 {
Err(RpcError::InternalError(format!(
"scanning a hardfork {} block, when we only support up to 16",
block.header.hardfork_version
)))?;
}
// We obtain all TXs in full
let mut txs_with_hashes = vec![(
block.miner_transaction.hash(),
Transaction::<Pruned>::from(block.miner_transaction.clone()),
)];
let txs = rpc.get_pruned_transactions(&block.transactions).await?;
for (hash, tx) in block.transactions.iter().zip(txs) {
txs_with_hashes.push((*hash, tx));
}
/*
Requesting the output index for each output we successfully scan would cause a loss of privacy.
We could instead request the output indexes for all outputs we scan, yet this would notably
increase the number of RPC calls we make.
We solve this by requesting the output index for the first RingCT output in the block, which
should be within the miner transaction. Then, as we scan transactions, we update the output
index ourselves.
Please note we will only scan RingCT outputs, so we only need to track the RingCT output
index. This decision was made due to spending CN outputs potentially having burdensome
requirements (the need to make a v1 TX due to insufficient decoys).
We bound ourselves to only scanning RingCT outputs by only scanning v2 transactions. This is
safe and correct since:
1) v1 transactions cannot create RingCT outputs.
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
/src/cryptonote_basic/cryptonote_format_utils.cpp#L866-L869
2) v2 miner transactions implicitly create RingCT outputs.
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
/src/blockchain_db/blockchain_db.cpp#L232-L241
3) v2 transactions must create RingCT outputs.
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
/src/cryptonote_core/blockchain.cpp#L3055-L3065
That does depend on the hard fork version being >= 3, yet all v2 TXs have a hard fork
version > 3.
https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
/src/cryptonote_core/blockchain.cpp#L3417
*/
// Get the starting index
let mut tx_start_index_on_blockchain = {
let mut tx_start_index_on_blockchain = None;
for (hash, tx) in &txs_with_hashes {
// If this TX doesn't create RingCT outputs, or has no outputs, move to the next TX
if (!matches!(tx, Transaction::V2 { .. })) || tx.prefix().outputs.is_empty() {
continue;
}
let index = *rpc.get_o_indexes(*hash).await?.first().ok_or_else(|| {
RpcError::InvalidNode(
"requested output indexes for a TX with outputs and got none".to_string(),
)
})?;
tx_start_index_on_blockchain = Some(index);
break;
}
let Some(tx_start_index_on_blockchain) = tx_start_index_on_blockchain else {
// Block had no RingCT outputs
return Ok(Timelocked(vec![]));
};
tx_start_index_on_blockchain
};
let mut res = Timelocked(vec![]);
for (hash, tx) in txs_with_hashes {
// Push all outputs into our result
{
let mut this_txs_outputs = vec![];
core::mem::swap(
&mut self.scan_transaction(tx_start_index_on_blockchain, hash, &tx)?.0,
&mut this_txs_outputs,
);
res.0.extend(this_txs_outputs);
}
// Update the RingCT starting index for the next TX
if matches!(tx, Transaction::V2 { .. }) {
tx_start_index_on_blockchain += u64::try_from(tx.prefix().outputs.len()).unwrap()
}
}
// If the block's version is >= 12, drop all unencrypted payment IDs
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
// src/wallet/wallet2.cpp#L2739-L2744
if block.header.hardfork_version >= 12 {
for output in &mut res.0 {
if matches!(output.metadata.payment_id, Some(PaymentId::Unencrypted(_))) {
output.metadata.payment_id = None;
}
}
}
Ok(res)
}
}
/// A transaction scanner to find outputs received.
///
/// When an output is successfully scanned, the output key MUST be checked against the local
/// database for lack of prior observation. If it was prior observed, that output is an instance
/// of the
/// [burning bug](https://web.getmonero.org/2018/09/25/a-post-mortum-of-the-burning-bug.html) and
/// MAY be unspendable. Only the prior received output(s) or the newly received output will be
/// spendable (as spending one will burn all of them).
///
/// Once checked, the output key MUST be saved to the local database so future checks can be
/// performed.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct Scanner(InternalScanner);
impl Scanner {
/// Create a Scanner from a ViewPair.
pub fn new(pair: ViewPair) -> Self {
Self(InternalScanner::new(pair, false))
}
/// Register a subaddress to scan for.
///
/// Subaddresses must be explicitly registered ahead of time in order to be successfully scanned.
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
self.0.register_subaddress(subaddress)
}
/// Scan a block.
pub async fn scan(&mut self, rpc: &impl Rpc, block: &Block) -> Result<Timelocked, RpcError> {
self.0.scan(rpc, block).await
}
}
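// A hedged sketch of the scanning flow described above: scan a block, then check each output key
// against a locally persisted set before accepting it, per the burning bug warning. `rpc` is any
// `Rpc` implementation, `block` is a fetched block, and `seen` (a `HashMap` used as a set here)
// stands in for whatever database the caller maintains. The helper name `scan_and_filter` is
// hypothetical.
async fn scan_and_filter(
  scanner: &mut Scanner,
  rpc: &impl Rpc,
  block: &Block,
  seen: &mut HashMap<[u8; 32], ()>,
) -> Result<Vec<WalletOutput>, RpcError> {
  let mut res = vec![];
  for output in scanner.scan(rpc, block).await?.not_additionally_locked() {
    // An output key observed before is an instance of the burning bug and may be unspendable
    if seen.insert(output.key().compress().to_bytes(), ()).is_some() {
      continue;
    }
    res.push(output);
  }
  Ok(res)
}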
/// A transaction scanner to find outputs received which are guaranteed to be spendable.
///
/// 'Guaranteed' outputs, i.e. transaction outputs resistant to the burning bug, are not officially specified
/// by the Monero project. They should only be used if necessary. No support outside of
/// monero-wallet is promised.
///
/// "guaranteed to be spendable" assumes satisfaction of any timelocks in effect.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct GuaranteedScanner(InternalScanner);
impl GuaranteedScanner {
/// Create a GuaranteedScanner from a GuaranteedViewPair.
pub fn new(pair: GuaranteedViewPair) -> Self {
Self(InternalScanner::new(pair.0, true))
}
/// Register a subaddress to scan for.
///
/// Subaddresses must be explicitly registered ahead of time in order to be successfully scanned.
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
self.0.register_subaddress(subaddress)
}
/// Scan a block.
pub async fn scan(&mut self, rpc: &impl Rpc, block: &Block) -> Result<Timelocked, RpcError> {
self.0.scan(rpc, block).await
}
}
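// A minimal sketch of the guaranteed flow, assuming the caller holds a `GuaranteedViewPair` and
// only hands out addresses derived from it. Because the guaranteed derivations incorporate the
// transaction's inputs, outputs found here avoid the burning bug. The helper name
// `scan_guaranteed` is hypothetical.
async fn scan_guaranteed(
  pair: GuaranteedViewPair,
  rpc: &impl Rpc,
  block: &Block,
) -> Result<Vec<WalletOutput>, RpcError> {
  let mut scanner = GuaranteedScanner::new(pair);
  Ok(scanner.scan(rpc, block).await?.not_additionally_locked())
}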

View File

@@ -0,0 +1,137 @@
use std_shims::{vec::Vec, io};
use zeroize::Zeroize;
use crate::{
ringct::PrunedRctProofs,
transaction::{Input, Timelock, Pruned, Transaction},
send::SignableTransaction,
};
/// The eventual output of a SignableTransaction.
///
/// If a SignableTransaction is signed and published on-chain, it will create a Transaction
/// identifiable to whoever else has the same SignableTransaction (with the same outgoing view
/// key). This structure enables checking if an on-chain Transaction is in fact that eventual transaction.
///
/// Since Monero is a privacy coin without outgoing view keys, this only performs a fuzzy match.
/// The fuzzy match executes over the outputs and associated data necessary to work with the
/// outputs (the transaction randomness, the ciphertexts). This check does not verify whether the
/// inputs intended to be spent were actually the inputs spent (as that is infeasible).
///
/// The transaction randomness does bind to the inputs intended to be spent, so an on-chain
/// transaction will not match for multiple `Eventuality`s unless the `SignableTransaction`s they
/// were built from were in conflict (and their intended transactions cannot simultaneously exist
/// on-chain).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Eventuality(SignableTransaction);
impl From<SignableTransaction> for Eventuality {
fn from(tx: SignableTransaction) -> Eventuality {
Eventuality(tx)
}
}
impl Eventuality {
/// Return the `extra` field any transaction following this intent would use.
///
/// This enables building a HashMap of Extra -> Eventuality for efficiently fetching the
/// `Eventuality` an on-chain transaction may complete.
///
/// This extra is cryptographically bound to the inputs intended to be spent. If the
/// `SignableTransaction`s the `Eventuality`s are built from are not in conflict (their intended
/// transactions can simultaneously exist on-chain), then each extra will only have a single
/// Eventuality associated (barring the break of a cryptographic problem considered hard).
pub fn extra(&self) -> Vec<u8> {
self.0.extra()
}
/// Return if this TX matches the SignableTransaction this was created from.
///
/// Matching the SignableTransaction means this transaction created the expected outputs, they're
/// scannable, they're not locked, and this transaction claims to use the intended inputs (though
/// this is not guaranteed). This 'claim' is evaluated by checking the transaction uses the
/// transaction keys derived from the intended inputs. This ensures two SignableTransactions with
/// the same intended payments don't match for each other's `Eventuality`s (as they'll have
/// distinct intended inputs).
#[must_use]
pub fn matches(&self, tx: &Transaction<Pruned>) -> bool {
// Verify extra
if self.0.extra() != tx.prefix().extra {
return false;
}
// Also ensure no timelock was set
if tx.prefix().additional_timelock != Timelock::None {
return false;
}
// Check the amount of inputs aligns
if tx.prefix().inputs.len() != self.0.inputs.len() {
return false;
}
// Collect the key images used by this transaction
let Ok(key_images) = tx
.prefix()
.inputs
.iter()
.map(|input| match input {
Input::Gen(_) => Err(()),
Input::ToKey { key_image, .. } => Ok(*key_image),
})
.collect::<Result<Vec<_>, _>>()
else {
return false;
};
// Check the outputs
if self.0.outputs(&key_images) != tx.prefix().outputs {
return false;
}
// Check the encrypted amounts and commitments
let commitments_and_encrypted_amounts = self.0.commitments_and_encrypted_amounts(&key_images);
let Transaction::V2 { proofs: Some(PrunedRctProofs { ref base, .. }), .. } = tx else {
return false;
};
if base.commitments !=
commitments_and_encrypted_amounts
.iter()
.map(|(commitment, _)| commitment.calculate())
.collect::<Vec<_>>()
{
return false;
}
if base.encrypted_amounts !=
commitments_and_encrypted_amounts.into_iter().map(|(_, amount)| amount).collect::<Vec<_>>()
{
return false;
}
true
}
/// Write the Eventuality.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
self.0.write(w)
}
/// Serialize the Eventuality to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
self.0.serialize()
}
/// Read an Eventuality.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Eventuality> {
Ok(Eventuality(SignableTransaction::read(r)?))
}
}
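// A minimal sketch of the lookup pattern described above: index `Eventuality`s by their `extra`,
// then check an on-chain transaction against the single candidate sharing its extra. The helper
// name `matched_eventuality` is hypothetical.
fn matched_eventuality<'a>(
  eventualities: &'a std_shims::collections::HashMap<Vec<u8>, Eventuality>,
  tx: &Transaction<Pruned>,
) -> Option<&'a Eventuality> {
  // `matches` also checks the outputs, encrypted amounts, and commitments
  eventualities.get(&tx.prefix().extra).filter(|eventuality| eventuality.matches(tx))
}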

View File

@@ -0,0 +1,581 @@
use core::{ops::Deref, fmt};
use std_shims::{
io, vec,
vec::Vec,
string::{String, ToString},
};
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
#[cfg(feature = "multisig")]
use frost::FrostError;
use crate::{
io::*,
generators::{MAX_COMMITMENTS, hash_to_point},
ringct::{
clsag::{ClsagError, ClsagContext, Clsag},
RctType, RctPrunable, RctProofs,
},
transaction::Transaction,
address::{Network, SubaddressIndex, MoneroAddress},
extra::MAX_ARBITRARY_DATA_SIZE,
rpc::FeeRate,
ViewPair, GuaranteedViewPair, OutputWithDecoys,
};
mod tx_keys;
mod tx;
mod eventuality;
pub use eventuality::Eventuality;
#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{TransactionMachine, TransactionSignMachine, TransactionSignatureMachine};
pub(crate) fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}
#[derive(Clone, PartialEq, Eq, Zeroize)]
enum ChangeEnum {
AddressOnly(MoneroAddress),
Standard { view_pair: ViewPair, subaddress: Option<SubaddressIndex> },
Guaranteed { view_pair: GuaranteedViewPair, subaddress: Option<SubaddressIndex> },
}
impl fmt::Debug for ChangeEnum {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ChangeEnum::AddressOnly(addr) => {
f.debug_struct("ChangeEnum::AddressOnly").field("addr", &addr).finish()
}
ChangeEnum::Standard { subaddress, .. } => f
.debug_struct("ChangeEnum::Standard")
.field("subaddress", &subaddress)
.finish_non_exhaustive(),
ChangeEnum::Guaranteed { subaddress, .. } => f
.debug_struct("ChangeEnum::Guaranteed")
.field("subaddress", &subaddress)
.finish_non_exhaustive(),
}
}
}
/// Specification for a change output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Change(Option<ChangeEnum>);
impl Change {
/// Create a change output specification.
///
/// This takes the view key, as Monero assumes the sender has the view key for change outputs and
/// optimizes its wallet protocol accordingly.
pub fn new(view_pair: ViewPair, subaddress: Option<SubaddressIndex>) -> Change {
Change(Some(ChangeEnum::Standard { view_pair, subaddress }))
}
/// Create a change output specification for a guaranteed view pair.
///
/// This takes the view key, as Monero assumes the sender has the view key for change outputs and
/// optimizes its wallet protocol accordingly.
pub fn guaranteed(view_pair: GuaranteedViewPair, subaddress: Option<SubaddressIndex>) -> Change {
Change(Some(ChangeEnum::Guaranteed { view_pair, subaddress }))
}
/// Create a fingerprintable change output specification.
///
/// You MUST assume this will harm your privacy. Only use this if you know what you're doing.
///
/// If the change address is Some, this will be unable to optimize the transaction as the
/// Monero wallet protocol expects it can (due to presumably having the view key for the change
/// output). If a transaction should be optimized, and isn't, it will be fingerprintable.
///
/// If the change address is None, there are two fingerprints:
///
/// 1) The change in the TX is shunted to the fee (making it fingerprintable).
///
/// 2) If there are two outputs in the TX, Monero would create a payment ID for the non-change
/// output so an observer can't tell apart TXs with a payment ID from TXs without a payment
/// ID. monero-wallet will simply not create a payment ID in this case, revealing it's a
/// monero-wallet TX without change.
pub fn fingerprintable(address: Option<MoneroAddress>) -> Change {
if let Some(address) = address {
Change(Some(ChangeEnum::AddressOnly(address)))
} else {
Change(None)
}
}
}
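// A minimal sketch of the three ways to specify change, assuming `view_pair` and `address` are
// provided by the caller. Only the `fingerprintable` variants should be used when the sender
// lacks the change output's view key. The helper name `change_options` is hypothetical.
fn change_options(view_pair: ViewPair, address: MoneroAddress) -> (Change, Change, Change) {
  (
    // Standard change, with the view key, letting the wallet protocol optimize as expected
    Change::new(view_pair, None),
    // Change to an address we lack the view key for (fingerprintable)
    Change::fingerprintable(Some(address)),
    // No change output; any excess is shunted to the fee (also fingerprintable)
    Change::fingerprintable(None),
  )
}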
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
enum InternalPayment {
Payment(MoneroAddress, u64),
Change(ChangeEnum),
}
impl InternalPayment {
fn address(&self) -> MoneroAddress {
match self {
InternalPayment::Payment(addr, _) => *addr,
InternalPayment::Change(change) => match change {
ChangeEnum::AddressOnly(addr) => *addr,
// Network::Mainnet as the network won't affect the derivations
ChangeEnum::Standard { view_pair, subaddress } => match subaddress {
Some(subaddress) => view_pair.subaddress(Network::Mainnet, *subaddress),
None => view_pair.legacy_address(Network::Mainnet),
},
ChangeEnum::Guaranteed { view_pair, subaddress } => {
view_pair.address(Network::Mainnet, *subaddress, None)
}
},
}
}
}
/// An error while sending Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum SendError {
/// The RingCT type to produce proofs for this transaction with isn't supported.
#[cfg_attr(feature = "std", error("this library doesn't yet support that RctType"))]
UnsupportedRctType,
/// The transaction had no inputs specified.
#[cfg_attr(feature = "std", error("no inputs"))]
NoInputs,
/// The decoy quantity was invalid for the specified RingCT type.
#[cfg_attr(feature = "std", error("invalid number of decoys"))]
InvalidDecoyQuantity,
/// The transaction had no outputs specified.
#[cfg_attr(feature = "std", error("no outputs"))]
NoOutputs,
/// The transaction had too many outputs specified.
#[cfg_attr(feature = "std", error("too many outputs"))]
TooManyOutputs,
/// The transaction did not have a change output, and did not have two outputs.
///
/// Monero requires all transactions have at least two outputs, assuming one payment and one
/// change (or at least one dummy and one change). Accordingly, specifying no change and only
/// one payment prevents creating a valid transaction.
#[cfg_attr(feature = "std", error("only one output and no change address"))]
NoChange,
/// Multiple addresses had payment IDs specified.
///
/// Only one payment ID is allowed per transaction.
#[cfg_attr(feature = "std", error("multiple addresses with payment IDs"))]
MultiplePaymentIds,
/// Too much arbitrary data was specified.
#[cfg_attr(feature = "std", error("too much data"))]
TooMuchArbitraryData,
/// The created transaction was too large.
#[cfg_attr(feature = "std", error("too large of a transaction"))]
TooLargeTransaction,
/// This transaction could not pay for itself.
#[cfg_attr(
feature = "std",
error(
"not enough funds (inputs {inputs}, outputs {outputs}, necessary_fee {necessary_fee:?})"
)
)]
NotEnoughFunds {
/// The amount of funds the inputs contributed.
inputs: u64,
/// The amount of funds the outputs required.
outputs: u64,
/// The fee necessary to be paid on top.
///
/// If this is None, it is because the fee was not calculated as the outputs alone caused this
/// error.
necessary_fee: Option<u64>,
},
/// This transaction is being signed with the wrong private key.
#[cfg_attr(feature = "std", error("wrong spend private key"))]
WrongPrivateKey,
/// This transaction was read from a bytestream which was malicious.
#[cfg_attr(
feature = "std",
error("this SignableTransaction was created by deserializing a malicious serialization")
)]
MaliciousSerialization,
/// There was an error when working with the CLSAGs.
#[cfg_attr(feature = "std", error("clsag error ({0})"))]
ClsagError(ClsagError),
/// There was an error when working with FROST.
#[cfg(feature = "multisig")]
#[cfg_attr(feature = "std", error("frost error {0}"))]
FrostError(FrostError),
}
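// A minimal sketch of handling the most common recoverable error above, reporting how much is
// missing. The helper name `shortfall` is hypothetical.
fn shortfall(e: &SendError) -> Option<u64> {
  match e {
    SendError::NotEnoughFunds { inputs, outputs, necessary_fee } => {
      // `necessary_fee` is None if the outputs alone already exceeded the inputs
      Some(outputs.saturating_add(necessary_fee.unwrap_or(0)).saturating_sub(*inputs))
    }
    _ => None,
  }
}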
/// A signable transaction.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct SignableTransaction {
rct_type: RctType,
outgoing_view_key: Zeroizing<[u8; 32]>,
inputs: Vec<OutputWithDecoys>,
payments: Vec<InternalPayment>,
data: Vec<Vec<u8>>,
fee_rate: FeeRate,
}
struct SignableTransactionWithKeyImages {
intent: SignableTransaction,
key_images: Vec<EdwardsPoint>,
}
impl SignableTransaction {
fn validate(&self) -> Result<(), SendError> {
match self.rct_type {
RctType::ClsagBulletproof | RctType::ClsagBulletproofPlus => {}
_ => Err(SendError::UnsupportedRctType)?,
}
if self.inputs.is_empty() {
Err(SendError::NoInputs)?;
}
for input in &self.inputs {
if input.decoys().len() !=
match self.rct_type {
RctType::ClsagBulletproof => 11,
RctType::ClsagBulletproofPlus => 16,
_ => panic!("unsupported RctType"),
}
{
Err(SendError::InvalidDecoyQuantity)?;
}
}
// Check we have at least one non-change output
if !self.payments.iter().any(|payment| matches!(payment, InternalPayment::Payment(_, _))) {
Err(SendError::NoOutputs)?;
}
// If we don't have at least two outputs, as required by Monero, error
if self.payments.len() < 2 {
Err(SendError::NoChange)?;
}
// Check we don't have multiple Change outputs due to decoding a malicious serialization
{
let mut change_count = 0;
for payment in &self.payments {
change_count += usize::from(u8::from(matches!(payment, InternalPayment::Change(_))));
}
if change_count > 1 {
Err(SendError::MaliciousSerialization)?;
}
}
// Make sure there's at most one payment ID
{
let mut payment_ids = 0;
for payment in &self.payments {
payment_ids += usize::from(u8::from(payment.address().payment_id().is_some()));
}
if payment_ids > 1 {
Err(SendError::MultiplePaymentIds)?;
}
}
if self.payments.len() > MAX_COMMITMENTS {
Err(SendError::TooManyOutputs)?;
}
// Check the length of each arbitrary data
for part in &self.data {
if part.len() > MAX_ARBITRARY_DATA_SIZE {
Err(SendError::TooMuchArbitraryData)?;
}
}
// Check the length of TX extra
// https://github.com/monero-project/monero/pull/8733
const MAX_EXTRA_SIZE: usize = 1060;
if self.extra().len() > MAX_EXTRA_SIZE {
Err(SendError::TooMuchArbitraryData)?;
}
// Make sure we have enough funds
let in_amount = self.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
let payments_amount = self
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(amount),
InternalPayment::Change(_) => None,
})
.sum::<u64>();
let (weight, necessary_fee) = self.weight_and_necessary_fee();
if in_amount < (payments_amount + necessary_fee) {
Err(SendError::NotEnoughFunds {
inputs: in_amount,
outputs: payments_amount,
necessary_fee: Some(necessary_fee),
})?;
}
// The limit is half the no-penalty block size
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/wallet/wallet2.cpp#L110766-L11085
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_config.h#L61
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_config.h#L64
const MAX_TX_SIZE: usize = (300_000 / 2) - 600;
if weight >= MAX_TX_SIZE {
Err(SendError::TooLargeTransaction)?;
}
Ok(())
}
/// Create a new SignableTransaction.
///
/// `outgoing_view_key` is used to seed the RNGs for this transaction. Anyone with knowledge of
/// the outgoing view key will be able to identify a transaction produced with this methodology,
/// and the data within it. Accordingly, it must be treated as a private key.
///
/// `data` represents arbitrary data which will be embedded into the transaction's `extra` field.
/// The embedding occurs using an `ExtraField::Nonce` with a custom marker byte (so as to not
/// conflict with a payment ID).
pub fn new(
rct_type: RctType,
outgoing_view_key: Zeroizing<[u8; 32]>,
inputs: Vec<OutputWithDecoys>,
payments: Vec<(MoneroAddress, u64)>,
change: Change,
data: Vec<Vec<u8>>,
fee_rate: FeeRate,
) -> Result<SignableTransaction, SendError> {
// Re-format the payments and change into a consolidated payments list
let mut payments = payments
.into_iter()
.map(|(addr, amount)| InternalPayment::Payment(addr, amount))
.collect::<Vec<_>>();
if let Some(change) = change.0 {
payments.push(InternalPayment::Change(change));
}
let mut res =
SignableTransaction { rct_type, outgoing_view_key, inputs, payments, data, fee_rate };
res.validate()?;
// Shuffle the payments
{
let mut rng = res.seeded_rng(b"shuffle_payments");
res.payments.shuffle(&mut rng);
}
Ok(res)
}
/// The fee rate this transaction uses.
pub fn fee_rate(&self) -> FeeRate {
self.fee_rate
}
/// The fee this transaction requires.
///
/// This is distinct from the fee this transaction will use. If no change output is specified,
/// all unspent coins will be shunted to the fee.
pub fn necessary_fee(&self) -> u64 {
self.weight_and_necessary_fee().1
}
/// Write a SignableTransaction.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
fn write_payment<W: io::Write>(payment: &InternalPayment, w: &mut W) -> io::Result<()> {
match payment {
InternalPayment::Payment(addr, amount) => {
w.write_all(&[0])?;
write_vec(write_byte, addr.to_string().as_bytes(), w)?;
w.write_all(&amount.to_le_bytes())
}
InternalPayment::Change(change) => match change {
ChangeEnum::AddressOnly(addr) => {
w.write_all(&[1])?;
write_vec(write_byte, addr.to_string().as_bytes(), w)
}
ChangeEnum::Standard { view_pair, subaddress } => {
w.write_all(&[2])?;
write_point(&view_pair.spend(), w)?;
write_scalar(&view_pair.view, w)?;
if let Some(subaddress) = subaddress {
w.write_all(&subaddress.account().to_le_bytes())?;
w.write_all(&subaddress.address().to_le_bytes())
} else {
w.write_all(&0u32.to_le_bytes())?;
w.write_all(&0u32.to_le_bytes())
}
}
ChangeEnum::Guaranteed { view_pair, subaddress } => {
w.write_all(&[3])?;
write_point(&view_pair.spend(), w)?;
write_scalar(&view_pair.0.view, w)?;
if let Some(subaddress) = subaddress {
w.write_all(&subaddress.account().to_le_bytes())?;
w.write_all(&subaddress.address().to_le_bytes())
} else {
w.write_all(&0u32.to_le_bytes())?;
w.write_all(&0u32.to_le_bytes())
}
}
},
}
}
write_byte(&u8::from(self.rct_type), w)?;
w.write_all(self.outgoing_view_key.as_slice())?;
write_vec(OutputWithDecoys::write, &self.inputs, w)?;
write_vec(write_payment, &self.payments, w)?;
write_vec(|data, w| write_vec(write_byte, data, w), &self.data, w)?;
self.fee_rate.write(w)
}
/// Serialize the SignableTransaction to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(256);
self.write(&mut buf).unwrap();
buf
}
/// Read a `SignableTransaction`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read<R: io::Read>(r: &mut R) -> io::Result<SignableTransaction> {
fn read_address<R: io::Read>(r: &mut R) -> io::Result<MoneroAddress> {
String::from_utf8(read_vec(read_byte, r)?)
.ok()
.and_then(|str| MoneroAddress::from_str_with_unchecked_network(&str).ok())
.ok_or_else(|| io::Error::other("invalid address"))
}
fn read_payment<R: io::Read>(r: &mut R) -> io::Result<InternalPayment> {
Ok(match read_byte(r)? {
0 => InternalPayment::Payment(read_address(r)?, read_u64(r)?),
1 => InternalPayment::Change(ChangeEnum::AddressOnly(read_address(r)?)),
2 => InternalPayment::Change(ChangeEnum::Standard {
view_pair: ViewPair::new(read_point(r)?, Zeroizing::new(read_scalar(r)?))
.map_err(io::Error::other)?,
subaddress: SubaddressIndex::new(read_u32(r)?, read_u32(r)?),
}),
3 => InternalPayment::Change(ChangeEnum::Guaranteed {
view_pair: GuaranteedViewPair::new(read_point(r)?, Zeroizing::new(read_scalar(r)?))
.map_err(io::Error::other)?,
subaddress: SubaddressIndex::new(read_u32(r)?, read_u32(r)?),
}),
_ => Err(io::Error::other("invalid payment"))?,
})
}
let res = SignableTransaction {
rct_type: RctType::try_from(read_byte(r)?)
.map_err(|()| io::Error::other("unsupported/invalid RctType"))?,
outgoing_view_key: Zeroizing::new(read_bytes(r)?),
inputs: read_vec(OutputWithDecoys::read, r)?,
payments: read_vec(read_payment, r)?,
data: read_vec(|r| read_vec(read_byte, r), r)?,
fee_rate: FeeRate::read(r)?,
};
match res.validate() {
Ok(()) => {}
Err(e) => Err(io::Error::other(e))?,
}
Ok(res)
}
fn with_key_images(mut self, key_images: Vec<EdwardsPoint>) -> SignableTransactionWithKeyImages {
debug_assert_eq!(self.inputs.len(), key_images.len());
// Sort the inputs by their key images
let mut sorted_inputs = self.inputs.into_iter().zip(key_images).collect::<Vec<_>>();
sorted_inputs
.sort_by(|(_, key_image_a), (_, key_image_b)| key_image_sort(key_image_a, key_image_b));
self.inputs = Vec::with_capacity(sorted_inputs.len());
let mut key_images = Vec::with_capacity(sorted_inputs.len());
for (input, key_image) in sorted_inputs {
self.inputs.push(input);
key_images.push(key_image);
}
SignableTransactionWithKeyImages { intent: self, key_images }
}
/// Sign this transaction.
pub fn sign(
self,
rng: &mut (impl RngCore + CryptoRng),
sender_spend_key: &Zeroizing<Scalar>,
) -> Result<Transaction, SendError> {
// Calculate the key images
let mut key_images = vec![];
for input in &self.inputs {
let input_key = Zeroizing::new(sender_spend_key.deref() + input.key_offset());
if (input_key.deref() * ED25519_BASEPOINT_TABLE) != input.key() {
Err(SendError::WrongPrivateKey)?;
}
let key_image = input_key.deref() * hash_to_point(input.key().compress().to_bytes());
key_images.push(key_image);
}
// Convert to a SignableTransactionWithKeyImages
let tx = self.with_key_images(key_images);
// Prepare the CLSAG signatures
let mut clsag_signs = Vec::with_capacity(tx.intent.inputs.len());
for input in &tx.intent.inputs {
// Re-derive the input key as this will be in a different order
let input_key = Zeroizing::new(sender_spend_key.deref() + input.key_offset());
clsag_signs.push((
input_key,
ClsagContext::new(input.decoys().clone(), input.commitment().clone())
.map_err(SendError::ClsagError)?,
));
}
// Get the output commitments' mask sum
let mask_sum = tx.intent.sum_output_masks(&tx.key_images);
// Get the actual TX, just needing the CLSAGs
let mut tx = tx.transaction_without_signatures();
// Sign the CLSAGs
let clsags_and_pseudo_outs =
Clsag::sign(rng, clsag_signs, mask_sum, tx.signature_hash().unwrap())
.map_err(SendError::ClsagError)?;
// Fill in the CLSAGs/pseudo-outs
let inputs_len = tx.prefix().inputs.len();
let Transaction::V2 {
proofs:
Some(RctProofs {
prunable: RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. },
..
}),
..
} = tx
else {
panic!("not signing clsag?")
};
*clsags = Vec::with_capacity(inputs_len);
*pseudo_outs = Vec::with_capacity(inputs_len);
for (clsag, pseudo_out) in clsags_and_pseudo_outs {
clsags.push(clsag);
pseudo_outs.push(pseudo_out);
}
// Return the signed TX
Ok(tx)
}
}
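// A hedged end-to-end sketch of the single-signer flow documented above. The inputs (already
// paired with decoys), the fee rate, the recipient, and the change specification are all assumed
// to be prepared by the caller; the helper name `build_and_sign` is hypothetical.
fn build_and_sign(
  rng: &mut (impl RngCore + CryptoRng),
  outgoing_view_key: Zeroizing<[u8; 32]>,
  inputs: Vec<OutputWithDecoys>,
  recipient: MoneroAddress,
  amount: u64,
  change: Change,
  fee_rate: FeeRate,
  spend_key: &Zeroizing<Scalar>,
) -> Result<Transaction, SendError> {
  let signable = SignableTransaction::new(
    RctType::ClsagBulletproofPlus,
    outgoing_view_key,
    inputs,
    vec![(recipient, amount)],
    change,
    vec![],
    fee_rate,
  )?;
  // An Eventuality may be derived now to later recognize this transaction on-chain
  let _eventuality = Eventuality::from(signable.clone());
  signable.sign(rng, spend_key)
}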

View File

@@ -0,0 +1,304 @@
use std_shims::{
vec::Vec,
io::{self, Read},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use group::ff::Field;
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
use dalek_ff_group as dfg;
use transcript::{Transcript, RecommendedTranscript};
use frost::{
curve::Ed25519,
Participant, FrostError, ThresholdKeys,
dkg::lagrange,
sign::{
Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine,
AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
},
};
use monero_serai::{
ringct::{
clsag::{ClsagContext, ClsagMultisigMaskSender, ClsagAddendum, ClsagMultisig},
RctPrunable, RctProofs,
},
transaction::Transaction,
};
use crate::send::{SendError, SignableTransaction, key_image_sort};
/// Initial FROST machine to produce a signed transaction.
pub struct TransactionMachine {
signable: SignableTransaction,
i: Participant,
// The key image generator, and the scalar offset from the spend key
key_image_generators_and_offsets: Vec<(EdwardsPoint, Scalar)>,
clsags: Vec<(ClsagMultisigMaskSender, AlgorithmMachine<Ed25519, ClsagMultisig>)>,
}
/// Second FROST machine to produce a signed transaction.
pub struct TransactionSignMachine {
signable: SignableTransaction,
i: Participant,
key_image_generators_and_offsets: Vec<(EdwardsPoint, Scalar)>,
clsags: Vec<(ClsagMultisigMaskSender, AlgorithmSignMachine<Ed25519, ClsagMultisig>)>,
our_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
}
/// Final FROST machine to produce a signed transaction.
pub struct TransactionSignatureMachine {
tx: Transaction,
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
}
impl SignableTransaction {
/// Create a FROST signing machine out of this signable transaction.
pub fn multisig(self, keys: &ThresholdKeys<Ed25519>) -> Result<TransactionMachine, SendError> {
let mut clsags = vec![];
let mut key_image_generators_and_offsets = vec![];
for input in &self.inputs {
// Check this is the right set of keys
let offset = keys.offset(dfg::Scalar(input.key_offset()));
if offset.group_key().0 != input.key() {
Err(SendError::WrongPrivateKey)?;
}
let context = ClsagContext::new(input.decoys().clone(), input.commitment().clone())
.map_err(SendError::ClsagError)?;
let (clsag, clsag_mask_send) = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Multisignature Transaction"),
context,
);
key_image_generators_and_offsets.push((
clsag.key_image_generator(),
keys.current_offset().unwrap_or(dfg::Scalar::ZERO).0 + input.key_offset(),
));
clsags.push((clsag_mask_send, AlgorithmMachine::new(clsag, offset)));
}
Ok(TransactionMachine {
signable: self,
i: keys.params().i(),
key_image_generators_and_offsets,
clsags,
})
}
}
impl PreprocessMachine for TransactionMachine {
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
type Signature = Transaction;
type SignMachine = TransactionSignMachine;
fn preprocess<R: RngCore + CryptoRng>(
mut self,
rng: &mut R,
) -> (TransactionSignMachine, Self::Preprocess) {
// Iterate over each CLSAG calling preprocess
let mut preprocesses = Vec::with_capacity(self.clsags.len());
let clsags = self
.clsags
.drain(..)
.map(|(clsag_mask_send, clsag)| {
let (clsag, preprocess) = clsag.preprocess(rng);
preprocesses.push(preprocess);
(clsag_mask_send, clsag)
})
.collect();
let our_preprocess = preprocesses.clone();
(
TransactionSignMachine {
signable: self.signable,
i: self.i,
key_image_generators_and_offsets: self.key_image_generators_and_offsets,
clsags,
our_preprocess,
},
preprocesses,
)
}
}
impl SignMachine<Transaction> for TransactionSignMachine {
type Params = ();
type Keys = ThresholdKeys<Ed25519>;
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
type SignatureShare = Vec<SignatureShare<Ed25519>>;
type SignatureMachine = TransactionSignatureMachine;
fn cache(self) -> CachedPreprocess {
unimplemented!(
"Monero transactions don't support caching their preprocesses due to {}",
"being already bound to a specific transaction"
);
}
fn from_cache(
(): (),
_: ThresholdKeys<Ed25519>,
_: CachedPreprocess,
) -> (Self, Self::Preprocess) {
unimplemented!(
"Monero transactions don't support caching their preprocesses due to {}",
"being already bound to a specific transaction"
);
}
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
self.clsags.iter().map(|clsag| clsag.1.read_preprocess(reader)).collect()
}
fn sign(
self,
mut commitments: HashMap<Participant, Self::Preprocess>,
msg: &[u8],
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() {
panic!("message was passed to the TransactionMachine when it generates its own");
}
// We do not need to be included here, yet this set of signers has yet to be validated
// We explicitly remove ourselves to ensure we aren't included twice, if we were redundantly
// included
commitments.remove(&self.i);
// Find out who's included
let mut included = commitments.keys().copied().collect::<Vec<_>>();
// This push won't duplicate due to the above removal
included.push(self.i);
// unstable sort may reorder elements of equal order
// Given our lack of duplicates, we should have no elements of equal order
included.sort_unstable();
// Start calculating the key images, as needed on the TX level
let mut key_images = vec![EdwardsPoint::identity(); self.clsags.len()];
for (image, (generator, offset)) in
key_images.iter_mut().zip(&self.key_image_generators_and_offsets)
{
*image = generator * offset;
}
// Convert the serialized nonce commitments to a parallelized Vec
let mut commitments = (0 .. self.clsags.len())
.map(|c| {
included
.iter()
.map(|l| {
let preprocess = if *l == self.i {
self.our_preprocess[c].clone()
} else {
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone()
};
// While here, calculate the key image as needed to call sign
// The CLSAG algorithm will independently calculate the key image/verify these shares
key_images[c] +=
preprocess.addendum.key_image_share().0 * lagrange::<dfg::Scalar>(*l, &included).0;
Ok((*l, preprocess))
})
.collect::<Result<HashMap<_, _>, _>>()
})
.collect::<Result<Vec<_>, _>>()?;
// The above inserted our own preprocess into these maps (which is unnecessary)
// Remove it now
for map in &mut commitments {
map.remove(&self.i);
}
// The actual TX will have sorted its inputs by key image
// We apply the same sort now to our CLSAG machines
let mut clsags = Vec::with_capacity(self.clsags.len());
for ((key_image, clsag), commitments) in key_images.iter().zip(self.clsags).zip(commitments) {
clsags.push((key_image, clsag, commitments));
}
clsags.sort_by(|x, y| key_image_sort(x.0, y.0));
let clsags =
clsags.into_iter().map(|(_, clsag, commitments)| (clsag, commitments)).collect::<Vec<_>>();
// Specify the TX's key images
let tx = self.signable.with_key_images(key_images);
// We now need to decide the masks for each CLSAG
let clsag_len = clsags.len();
let output_masks = tx.intent.sum_output_masks(&tx.key_images);
let mut rng = tx.intent.seeded_rng(b"multisig_pseudo_out_masks");
let mut sum_pseudo_outs = Scalar::ZERO;
let mut to_sign = Vec::with_capacity(clsag_len);
for (i, ((clsag_mask_send, clsag), commitments)) in clsags.into_iter().enumerate() {
let mut mask = Scalar::random(&mut rng);
if i == (clsag_len - 1) {
mask = output_masks - sum_pseudo_outs;
} else {
sum_pseudo_outs += mask;
}
clsag_mask_send.send(mask);
to_sign.push((clsag, commitments));
}
let tx = tx.transaction_without_signatures();
let msg = tx.signature_hash().unwrap();
// Iterate over each CLSAG calling sign
let mut shares = Vec::with_capacity(to_sign.len());
let clsags = to_sign
.drain(..)
.map(|(clsag, commitments)| {
let (clsag, share) = clsag.sign(commitments, &msg)?;
shares.push(share);
Ok(clsag)
})
.collect::<Result<_, _>>()?;
Ok((TransactionSignatureMachine { tx, clsags }, shares))
}
}
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
type SignatureShare = Vec<SignatureShare<Ed25519>>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect()
}
fn complete(
mut self,
shares: HashMap<Participant, Self::SignatureShare>,
) -> Result<Transaction, FrostError> {
let mut tx = self.tx;
match tx {
Transaction::V2 {
proofs:
Some(RctProofs {
prunable: RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. },
..
}),
..
} => {
for (c, clsag) in self.clsags.drain(..).enumerate() {
let (clsag, pseudo_out) = clsag.complete(
shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::<HashMap<_, _>>(),
)?;
clsags.push(clsag);
pseudo_outs.push(pseudo_out);
}
}
_ => unreachable!("attempted to sign a multisig TX which wasn't CLSAG"),
}
Ok(tx)
}
}
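// A hedged sketch of one participant's view of the multisig flow implemented above. Broadcasting
// our preprocess and signature share, and collecting the other signers' (the two `HashMap`s
// passed in), is assumed to happen over some external network layer. The helper name
// `multisig_sign` is hypothetical.
fn multisig_sign(
  rng: &mut (impl RngCore + CryptoRng),
  signable: SignableTransaction,
  keys: &ThresholdKeys<Ed25519>,
  their_preprocesses: HashMap<Participant, Vec<Preprocess<Ed25519, ClsagAddendum>>>,
  their_shares: HashMap<Participant, Vec<SignatureShare<Ed25519>>>,
) -> Result<Transaction, SendError> {
  // 1) Build the initial machine and our preprocess (to be sent to the other signers)
  let (machine, _our_preprocess) = signable.multisig(keys)?.preprocess(rng);
  // 2) With the other signers' preprocesses, produce our signature share
  //    (the message is empty as the machine generates the message being signed itself)
  let (machine, _our_share) =
    machine.sign(their_preprocesses, &[]).map_err(SendError::FrostError)?;
  // 3) With the other signers' shares, complete the transaction
  machine.complete(their_shares).map_err(SendError::FrostError)
}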

View File

@@ -0,0 +1,323 @@
use std_shims::{vec, vec::Vec};
use curve25519_dalek::{
constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE},
Scalar, EdwardsPoint,
};
use crate::{
io::{varint_len, write_varint},
primitives::Commitment,
ringct::{
clsag::Clsag, bulletproofs::Bulletproof, EncryptedAmount, RctType, RctBase, RctPrunable,
RctProofs,
},
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
extra::{ARBITRARY_DATA_MARKER, PaymentId, Extra},
send::{InternalPayment, SignableTransaction, SignableTransactionWithKeyImages},
};
impl SignableTransaction {
// Output the inputs for this transaction.
pub(crate) fn inputs(&self, key_images: &[EdwardsPoint]) -> Vec<Input> {
debug_assert_eq!(self.inputs.len(), key_images.len());
let mut res = Vec::with_capacity(self.inputs.len());
for (input, key_image) in self.inputs.iter().zip(key_images) {
res.push(Input::ToKey {
amount: None,
key_offsets: input.decoys().offsets().to_vec(),
key_image: *key_image,
});
}
res
}
// Output the outputs for this transaction.
pub(crate) fn outputs(&self, key_images: &[EdwardsPoint]) -> Vec<Output> {
let shared_key_derivations = self.shared_key_derivations(key_images);
debug_assert_eq!(self.payments.len(), shared_key_derivations.len());
let mut res = Vec::with_capacity(self.payments.len());
for (payment, shared_key_derivations) in self.payments.iter().zip(&shared_key_derivations) {
let key =
(&shared_key_derivations.shared_key * ED25519_BASEPOINT_TABLE) + payment.address().spend();
res.push(Output {
key: key.compress(),
amount: None,
view_tag: (match self.rct_type {
RctType::ClsagBulletproof => false,
RctType::ClsagBulletproofPlus => true,
_ => panic!("unsupported RctType"),
})
.then_some(shared_key_derivations.view_tag),
});
}
res
}
// Calculate the TX extra for this transaction.
pub(crate) fn extra(&self) -> Vec<u8> {
let (tx_key, additional_keys) = self.transaction_keys_pub();
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
let payment_id_xors = self.payment_id_xors();
debug_assert_eq!(self.payments.len(), payment_id_xors.len());
let amount_of_keys = 1 + additional_keys.len();
let mut extra = Extra::new(tx_key, additional_keys);
if let Some((id, id_xor)) =
self.payments.iter().zip(&payment_id_xors).find_map(|(payment, payment_id_xor)| {
payment.address().payment_id().map(|id| (id, payment_id_xor))
})
{
let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes();
let mut id_vec = Vec::with_capacity(1 + 8);
PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
extra.push_nonce(id_vec);
} else {
// If there's no payment ID, we push a dummy (as wallet2 does) if there's only one payment
if (self.payments.len() == 2) &&
self.payments.iter().any(|payment| matches!(payment, InternalPayment::Change(_)))
{
let (_, payment_id_xor) = self
.payments
.iter()
.zip(&payment_id_xors)
.find(|(payment, _)| matches!(payment, InternalPayment::Payment(_, _)))
.expect("multiple change outputs?");
let mut id_vec = Vec::with_capacity(1 + 8);
// The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask
PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap();
extra.push_nonce(id_vec);
}
}
// Include data if present
for part in &self.data {
let mut arb = vec![ARBITRARY_DATA_MARKER];
arb.extend(part);
extra.push_nonce(arb);
}
let mut serialized = Vec::with_capacity(32 * amount_of_keys);
extra.write(&mut serialized).unwrap();
serialized
}
pub(crate) fn weight_and_necessary_fee(&self) -> (usize, u64) {
/*
This transaction's length is variable due to:
- The decoy offsets (fixed)
- The TX extra (which varies with the key images, requiring an interactive protocol)
Thankfully, the TX extra *length* is fixed. Accordingly, we can calculate the inevitable TX's
weight at this time with a shimmed transaction.
*/
let base_weight = {
let mut key_images = Vec::with_capacity(self.inputs.len());
let mut clsags = Vec::with_capacity(self.inputs.len());
let mut pseudo_outs = Vec::with_capacity(self.inputs.len());
for _ in &self.inputs {
key_images.push(ED25519_BASEPOINT_POINT);
clsags.push(Clsag {
D: ED25519_BASEPOINT_POINT,
s: vec![
Scalar::ZERO;
match self.rct_type {
RctType::ClsagBulletproof => 11,
RctType::ClsagBulletproofPlus => 16,
_ => unreachable!("unsupported RCT type"),
}
],
c1: Scalar::ZERO,
});
pseudo_outs.push(ED25519_BASEPOINT_POINT);
}
let mut encrypted_amounts = Vec::with_capacity(self.payments.len());
let mut bp_commitments = Vec::with_capacity(self.payments.len());
let mut commitments = Vec::with_capacity(self.payments.len());
for _ in &self.payments {
encrypted_amounts.push(EncryptedAmount::Compact { amount: [0; 8] });
bp_commitments.push(Commitment::zero());
commitments.push(ED25519_BASEPOINT_POINT);
}
let padded_log2 = {
let mut log2_find = 0;
while (1 << log2_find) < self.payments.len() {
log2_find += 1;
}
log2_find
};
// This is log2 the padded amount of IPA rows
// We have 64 rows per commitment, so we need 64 * c IPA rows
// We rewrite this as 2**6 * c
// By finding the padded log2 of c, we get 2**6 * 2**p
// This declares the log2 to be 6 + p
let lr_len = 6 + padded_log2;
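// e.g. with 3 payments, padded_log2 is 2 (as 2**2 = 4 >= 3), so lr_len is 6 + 2 = 8,
// covering 2**8 = 256 rows (4 commitments * 64 rows each)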
let bulletproof = match self.rct_type {
RctType::ClsagBulletproof => {
let mut bp = Vec::with_capacity(((9 + (2 * lr_len)) * 32) + 2);
let push_point = |bp: &mut Vec<u8>| {
bp.push(1);
bp.extend([0; 31]);
};
let push_scalar = |bp: &mut Vec<u8>| bp.extend([0; 32]);
for _ in 0 .. 4 {
push_point(&mut bp);
}
for _ in 0 .. 2 {
push_scalar(&mut bp);
}
for _ in 0 .. 2 {
write_varint(&lr_len, &mut bp).unwrap();
for _ in 0 .. lr_len {
push_point(&mut bp);
}
}
for _ in 0 .. 3 {
push_scalar(&mut bp);
}
Bulletproof::read(&mut bp.as_slice()).expect("made an invalid dummy BP")
}
RctType::ClsagBulletproofPlus => {
let mut bp = Vec::with_capacity(((6 + (2 * lr_len)) * 32) + 2);
let push_point = |bp: &mut Vec<u8>| {
bp.push(1);
bp.extend([0; 31]);
};
let push_scalar = |bp: &mut Vec<u8>| bp.extend([0; 32]);
for _ in 0 .. 3 {
push_point(&mut bp);
}
for _ in 0 .. 3 {
push_scalar(&mut bp);
}
for _ in 0 .. 2 {
write_varint(&lr_len, &mut bp).unwrap();
for _ in 0 .. lr_len {
push_point(&mut bp);
}
}
Bulletproof::read_plus(&mut bp.as_slice()).expect("made an invalid dummy BP+")
}
_ => panic!("unsupported RctType"),
};
// `- 1` to remove the one byte for the 0 fee
Transaction::V2 {
prefix: TransactionPrefix {
additional_timelock: Timelock::None,
inputs: self.inputs(&key_images),
outputs: self.outputs(&key_images),
extra: self.extra(),
},
proofs: Some(RctProofs {
base: RctBase { fee: 0, encrypted_amounts, pseudo_outs: vec![], commitments },
prunable: RctPrunable::Clsag { bulletproof, clsags, pseudo_outs },
}),
}
.weight() -
1
};
// We now have the base weight, without the fee encoded
// The fee itself will impact the weight as its encoding is [1, 9] bytes long
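// In other words, we search for a length `l` in 1 ..= 9 such that the fee calculated for
// `base_weight + l` itself encodes in at most `l` bytes; e.g. if the fee for `base_weight + 1`
// needs two bytes yet the fee for `base_weight + 2` encodes in two bytes, the final weight is
// `base_weight + 2` with that fee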
let mut possible_weights = Vec::with_capacity(9);
for i in 1 ..= 9 {
possible_weights.push(base_weight + i);
}
debug_assert_eq!(possible_weights.len(), 9);
// We now calculate the fee which would be used for each weight
let mut possible_fees = Vec::with_capacity(9);
for weight in possible_weights {
possible_fees.push(self.fee_rate.calculate_fee_from_weight(weight));
}
// We now look for the fee whose length matches the length used to derive it
let mut weight_and_fee = None;
for (fee_len, possible_fee) in possible_fees.into_iter().enumerate() {
let fee_len = 1 + fee_len;
debug_assert!(1 <= fee_len);
debug_assert!(fee_len <= 9);
// We use the first fee whose encoded length is not larger than the length used within this
// weight
// This should occur because the lengths are equal, yet if somehow no lengths are equal, this
// still terminates with a usable weight and fee
if varint_len(possible_fee) <= fee_len {
weight_and_fee = Some((base_weight + fee_len, possible_fee));
break;
}
}
weight_and_fee.unwrap()
}
}
impl SignableTransactionWithKeyImages {
pub(crate) fn transaction_without_signatures(&self) -> Transaction {
let commitments_and_encrypted_amounts =
self.intent.commitments_and_encrypted_amounts(&self.key_images);
let mut commitments = Vec::with_capacity(self.intent.payments.len());
let mut bp_commitments = Vec::with_capacity(self.intent.payments.len());
let mut encrypted_amounts = Vec::with_capacity(self.intent.payments.len());
for (commitment, encrypted_amount) in commitments_and_encrypted_amounts {
commitments.push(commitment.calculate());
bp_commitments.push(commitment);
encrypted_amounts.push(encrypted_amount);
}
let bulletproof = {
let mut bp_rng = self.intent.seeded_rng(b"bulletproof");
(match self.intent.rct_type {
RctType::ClsagBulletproof => Bulletproof::prove(&mut bp_rng, bp_commitments),
RctType::ClsagBulletproofPlus => Bulletproof::prove_plus(&mut bp_rng, bp_commitments),
_ => panic!("unsupported RctType"),
})
.expect("couldn't prove BP(+)s for this many payments despite checking in constructor?")
};
Transaction::V2 {
prefix: TransactionPrefix {
additional_timelock: Timelock::None,
inputs: self.intent.inputs(&self.key_images),
outputs: self.intent.outputs(&self.key_images),
extra: self.intent.extra(),
},
proofs: Some(RctProofs {
base: RctBase {
fee: if self
.intent
.payments
.iter()
.any(|payment| matches!(payment, InternalPayment::Change(_)))
{
// With a change output present, the fee is exactly the necessary fee
self.intent.weight_and_necessary_fee().1
} else {
// If we don't have a change output, the difference is the fee
let inputs =
self.intent.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
let payments = self
.intent
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(amount),
InternalPayment::Change(_) => None,
})
.sum::<u64>();
// Safe since the constructor checks inputs >= (payments + fee)
inputs - payments
},
encrypted_amounts,
pseudo_outs: vec![],
commitments,
},
prunable: RctPrunable::Clsag { bulletproof, clsags: vec![], pseudo_outs: vec![] },
}),
}
}
}

View File

@@ -0,0 +1,246 @@
use core::ops::Deref;
use std_shims::{vec, vec::Vec};
use zeroize::Zeroizing;
use rand_core::SeedableRng;
use rand_chacha::ChaCha20Rng;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
use crate::{
primitives::{keccak256, Commitment},
ringct::EncryptedAmount,
SharedKeyDerivations, OutputWithDecoys,
send::{ChangeEnum, InternalPayment, SignableTransaction, key_image_sort},
};
impl SignableTransaction {
pub(crate) fn seeded_rng(&self, dst: &'static [u8]) -> ChaCha20Rng {
// Apply the DST
let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]);
transcript.extend(dst);
// Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript
transcript.extend(self.outgoing_view_key.as_slice());
// Ensure uniqueness across transactions by binding to a use-once object
// The keys for the inputs are bound to their key images, making them use-once
let mut input_keys = self.inputs.iter().map(OutputWithDecoys::key).collect::<Vec<_>>();
// We sort the inputs mid-way through TX construction, so apply our own sort to ensure a
// consistent order
// We use the key image sort as it's applicable and well-defined, not because these are key
// images
input_keys.sort_by(key_image_sort);
for key in input_keys {
transcript.extend(key.compress().to_bytes());
}
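// The resulting transcript is `[dst.len() as u8] || dst || outgoing_view_key || sorted input
// keys`, and its Keccak-256 hash is used as the ChaCha20 seed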
ChaCha20Rng::from_seed(keccak256(&transcript))
}
fn has_payments_to_subaddresses(&self) -> bool {
self.payments.iter().any(|payment| match payment {
InternalPayment::Payment(addr, _) => addr.is_subaddress(),
InternalPayment::Change(change) => match change {
ChangeEnum::AddressOnly(addr) => addr.is_subaddress(),
// These aren't considered payments to subaddresses as we don't need to send to them as
// subaddresses
// We can calculate the shared key using the view key, as if we were receiving, instead
ChangeEnum::Standard { .. } | ChangeEnum::Guaranteed { .. } => false,
},
})
}
fn should_use_additional_keys(&self) -> bool {
let has_payments_to_subaddresses = self.has_payments_to_subaddresses();
if !has_payments_to_subaddresses {
return false;
}
let has_change_view = self.payments.iter().any(|payment| match payment {
InternalPayment::Payment(_, _) => false,
InternalPayment::Change(change) => match change {
ChangeEnum::AddressOnly(_) => false,
ChangeEnum::Standard { .. } | ChangeEnum::Guaranteed { .. } => true,
},
});
/*
If sending to a subaddress, the shared key is not `rG` yet `rB`. Because of this, a
per-subaddress shared key is necessary, causing the usage of additional keys.
The one exception is if we're sending to a subaddress in a 2-output transaction. The second
output, the change output, will attempt scanning the singular key `rB` with `v rB`. While we
cannot calculate `r vB` with just `r` (as that'd require `vB` when we presumably only have
`vG` when sending), since we do in fact have `v` (due to it being our own view key for our
change output), we can still calculate the shared secret.
*/
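// As a concrete example, a 2-output TX paying one subaddress, with change to an address whose
// view key we hold, gets by with the single transaction key; any other TX paying a subaddress
// uses one additional key per payment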
has_payments_to_subaddresses && !((self.payments.len() == 2) && has_change_view)
}
// Calculate the private transaction keys, which serve as this transaction's randomness.
fn transaction_keys(&self) -> (Zeroizing<Scalar>, Vec<Zeroizing<Scalar>>) {
let mut rng = self.seeded_rng(b"transaction_keys");
let tx_key = Zeroizing::new(Scalar::random(&mut rng));
let mut additional_keys = vec![];
if self.should_use_additional_keys() {
for _ in 0 .. self.payments.len() {
additional_keys.push(Zeroizing::new(Scalar::random(&mut rng)));
}
}
(tx_key, additional_keys)
}
fn ecdhs(&self) -> Vec<Zeroizing<EdwardsPoint>> {
let (tx_key, additional_keys) = self.transaction_keys();
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
let (tx_key_pub, additional_keys_pub) = self.transaction_keys_pub();
debug_assert_eq!(additional_keys_pub.len(), additional_keys.len());
let mut res = Vec::with_capacity(self.payments.len());
for (i, payment) in self.payments.iter().enumerate() {
let addr = payment.address();
let key_to_use =
if addr.is_subaddress() { additional_keys.get(i).unwrap_or(&tx_key) } else { &tx_key };
let ecdh = match payment {
// If we don't have the private view key, use the key dedicated to this address (r A)
InternalPayment::Payment(_, _) |
InternalPayment::Change(ChangeEnum::AddressOnly { .. }) => {
Zeroizing::new(key_to_use.deref() * addr.view())
}
// If we do have the private view key, use it with the transaction public key (a R)
InternalPayment::Change(ChangeEnum::Standard { view_pair, .. }) => {
Zeroizing::new(view_pair.view.deref() * tx_key_pub)
}
InternalPayment::Change(ChangeEnum::Guaranteed { view_pair, .. }) => {
Zeroizing::new(view_pair.0.view.deref() * tx_key_pub)
}
};
res.push(ecdh);
}
res
}
// Calculate the shared keys and the necessary derivations.
pub(crate) fn shared_key_derivations(
&self,
key_images: &[EdwardsPoint],
) -> Vec<Zeroizing<SharedKeyDerivations>> {
let ecdhs = self.ecdhs();
let uniqueness = SharedKeyDerivations::uniqueness(&self.inputs(key_images));
let mut res = Vec::with_capacity(self.payments.len());
for (i, (payment, ecdh)) in self.payments.iter().zip(ecdhs).enumerate() {
let addr = payment.address();
res.push(SharedKeyDerivations::output_derivations(
addr.is_guaranteed().then_some(uniqueness),
ecdh,
i,
));
}
res
}
// Calculate the payment ID XOR masks.
pub(crate) fn payment_id_xors(&self) -> Vec<[u8; 8]> {
let mut res = Vec::with_capacity(self.payments.len());
for ecdh in self.ecdhs() {
res.push(SharedKeyDerivations::payment_id_xor(ecdh));
}
res
}
// Calculate the public transaction keys (the public counterparts of the transaction_keys).
//
// These depend on the payments. Keys for payments to subaddresses use the subaddress's spend
// key as the generator.
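// Concretely, the cases below are:
// - No payments to subaddresses: the key is `r G`, with no additional keys
// - A single payment to a subaddress in a 2-output TX whose change we can view: the key is
//   `r B` (B being that subaddress's spend key), with no additional keys
// - Otherwise: the key is `r G`, with one additional key per payment (generated against the
//   destination's spend key if it's a subaddress, else against `G`)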
pub(crate) fn transaction_keys_pub(&self) -> (EdwardsPoint, Vec<EdwardsPoint>) {
let (tx_key, additional_keys) = self.transaction_keys();
debug_assert!(additional_keys.is_empty() || (additional_keys.len() == self.payments.len()));
// The single transaction key uses the subaddress's spend key as its generator
let has_payments_to_subaddresses = self.has_payments_to_subaddresses();
let should_use_additional_keys = self.should_use_additional_keys();
if has_payments_to_subaddresses && (!should_use_additional_keys) {
debug_assert_eq!(additional_keys.len(), 0);
let InternalPayment::Payment(addr, _) = self
.payments
.iter()
.find(|payment| matches!(payment, InternalPayment::Payment(_, _)))
.expect("payment to subaddress yet no payment")
else {
panic!("filtered payment wasn't a payment")
};
return (tx_key.deref() * addr.spend(), vec![]);
}
if should_use_additional_keys {
let mut additional_keys_pub = vec![];
for (additional_key, payment) in additional_keys.into_iter().zip(&self.payments) {
let addr = payment.address();
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/device/device_default.cpp#L308-L312
if addr.is_subaddress() {
additional_keys_pub.push(additional_key.deref() * addr.spend());
} else {
additional_keys_pub.push(additional_key.deref() * ED25519_BASEPOINT_TABLE)
}
}
return (tx_key.deref() * ED25519_BASEPOINT_TABLE, additional_keys_pub);
}
debug_assert!(!has_payments_to_subaddresses);
debug_assert!(!should_use_additional_keys);
(tx_key.deref() * ED25519_BASEPOINT_TABLE, vec![])
}
pub(crate) fn commitments_and_encrypted_amounts(
&self,
key_images: &[EdwardsPoint],
) -> Vec<(Commitment, EncryptedAmount)> {
let shared_key_derivations = self.shared_key_derivations(key_images);
let mut res = Vec::with_capacity(self.payments.len());
for (payment, shared_key_derivations) in self.payments.iter().zip(shared_key_derivations) {
let amount = match payment {
InternalPayment::Payment(_, amount) => *amount,
InternalPayment::Change(_) => {
let inputs = self.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
let payments = self
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(amount),
InternalPayment::Change(_) => None,
})
.sum::<u64>();
let necessary_fee = self.weight_and_necessary_fee().1;
// Safe since the constructor checked this TX has enough funds for itself
inputs - (payments + necessary_fee)
}
};
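// As a rough example, inputs totalling 10 XMR with a single 7 XMR payment and a 0.5 XMR fee
// leave a 2.5 XMR change amount here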
let commitment = Commitment::new(shared_key_derivations.commitment_mask(), amount);
let encrypted_amount = EncryptedAmount::Compact {
amount: shared_key_derivations.compact_amount_encryption(amount),
};
res.push((commitment, encrypted_amount));
}
res
}
pub(crate) fn sum_output_masks(&self, key_images: &[EdwardsPoint]) -> Scalar {
self
.commitments_and_encrypted_amounts(key_images)
.into_iter()
.map(|(commitment, _)| commitment.mask)
.sum()
}
}

View File

@@ -0,0 +1,197 @@
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use crate::{
io::write_varint,
extra::{MAX_TX_EXTRA_PADDING_COUNT, ExtraField, Extra},
};
// Tests derived from
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
// tests/unit_tests/test_tx_utils.cpp
// which is licensed
#[rustfmt::skip]
/*
Copyright (c) 2014-2022, The Monero Project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Parts of the project are originally copyright (c) 2012-2013 The Cryptonote
developers
Parts of the project are originally copyright (c) 2014 The Boolberry
developers, distributed under the MIT licence:
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
const PUB_KEY_BYTES: [u8; 33] = [
1, 30, 208, 98, 162, 133, 64, 85, 83, 112, 91, 188, 89, 211, 24, 131, 39, 154, 22, 228, 80, 63,
198, 141, 173, 111, 244, 183, 4, 149, 186, 140, 230,
];
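// The leading byte is the extra-field tag for a transaction public key (1); the remaining 32
// bytes are the compressed point parsed by `pub_key` below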
fn pub_key() -> EdwardsPoint {
CompressedEdwardsY(PUB_KEY_BYTES[1 .. PUB_KEY_BYTES.len()].try_into().expect("invalid pub key"))
.decompress()
.unwrap()
}
fn test_write_buf(extra: &Extra, buf: &[u8]) {
let mut w: Vec<u8> = vec![];
Extra::write(extra, &mut w).unwrap();
assert_eq!(buf, w);
}
#[test]
fn empty_extra() {
let buf: Vec<u8> = vec![];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert!(extra.0.is_empty());
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_size_1() {
let buf: Vec<u8> = vec![0];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Padding(1)]);
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_size_2() {
let buf: Vec<u8> = vec![0, 0];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Padding(2)]);
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_max_size() {
let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Padding(MAX_TX_EXTRA_PADDING_COUNT)]);
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_exceed_max_size() {
let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT + 1];
Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}
#[test]
fn invalid_padding_only() {
let buf: Vec<u8> = vec![0, 42];
Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}
#[test]
fn pub_key_only() {
let buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_nonce_only() {
let buf: Vec<u8> = vec![2, 1, 42];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Nonce(vec![42])]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_nonce_only_wrong_size() {
let mut buf: Vec<u8> = vec![0; 20];
buf[0] = 2;
buf[1] = 255;
Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}
#[test]
fn pub_key_and_padding() {
let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
buf.extend([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]);
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key()), ExtraField::Padding(76)]);
test_write_buf(&extra, &buf);
}
#[test]
fn pub_key_and_invalid_padding() {
let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
buf.extend([0, 1]);
Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}
#[test]
fn extra_mysterious_minergate_only() {
let buf: Vec<u8> = vec![222, 1, 42];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![42])]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_mysterious_minergate_only_large() {
let mut buf: Vec<u8> = vec![222];
write_varint(&512u64, &mut buf).unwrap();
buf.extend_from_slice(&vec![0; 512]);
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![0; 512])]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_mysterious_minergate_only_wrong_size() {
let mut buf: Vec<u8> = vec![0; 20];
buf[0] = 222;
buf[1] = 255;
Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap_err();
}
#[test]
fn extra_mysterious_minergate_and_pub_key() {
let mut buf: Vec<u8> = vec![222, 1, 42];
buf.extend(PUB_KEY_BYTES.to_vec());
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(
extra.0,
vec![ExtraField::MysteriousMinergate(vec![42]), ExtraField::PublicKey(pub_key())]
);
test_write_buf(&extra, &buf);
}

View File

@@ -0,0 +1 @@
mod extra;

View File

@@ -0,0 +1,144 @@
use core::ops::Deref;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, Scalar, EdwardsPoint};
use crate::{
primitives::keccak256_to_scalar,
address::{Network, AddressType, SubaddressIndex, MoneroAddress},
};
/// An error while working with a ViewPair.
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum ViewPairError {
/// The spend key was torsioned.
///
/// Torsioned spend keys are of questionable spendability. This library avoids that question by
/// rejecting such ViewPairs.
// CLSAG seems to support it if the challenge does a torsion clear, FCMP++ should ship with a
// torsion clear, yet it's not worth it to modify CLSAG sign to generate challenges until the
// torsion clears and ensure spendability (nor can we reasonably guarantee that in the future)
#[cfg_attr(feature = "std", error("torsioned spend key"))]
TorsionedSpendKey,
}
/// The pair of keys necessary to scan transactions.
///
/// This is composed of the public spend key and the private view key.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct ViewPair {
spend: EdwardsPoint,
pub(crate) view: Zeroizing<Scalar>,
}
impl ViewPair {
/// Create a new ViewPair.
pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> Result<Self, ViewPairError> {
if !spend.is_torsion_free() {
Err(ViewPairError::TorsionedSpendKey)?;
}
Ok(ViewPair { spend, view })
}
/// The public spend key for this ViewPair.
pub fn spend(&self) -> EdwardsPoint {
self.spend
}
/// The public view key for this ViewPair.
pub fn view(&self) -> EdwardsPoint {
self.view.deref() * ED25519_BASEPOINT_TABLE
}
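// The subaddress derivation scalar is m = H_s("SubAddr\0" || view key || account || address),
// as computed below; the subaddress's spend key is then B + m G, with v (B + m G) as its view key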
pub(crate) fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
keccak256_to_scalar(Zeroizing::new(
[
b"SubAddr\0".as_ref(),
Zeroizing::new(self.view.to_bytes()).as_ref(),
&index.account().to_le_bytes(),
&index.address().to_le_bytes(),
]
.concat(),
))
}
pub(crate) fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
let scalar = self.subaddress_derivation(index);
let spend = self.spend + (&scalar * ED25519_BASEPOINT_TABLE);
let view = self.view.deref() * spend;
(spend, view)
}
/// Derive a legacy address from this ViewPair.
///
/// Subaddresses SHOULD be used instead.
pub fn legacy_address(&self, network: Network) -> MoneroAddress {
MoneroAddress::new(network, AddressType::Legacy, self.spend, self.view())
}
/// Derive a legacy integrated address from this ViewPair.
///
/// Subaddresses SHOULD be used instead.
pub fn legacy_integrated_address(&self, network: Network, payment_id: [u8; 8]) -> MoneroAddress {
MoneroAddress::new(network, AddressType::LegacyIntegrated(payment_id), self.spend, self.view())
}
/// Derive a subaddress from this ViewPair.
pub fn subaddress(&self, network: Network, subaddress: SubaddressIndex) -> MoneroAddress {
let (spend, view) = self.subaddress_keys(subaddress);
MoneroAddress::new(network, AddressType::Subaddress, spend, view)
}
}
/// The pair of keys necessary to scan outputs immune to the burning bug.
///
/// This is composed of the public spend key and a non-zero private view key.
///
/// 'Guaranteed' outputs, or transaction outputs immune to the burning bug, are not officially specified
/// by the Monero project. They should only be used if necessary. No support outside of
/// monero-wallet is promised.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct GuaranteedViewPair(pub(crate) ViewPair);
impl GuaranteedViewPair {
/// Create a new GuaranteedViewPair.
pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> Result<Self, ViewPairError> {
ViewPair::new(spend, view).map(GuaranteedViewPair)
}
/// The public spend key for this GuaranteedViewPair.
pub fn spend(&self) -> EdwardsPoint {
self.0.spend()
}
/// The public view key for this GuaranteedViewPair.
pub fn view(&self) -> EdwardsPoint {
self.0.view()
}
/// Returns an address with the provided specification.
///
/// The returned address will be a featured address with the guaranteed flag set. These should
/// not be presumed to be interoperable with any other software.
pub fn address(
&self,
network: Network,
subaddress: Option<SubaddressIndex>,
payment_id: Option<[u8; 8]>,
) -> MoneroAddress {
let (spend, view) = if let Some(index) = subaddress {
self.0.subaddress_keys(index)
} else {
(self.spend(), self.view())
};
MoneroAddress::new(
network,
AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed: true },
spend,
view,
)
}
}

View File

@@ -0,0 +1,81 @@
use monero_serai::transaction::Transaction;
use monero_wallet::{rpc::Rpc, extra::MAX_ARBITRARY_DATA_SIZE, send::SendError};
mod runner;
test!(
add_single_data_less_than_max,
(
|_, mut builder: Builder, addr| async move {
let arbitrary_data = vec![b'\0'; MAX_ARBITRARY_DATA_SIZE - 1];
// Make sure we can add it to the TX
builder.add_data(arbitrary_data.clone()).unwrap();
builder.add_payment(addr, 5);
(builder.build().unwrap(), (arbitrary_data,))
},
|rpc, block, tx: Transaction, mut scanner: Scanner, data: (Vec<u8>,)| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.arbitrary_data()[0], data.0);
},
),
);
test!(
add_multiple_data_less_than_max,
(
|_, mut builder: Builder, addr| async move {
let mut data = vec![];
for b in 1 ..= 3 {
data.push(vec![b; MAX_ARBITRARY_DATA_SIZE - 1]);
}
// Add data multiple times
for data in &data {
builder.add_data(data.clone()).unwrap();
}
builder.add_payment(addr, 5);
(builder.build().unwrap(), data)
},
|rpc, block, tx: Transaction, mut scanner: Scanner, data: Vec<Vec<u8>>| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.arbitrary_data(), data);
},
),
);
test!(
add_single_data_more_than_max,
(
|_, mut builder: Builder, addr| async move {
// Make data that is larger than the maximum
let mut data = vec![b'a'; MAX_ARBITRARY_DATA_SIZE + 1];
// Make sure we get an error if we try to add it to the TX
assert_eq!(builder.add_data(data.clone()), Err(SendError::TooMuchArbitraryData));
// Reduce the data size and retry. The data will now be exactly 255 bytes long (including the
// added marker)
data.pop();
builder.add_data(data.clone()).unwrap();
builder.add_payment(addr, 5);
(builder.build().unwrap(), data)
},
|rpc, block, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.arbitrary_data(), vec![data]);
},
),
);

View File

@@ -0,0 +1,165 @@
use monero_simple_request_rpc::SimpleRequestRpc;
use monero_wallet::{
DEFAULT_LOCK_WINDOW,
transaction::Transaction,
rpc::{Rpc, DecoyRpc},
WalletOutput,
};
mod runner;
test!(
select_latest_output_as_decoy_canonical,
(
// First make an initial tx0
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 2000000000000);
output
},
),
(
// Then make a second tx1
|rct_type: RctType, rpc: SimpleRequestRpc, mut builder: Builder, addr, state: _| async move {
let output_tx0: WalletOutput = state;
let input = OutputWithDecoys::fingerprintable_deterministic_new(
&mut OsRng,
&rpc,
ring_len(rct_type),
rpc.get_height().await.unwrap(),
output_tx0.clone(),
)
.await
.unwrap();
builder.add_input(input);
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), (rct_type, output_tx0))
},
// Then make sure the decoy selection algorithm (DSA) selects the freshly unlocked output from
// tx1 as a decoy
|rpc, _, tx: Transaction, _: Scanner, state: (_, _)| async move {
use rand_core::OsRng;
let rpc: SimpleRequestRpc = rpc;
let height = rpc.get_height().await.unwrap();
let most_recent_o_index = rpc.get_o_indexes(tx.hash()).await.unwrap().pop().unwrap();
// Make sure output from tx1 is in the block in which it unlocks
let out_tx1 = rpc.get_outs(&[most_recent_o_index]).await.unwrap().swap_remove(0);
assert_eq!(out_tx1.height, height - DEFAULT_LOCK_WINDOW);
assert!(out_tx1.unlocked);
// Select decoys using spendable output from tx0 as the real, and make sure DSA selects
// the freshly unlocked output from tx1 as a decoy
let (rct_type, output_tx0): (RctType, WalletOutput) = state;
let mut selected_fresh_decoy = false;
let mut attempts = 1000;
while !selected_fresh_decoy && attempts > 0 {
let decoys = OutputWithDecoys::fingerprintable_deterministic_new(
&mut OsRng, // TODO: use a seeded RNG to consistently select the latest output
&rpc,
ring_len(rct_type),
height,
output_tx0.clone(),
)
.await
.unwrap()
.decoys()
.clone();
selected_fresh_decoy = decoys.positions().contains(&most_recent_o_index);
attempts -= 1;
}
assert!(selected_fresh_decoy);
assert_eq!(height, rpc.get_height().await.unwrap());
},
),
);
test!(
select_latest_output_as_decoy,
(
// First make an initial tx0
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|rpc: SimpleRequestRpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 2000000000000);
output
},
),
(
// Then make a second tx1
|rct_type: RctType, rpc, mut builder: Builder, addr, output_tx0: WalletOutput| async move {
let rpc: SimpleRequestRpc = rpc;
let input = OutputWithDecoys::new(
&mut OsRng,
&rpc,
ring_len(rct_type),
rpc.get_height().await.unwrap(),
output_tx0.clone(),
)
.await
.unwrap();
builder.add_input(input);
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), (rct_type, output_tx0))
},
// Then make sure the DSA selects the freshly unlocked output from tx1 as a decoy
|rpc, _, tx: Transaction, _: Scanner, state: (_, _)| async move {
use rand_core::OsRng;
let rpc: SimpleRequestRpc = rpc;
let height = rpc.get_height().await.unwrap();
let most_recent_o_index = rpc.get_o_indexes(tx.hash()).await.unwrap().pop().unwrap();
// Make sure output from tx1 is in the block in which it unlocks
let out_tx1 = rpc.get_outs(&[most_recent_o_index]).await.unwrap().swap_remove(0);
assert_eq!(out_tx1.height, height - DEFAULT_LOCK_WINDOW);
assert!(out_tx1.unlocked);
// Select decoys using spendable output from tx0 as the real, and make sure DSA selects
// the freshly unlocked output from tx1 as a decoy
let (rct_type, output_tx0): (RctType, WalletOutput) = state;
let mut selected_fresh_decoy = false;
let mut attempts = 1000;
while !selected_fresh_decoy && attempts > 0 {
let decoys = OutputWithDecoys::new(
&mut OsRng, // TODO: use a seeded RNG to consistently select the latest output
&rpc,
ring_len(rct_type),
height,
output_tx0.clone(),
)
.await
.unwrap()
.decoys()
.clone();
selected_fresh_decoy = decoys.positions().contains(&most_recent_o_index);
attempts -= 1;
}
assert!(selected_fresh_decoy);
assert_eq!(height, rpc.get_height().await.unwrap());
},
),
);

View File

@@ -0,0 +1,80 @@
use curve25519_dalek::constants::ED25519_BASEPOINT_POINT;
use monero_serai::transaction::Transaction;
use monero_wallet::{
rpc::Rpc,
address::{AddressType, MoneroAddress},
};
mod runner;
test!(
eventuality,
(
|_, mut builder: Builder, _| async move {
// Add a standard address, a payment ID address, a subaddress, and a guaranteed address
// Each has its own slight implications for eventualities
builder.add_payment(
MoneroAddress::new(
Network::Mainnet,
AddressType::Legacy,
ED25519_BASEPOINT_POINT,
ED25519_BASEPOINT_POINT,
),
1,
);
builder.add_payment(
MoneroAddress::new(
Network::Mainnet,
AddressType::LegacyIntegrated([0xaa; 8]),
ED25519_BASEPOINT_POINT,
ED25519_BASEPOINT_POINT,
),
2,
);
builder.add_payment(
MoneroAddress::new(
Network::Mainnet,
AddressType::Subaddress,
ED25519_BASEPOINT_POINT,
ED25519_BASEPOINT_POINT,
),
3,
);
builder.add_payment(
MoneroAddress::new(
Network::Mainnet,
AddressType::Featured { subaddress: false, payment_id: None, guaranteed: true },
ED25519_BASEPOINT_POINT,
ED25519_BASEPOINT_POINT,
),
4,
);
let tx = builder.build().unwrap();
let eventuality = Eventuality::from(tx.clone());
assert_eq!(
eventuality,
Eventuality::read::<&[u8]>(&mut eventuality.serialize().as_ref()).unwrap()
);
(tx, eventuality)
},
|_, _, mut tx: Transaction, _, eventuality: Eventuality| async move {
// 4 explicitly-added outputs and one change output
assert_eq!(tx.prefix().outputs.len(), 5);
// The eventuality's available extra should be the actual TX's
assert_eq!(tx.prefix().extra, eventuality.extra());
// The TX should match
assert!(eventuality.matches(&tx.clone().into()));
// Mutate the TX
let Transaction::V2 { proofs: Some(ref mut proofs), .. } = tx else {
panic!("TX wasn't RingCT")
};
proofs.base.commitments[0] += ED25519_BASEPOINT_POINT;
// Verify it no longer matches
assert!(!eventuality.matches(&tx.clone().into()));
},
),
);

View File

@@ -0,0 +1,82 @@
use zeroize::{Zeroize, Zeroizing};
use monero_wallet::{
ringct::RctType,
rpc::FeeRate,
address::MoneroAddress,
OutputWithDecoys,
send::{Change, SendError, SignableTransaction},
extra::MAX_ARBITRARY_DATA_SIZE,
};
/// A builder for Monero transactions.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct SignableTransactionBuilder {
rct_type: RctType,
outgoing_view_key: Zeroizing<[u8; 32]>,
inputs: Vec<OutputWithDecoys>,
payments: Vec<(MoneroAddress, u64)>,
change: Change,
data: Vec<Vec<u8>>,
fee_rate: FeeRate,
}
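// A rough usage sketch, assuming an already-selected input, destination address, and fee rate
// (all provided by the surrounding test runner):
//   let mut builder = SignableTransactionBuilder::new(rct_type, outgoing_view_key, change, fee_rate);
//   builder.add_input(input);
//   builder.add_payment(addr, 5);
//   let tx = builder.build().unwrap();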
impl SignableTransactionBuilder {
pub fn new(
rct_type: RctType,
outgoing_view_key: Zeroizing<[u8; 32]>,
change: Change,
fee_rate: FeeRate,
) -> Self {
Self {
rct_type,
outgoing_view_key,
inputs: vec![],
payments: vec![],
change,
data: vec![],
fee_rate,
}
}
pub fn add_input(&mut self, input: OutputWithDecoys) -> &mut Self {
self.inputs.push(input);
self
}
#[allow(unused)]
pub fn add_inputs(&mut self, inputs: &[OutputWithDecoys]) -> &mut Self {
self.inputs.extend(inputs.iter().cloned());
self
}
pub fn add_payment(&mut self, dest: MoneroAddress, amount: u64) -> &mut Self {
self.payments.push((dest, amount));
self
}
#[allow(unused)]
pub fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) -> &mut Self {
self.payments.extend(payments);
self
}
#[allow(unused)]
pub fn add_data(&mut self, data: Vec<u8>) -> Result<&mut Self, SendError> {
if data.len() > MAX_ARBITRARY_DATA_SIZE {
Err(SendError::TooMuchArbitraryData)?;
}
self.data.push(data);
Ok(self)
}
pub fn build(self) -> Result<SignableTransaction, SendError> {
SignableTransaction::new(
self.rct_type,
self.outgoing_view_key,
self.inputs,
self.payments,
self.change,
self.data,
self.fee_rate,
)
}
}

View File

@@ -0,0 +1,355 @@
use core::ops::Deref;
use std_shims::sync::OnceLock;
use zeroize::Zeroizing;
use rand_core::OsRng;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
use tokio::sync::Mutex;
use monero_simple_request_rpc::SimpleRequestRpc;
use monero_wallet::{
ringct::RctType,
transaction::Transaction,
block::Block,
rpc::{Rpc, FeeRate},
address::{Network, AddressType, MoneroAddress},
DEFAULT_LOCK_WINDOW, ViewPair, GuaranteedViewPair, WalletOutput, Scanner,
};
mod builder;
pub use builder::SignableTransactionBuilder;
pub fn ring_len(rct_type: RctType) -> usize {
match rct_type {
RctType::ClsagBulletproof => 11,
RctType::ClsagBulletproofPlus => 16,
_ => panic!("ring size unknown for RctType"),
}
}
pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) {
let spend = Scalar::random(&mut OsRng);
let spend_pub = &spend * ED25519_BASEPOINT_TABLE;
let view = Zeroizing::new(Scalar::random(&mut OsRng));
(
spend,
ViewPair::new(spend_pub, view.clone()).unwrap(),
MoneroAddress::new(
Network::Mainnet,
AddressType::Legacy,
spend_pub,
view.deref() * ED25519_BASEPOINT_TABLE,
),
)
}
#[allow(unused)]
pub fn random_guaranteed_address() -> (Scalar, GuaranteedViewPair, MoneroAddress) {
let spend = Scalar::random(&mut OsRng);
let spend_pub = &spend * ED25519_BASEPOINT_TABLE;
let view = Zeroizing::new(Scalar::random(&mut OsRng));
(
spend,
GuaranteedViewPair::new(spend_pub, view.clone()).unwrap(),
MoneroAddress::new(
Network::Mainnet,
AddressType::Legacy,
spend_pub,
view.deref() * ED25519_BASEPOINT_TABLE,
),
)
}
// TODO: Support transactions already on-chain
// TODO: Don't have a side effect of mining more blocks than needed under race conditions
pub async fn mine_until_unlocked(
rpc: &SimpleRequestRpc,
addr: &MoneroAddress,
tx_hash: [u8; 32],
) -> Block {
// Mine until the TX is in a block
let mut height = rpc.get_height().await.unwrap();
let mut found = false;
let mut block = None;
while !found {
let inner_block = rpc.get_block_by_number(height - 1).await.unwrap();
found = match inner_block.transactions.iter().find(|&&x| x == tx_hash) {
Some(_) => {
block = Some(inner_block);
true
}
None => {
height = rpc.generate_blocks(addr, 1).await.unwrap().1 + 1;
false
}
}
}
// Mine until tx's outputs are unlocked
for _ in 0 .. (DEFAULT_LOCK_WINDOW - 1) {
rpc.generate_blocks(addr, 1).await.unwrap();
}
block.unwrap()
}
// Mines 60 blocks and returns an unlocked miner TX output.
#[allow(dead_code)]
pub async fn get_miner_tx_output(rpc: &SimpleRequestRpc, view: &ViewPair) -> WalletOutput {
let mut scanner = Scanner::new(view.clone());
// Mine 60 blocks to unlock a miner TX
let start = rpc.get_height().await.unwrap();
rpc.generate_blocks(&view.legacy_address(Network::Mainnet), 60).await.unwrap();
let block = rpc.get_block_by_number(start).await.unwrap();
scanner.scan(rpc, &block).await.unwrap().ignore_additional_timelock().swap_remove(0)
}
/// Make sure the weight and fee match the expected calculation.
pub fn check_weight_and_fee(tx: &Transaction, fee_rate: FeeRate) {
let Transaction::V2 { proofs: Some(ref proofs), .. } = tx else { panic!("TX wasn't RingCT") };
let fee = proofs.base.fee;
let weight = tx.weight();
let expected_weight = fee_rate.calculate_weight_from_fee(fee);
assert_eq!(weight, expected_weight);
let expected_fee = fee_rate.calculate_fee_from_weight(weight);
assert_eq!(fee, expected_fee);
}
pub async fn rpc() -> SimpleRequestRpc {
let rpc =
SimpleRequestRpc::new("http://serai:seraidex@127.0.0.1:18081".to_string()).await.unwrap();
const BLOCKS_TO_MINE: usize = 110;
// Only run once
if rpc.get_height().await.unwrap() > BLOCKS_TO_MINE {
return rpc;
}
let addr = MoneroAddress::new(
Network::Mainnet,
AddressType::Legacy,
&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE,
&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE,
);
// Mine enough blocks to ensure decoy availability
rpc.generate_blocks(&addr, BLOCKS_TO_MINE).await.unwrap();
rpc
}
pub static SEQUENTIAL: OnceLock<Mutex<()>> = OnceLock::new();
#[macro_export]
macro_rules! async_sequential {
($(async fn $name: ident() $body: block)*) => {
$(
#[tokio::test]
async fn $name() {
let guard = runner::SEQUENTIAL.get_or_init(|| tokio::sync::Mutex::new(())).lock().await;
let local = tokio::task::LocalSet::new();
local.run_until(async move {
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
drop(guard);
Err(err).unwrap()
}
}).await;
}
)*
}
}
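// `test!` expands to a sequential #[tokio::test] which funds a fresh wallet with a miner output,
// builds/signs/publishes the first transaction (as a single signer and, with the `multisig`
// feature, via FROST), mines until it unlocks, runs its checks, then repeats for each following
// (builder, checks) pair with the returned state carried forward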
#[macro_export]
macro_rules! test {
(
$name: ident,
(
$first_tx: expr,
$first_checks: expr,
),
$((
$tx: expr,
$checks: expr,
)$(,)?),*
) => {
async_sequential! {
async fn $name() {
use core::{ops::Deref, any::Any};
#[cfg(feature = "multisig")]
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
#[cfg(feature = "multisig")]
use frost::{
curve::Ed25519,
Participant,
tests::{THRESHOLD, key_gen},
};
use monero_wallet::{
ringct::RctType,
rpc::FeePriority,
address::Network,
ViewPair, Scanner, OutputWithDecoys,
send::{Change, SignableTransaction, Eventuality},
};
use runner::{
SignableTransactionBuilder, ring_len, random_address, rpc, mine_until_unlocked,
get_miner_tx_output, check_weight_and_fee,
};
type Builder = SignableTransactionBuilder;
// Run each function as both a single signer and as a multisig
#[allow(clippy::redundant_closure_call)]
for multisig in [false, true] {
// Only run the multisig variant if multisig is enabled
if multisig {
#[cfg(not(feature = "multisig"))]
continue;
}
let spend = Zeroizing::new(Scalar::random(&mut OsRng));
#[cfg(feature = "multisig")]
let keys = key_gen::<_, Ed25519>(&mut OsRng);
let spend_pub = if !multisig {
spend.deref() * ED25519_BASEPOINT_TABLE
} else {
#[cfg(not(feature = "multisig"))]
panic!("Multisig branch called without the multisig feature");
#[cfg(feature = "multisig")]
keys[&Participant::new(1).unwrap()].group_key().0
};
let rpc = rpc().await;
let view_priv = Zeroizing::new(Scalar::random(&mut OsRng));
let mut outgoing_view = Zeroizing::new([0; 32]);
OsRng.fill_bytes(outgoing_view.as_mut());
let view = ViewPair::new(spend_pub, view_priv.clone()).unwrap();
let addr = view.legacy_address(Network::Mainnet);
let miner_tx = get_miner_tx_output(&rpc, &view).await;
let rct_type = match rpc.get_hardfork_version().await.unwrap() {
14 => RctType::ClsagBulletproof,
15 | 16 => RctType::ClsagBulletproofPlus,
_ => panic!("unrecognized hardfork version"),
};
let builder = SignableTransactionBuilder::new(
rct_type,
outgoing_view,
Change::new(
ViewPair::new(
&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE,
Zeroizing::new(Scalar::random(&mut OsRng))
).unwrap(),
None,
),
rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(),
);
let sign = |tx: SignableTransaction| {
let spend = spend.clone();
#[cfg(feature = "multisig")]
let keys = keys.clone();
assert_eq!(&SignableTransaction::read(&mut tx.serialize().as_slice()).unwrap(), &tx);
let eventuality = Eventuality::from(tx.clone());
let tx = if !multisig {
tx.sign(&mut OsRng, &spend).unwrap()
} else {
#[cfg(not(feature = "multisig"))]
panic!("multisig branch called without the multisig feature");
#[cfg(feature = "multisig")]
{
let mut machines = HashMap::new();
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
machines.insert(i, tx.clone().multisig(&keys[&i]).unwrap());
}
frost::tests::sign_without_caching(&mut OsRng, machines, &[])
}
};
assert_eq!(&eventuality.extra(), &tx.prefix().extra, "eventuality extra was distinct");
assert!(eventuality.matches(&tx.clone().into()), "eventuality didn't match");
tx
};
// TODO: Generate a distinct wallet for each transaction to prevent overlap
let next_addr = addr;
let temp = Box::new({
let mut builder = builder.clone();
let input = OutputWithDecoys::fingerprintable_deterministic_new(
&mut OsRng,
&rpc,
ring_len(rct_type),
rpc.get_height().await.unwrap(),
miner_tx,
).await.unwrap();
builder.add_input(input);
let (tx, state) = ($first_tx)(rpc.clone(), builder, next_addr).await;
let fee_rate = tx.fee_rate().clone();
let signed = sign(tx);
rpc.publish_transaction(&signed).await.unwrap();
let block =
mine_until_unlocked(&rpc, &random_address().2, signed.hash()).await;
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
check_weight_and_fee(&tx, fee_rate);
let scanner = Scanner::new(view.clone());
($first_checks)(rpc.clone(), block, tx, scanner, state).await
});
#[allow(unused_variables, unused_mut, unused_assignments)]
let mut carried_state: Box<dyn Any> = temp;
$(
let (tx, state) = ($tx)(
rct_type,
rpc.clone(),
builder.clone(),
next_addr,
*carried_state.downcast().unwrap()
).await;
let fee_rate = tx.fee_rate().clone();
let signed = sign(tx);
rpc.publish_transaction(&signed).await.unwrap();
let block =
mine_until_unlocked(&rpc, &random_address().2, signed.hash()).await;
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
if stringify!($name) != "spend_one_input_to_two_outputs_no_change" {
// Skip the weight and fee check for the above test because, when there is no change output,
// the amount which would have been change is added to the fee
check_weight_and_fee(&tx, fee_rate);
}
#[allow(unused_assignments)]
{
let scanner = Scanner::new(view.clone());
carried_state = Box::new(($checks)(rpc.clone(), block, tx, scanner, state).await);
}
)*
}
}
}
}
}

View File

@@ -0,0 +1,166 @@
use monero_serai::transaction::Transaction;
use monero_wallet::{rpc::Rpc, address::SubaddressIndex, extra::PaymentId, GuaranteedScanner};
mod runner;
test!(
scan_standard_address,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::new(view.clone());
builder.add_payment(view.legacy_address(Network::Mainnet), 5);
(builder.build().unwrap(), scanner)
},
|rpc, block, tx: Transaction, _, mut state: Scanner| async move {
let output = state.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
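// With no payment ID specified for this destination, the sender attaches a dummy encrypted
// payment ID, which decrypts to all zeroes for the recipient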
let dummy_payment_id = PaymentId::Encrypted([0u8; 8]);
assert_eq!(output.payment_id(), Some(dummy_payment_id));
},
),
);
test!(
scan_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(0, 1).unwrap();
let view = runner::random_address().1;
let mut scanner = Scanner::new(view.clone());
scanner.register_subaddress(subaddress);
builder.add_payment(view.subaddress(Network::Mainnet, subaddress), 5);
(builder.build().unwrap(), (scanner, subaddress))
},
|rpc, block, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
let output =
state.0.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.subaddress(), Some(state.1));
},
),
);
test!(
scan_integrated_address,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::new(view.clone());
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(view.legacy_integrated_address(Network::Mainnet, payment_id), 5);
(builder.build().unwrap(), (scanner, payment_id))
},
|rpc, block, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output =
state.0.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(state.1)));
},
),
);
test!(
scan_guaranteed,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_guaranteed_address().1;
let scanner = GuaranteedScanner::new(view.clone());
builder.add_payment(view.address(Network::Mainnet, None, None), 5);
(builder.build().unwrap(), scanner)
},
|rpc, block, tx: Transaction, _, mut scanner: GuaranteedScanner| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.subaddress(), None);
},
),
);
test!(
scan_guaranteed_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(0, 2).unwrap();
let view = runner::random_guaranteed_address().1;
let mut scanner = GuaranteedScanner::new(view.clone());
scanner.register_subaddress(subaddress);
builder.add_payment(view.address(Network::Mainnet, Some(subaddress), None), 5);
(builder.build().unwrap(), (scanner, subaddress))
},
|rpc, block, tx: Transaction, _, mut state: (GuaranteedScanner, SubaddressIndex)| async move {
let output =
state.0.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.subaddress(), Some(state.1));
},
),
);
test!(
scan_guaranteed_integrated,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_guaranteed_address().1;
let scanner = GuaranteedScanner::new(view.clone());
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(view.address(Network::Mainnet, None, Some(payment_id)), 5);
(builder.build().unwrap(), (scanner, payment_id))
},
|rpc, block, tx: Transaction, _, mut state: (GuaranteedScanner, [u8; 8])| async move {
let output =
state.0.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(state.1)));
},
),
);
#[rustfmt::skip]
test!(
scan_guaranteed_integrated_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(0, 3).unwrap();
let view = runner::random_guaranteed_address().1;
let mut scanner = GuaranteedScanner::new(view.clone());
scanner.register_subaddress(subaddress);
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(view.address(Network::Mainnet, Some(subaddress), Some(payment_id)), 5);
(builder.build().unwrap(), (scanner, payment_id, subaddress))
},
|
rpc,
block,
tx: Transaction,
_,
mut state: (GuaranteedScanner, [u8; 8], SubaddressIndex),
| async move {
let output = state.0.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(state.1)));
assert_eq!(output.subaddress(), Some(state.2));
},
),
);

View File

@@ -0,0 +1,394 @@
use std::collections::HashSet;
use rand_core::OsRng;
use monero_simple_request_rpc::SimpleRequestRpc;
use monero_wallet::{
ringct::RctType, transaction::Transaction, rpc::Rpc, address::SubaddressIndex, extra::Extra,
WalletOutput, OutputWithDecoys,
};
mod runner;
use runner::{SignableTransactionBuilder, ring_len};
// Set up inputs, select decoys, then add them to the TX builder
async fn add_inputs(
rct_type: RctType,
rpc: &SimpleRequestRpc,
outputs: Vec<WalletOutput>,
builder: &mut SignableTransactionBuilder,
) {
for output in outputs {
builder.add_input(
OutputWithDecoys::fingerprintable_deterministic_new(
&mut OsRng,
rpc,
ring_len(rct_type),
rpc.get_height().await.unwrap(),
output,
)
.await
.unwrap(),
);
}
}
test!(
spend_miner_output,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 5);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 5);
},
),
);
test!(
spend_multiple_outputs,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let mut outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 2);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[1].transaction(), tx.hash());
outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
assert_eq!(outputs[0].commitment().amount, 1000000000000);
assert_eq!(outputs[1].commitment().amount, 2000000000000);
outputs
},
),
(
|rct_type: RctType, rpc, mut builder: Builder, addr, outputs: Vec<WalletOutput>| async move {
add_inputs(rct_type, &rpc, outputs, &mut builder).await;
builder.add_payment(addr, 6);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 6);
},
),
);
test!(
// Ideally, this would be single_R, yet it isn't feasible to apply allow(non_snake_case) here
single_r_subaddress_send,
(
// Consume this builder for an output we can use in the future
// This is needed because we can't get the input from the passed in builder
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[0].commitment().amount, 1000000000000);
outputs
},
),
(
|rct_type, rpc: SimpleRequestRpc, _, _, outputs: Vec<WalletOutput>| async move {
use monero_wallet::rpc::FeePriority;
let view_priv = Zeroizing::new(Scalar::random(&mut OsRng));
let mut outgoing_view = Zeroizing::new([0; 32]);
OsRng.fill_bytes(outgoing_view.as_mut());
let change_view =
ViewPair::new(&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, view_priv.clone())
.unwrap();
let mut builder = SignableTransactionBuilder::new(
rct_type,
outgoing_view,
Change::new(change_view.clone(), None),
rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(),
);
add_inputs(rct_type, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await;
// Send to a subaddress
let sub_view = ViewPair::new(
&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE,
Zeroizing::new(Scalar::random(&mut OsRng)),
)
.unwrap();
builder
.add_payment(sub_view.subaddress(Network::Mainnet, SubaddressIndex::new(0, 1).unwrap()), 1);
(builder.build().unwrap(), (change_view, sub_view))
},
|rpc, block, tx: Transaction, _, views: (ViewPair, ViewPair)| async move {
// Make sure the change can pick up its output
let mut change_scanner = Scanner::new(views.0);
assert!(
change_scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().len() == 1
);
// Make sure the subaddress can pick up its output
let mut sub_scanner = Scanner::new(views.1);
sub_scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap());
let sub_outputs = sub_scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert!(sub_outputs.len() == 1);
assert_eq!(sub_outputs[0].transaction(), tx.hash());
assert_eq!(sub_outputs[0].commitment().amount, 1);
assert!(sub_outputs[0].subaddress().unwrap().account() == 0);
assert!(sub_outputs[0].subaddress().unwrap().address() == 1);
// Make sure only one R was included in TX extra
assert!(Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref())
.unwrap()
.keys()
.unwrap()
.1
.is_none());
},
),
);
test!(
spend_one_input_to_one_output_plus_change,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[0].commitment().amount, 2000000000000);
outputs
},
),
(
|rct_type: RctType, rpc, mut builder: Builder, addr, outputs: Vec<WalletOutput>| async move {
add_inputs(rct_type, &rpc, outputs, &mut builder).await;
builder.add_payment(addr, 2);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let output =
scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
assert_eq!(output.commitment().amount, 2);
},
),
);
test!(
spend_max_outputs,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[0].commitment().amount, 1000000000000);
outputs
},
),
(
|rct_type: RctType, rpc, mut builder: Builder, addr, outputs: Vec<WalletOutput>| async move {
add_inputs(rct_type, &rpc, outputs, &mut builder).await;
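// 15 explicit payments, plus the change output, reach the 16-output limit of a Monero transaction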
for i in 0 .. 15 {
builder.add_payment(addr, i + 1);
}
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let mut scanned_tx = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
let mut output_amounts = HashSet::new();
for i in 0 .. 15 {
output_amounts.insert(i + 1);
}
for _ in 0 .. 15 {
let output = scanned_tx.swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
let amount = output.commitment().amount;
assert!(output_amounts.remove(&amount));
}
assert_eq!(output_amounts.len(), 0);
},
),
);
test!(
spend_max_outputs_to_subaddresses,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[0].commitment().amount, 1000000000000);
outputs
},
),
(
|rct_type: RctType, rpc, mut builder: Builder, _, outputs: Vec<WalletOutput>| async move {
add_inputs(rct_type, &rpc, outputs, &mut builder).await;
let view = runner::random_address().1;
let mut scanner = Scanner::new(view.clone());
let mut subaddresses = vec![];
for i in 0 .. 15 {
let subaddress = SubaddressIndex::new(0, i + 1).unwrap();
scanner.register_subaddress(subaddress);
builder.add_payment(view.subaddress(Network::Mainnet, subaddress), u64::from(i + 1));
subaddresses.push(subaddress);
}
(builder.build().unwrap(), (scanner, subaddresses))
},
|rpc, block, tx: Transaction, _, mut state: (Scanner, Vec<SubaddressIndex>)| async move {
use std::collections::HashMap;
let mut scanned_tx = state.0.scan(&rpc, &block).await.unwrap().not_additionally_locked();
let mut output_amounts_by_subaddress = HashMap::new();
for i in 0 .. 15 {
output_amounts_by_subaddress.insert(u64::try_from(i + 1).unwrap(), state.1[i]);
}
for _ in 0 .. 15 {
let output = scanned_tx.swap_remove(0);
assert_eq!(output.transaction(), tx.hash());
let amount = output.commitment().amount;
assert_eq!(
output.subaddress().unwrap(),
output_amounts_by_subaddress.remove(&amount).unwrap()
);
}
assert_eq!(output_amounts_by_subaddress.len(), 0);
},
),
);
test!(
spend_one_input_to_two_outputs_no_change,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[0].commitment().amount, 1000000000000);
outputs
},
),
(
|rct_type, rpc: SimpleRequestRpc, _, addr, outputs: Vec<WalletOutput>| async move {
use monero_wallet::rpc::FeePriority;
let mut outgoing_view = Zeroizing::new([0; 32]);
OsRng.fill_bytes(outgoing_view.as_mut());
let mut builder = SignableTransactionBuilder::new(
rct_type,
outgoing_view,
Change::fingerprintable(None),
rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(),
);
add_inputs(rct_type, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await;
builder.add_payment(addr, 10000);
builder.add_payment(addr, 50000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let mut outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 2);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[1].transaction(), tx.hash());
outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
assert_eq!(outputs[0].commitment().amount, 10000);
assert_eq!(outputs[1].commitment().amount, 50000);
// The remainder should get shunted to fee, which is fingerprintable
let Transaction::V2 { proofs: Some(ref proofs), .. } = tx else { panic!("TX wasn't RingCT") };
assert_eq!(proofs.base.fee, 1000000000000 - 10000 - 50000);
},
),
);
test!(
subaddress_change,
(
// Consume this builder for an output we can use in the future
// This is needed because we can't get the input from the passed-in builder
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), ())
},
|rpc, block, tx: Transaction, mut scanner: Scanner, ()| async move {
let outputs = scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].transaction(), tx.hash());
assert_eq!(outputs[0].commitment().amount, 1000000000000);
outputs
},
),
(
|rct_type, rpc: SimpleRequestRpc, _, _, outputs: Vec<WalletOutput>| async move {
use monero_wallet::rpc::FeePriority;
let view_priv = Zeroizing::new(Scalar::random(&mut OsRng));
let mut outgoing_view = Zeroizing::new([0; 32]);
OsRng.fill_bytes(outgoing_view.as_mut());
let change_view =
ViewPair::new(&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE, view_priv.clone())
.unwrap();
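// Route the change to subaddress (0, 1) of the change ViewPair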
let mut builder = SignableTransactionBuilder::new(
rct_type,
outgoing_view,
Change::new(change_view.clone(), Some(SubaddressIndex::new(0, 1).unwrap())),
rpc.get_fee_rate(FeePriority::Unimportant).await.unwrap(),
);
add_inputs(rct_type, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await;
// Send to a random address
let view = ViewPair::new(
&Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE,
Zeroizing::new(Scalar::random(&mut OsRng)),
)
.unwrap();
builder.add_payment(view.legacy_address(Network::Mainnet), 1);
(builder.build().unwrap(), change_view)
},
|rpc, block, _, _, change_view: ViewPair| async move {
// Make sure the change can pick up its output
let mut change_scanner = Scanner::new(change_view);
change_scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap());
let outputs = change_scanner.scan(&rpc, &block).await.unwrap().not_additionally_locked();
assert!(outputs.len() == 1);
assert!(outputs[0].subaddress().unwrap().account() == 0);
assert!(outputs[0].subaddress().unwrap().address() == 1);
},
),
);

View File

@@ -0,0 +1,366 @@
use rand_core::{OsRng, RngCore};
use serde::Deserialize;
use serde_json::json;
use monero_simple_request_rpc::SimpleRequestRpc;
use monero_wallet::{
transaction::Transaction,
rpc::Rpc,
address::{Network, SubaddressIndex, MoneroAddress},
extra::{MAX_ARBITRARY_DATA_SIZE, Extra, PaymentId},
Scanner,
};
mod runner;
#[derive(Clone, Copy, PartialEq, Eq)]
enum AddressSpec {
Legacy,
LegacyIntegrated([u8; 8]),
Subaddress(SubaddressIndex),
}
#[derive(Deserialize, Debug)]
struct EmptyResponse {}
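// Ask monero-wallet-rpc to build a legacy integrated address embedding the given payment ID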
async fn make_integrated_address(rpc: &SimpleRequestRpc, payment_id: [u8; 8]) -> String {
#[derive(Debug, Deserialize)]
struct IntegratedAddressResponse {
integrated_address: String,
}
let res = rpc
.json_rpc_call::<IntegratedAddressResponse>(
"make_integrated_address",
Some(json!({ "payment_id": hex::encode(payment_id) })),
)
.await
.unwrap();
res.integrated_address
}
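// Create a fresh monero-wallet-rpc wallet, fund it by mining to its primary address, and return
// both RPC handles alongside that address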
async fn initialize_rpcs() -> (SimpleRequestRpc, SimpleRequestRpc, MoneroAddress) {
let wallet_rpc = SimpleRequestRpc::new("http://127.0.0.1:18082".to_string()).await.unwrap();
let daemon_rpc = runner::rpc().await;
#[derive(Debug, Deserialize)]
struct AddressResponse {
address: String,
}
let mut wallet_id = [0; 8];
OsRng.fill_bytes(&mut wallet_id);
let _: EmptyResponse = wallet_rpc
.json_rpc_call(
"create_wallet",
Some(json!({ "filename": hex::encode(wallet_id), "language": "English" })),
)
.await
.unwrap();
let address: AddressResponse =
wallet_rpc.json_rpc_call("get_address", Some(json!({ "account_index": 0 }))).await.unwrap();
// Fund the new wallet
let address = MoneroAddress::from_str(Network::Mainnet, &address.address).unwrap();
daemon_rpc.generate_blocks(&address, 70).await.unwrap();
(wallet_rpc, daemon_rpc, address)
}
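// Have the wallet-rpc wallet send to an address we control, then check our Scanner recovers the
// output with the expected subaddress/payment ID metadata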
async fn from_wallet_rpc_to_self(spec: AddressSpec) {
// initialize rpc
let (wallet_rpc, daemon_rpc, wallet_rpc_addr) = initialize_rpcs().await;
// make an addr
let (_, view_pair, _) = runner::random_address();
let addr = match spec {
AddressSpec::Legacy => view_pair.legacy_address(Network::Mainnet),
AddressSpec::LegacyIntegrated(payment_id) => {
view_pair.legacy_integrated_address(Network::Mainnet, payment_id)
}
AddressSpec::Subaddress(index) => view_pair.subaddress(Network::Mainnet, index),
};
// refresh & make a tx
let _: EmptyResponse = wallet_rpc.json_rpc_call("refresh", None).await.unwrap();
#[derive(Debug, Deserialize)]
struct TransferResponse {
tx_hash: String,
}
let tx: TransferResponse = wallet_rpc
.json_rpc_call(
"transfer",
Some(json!({
"destinations": [{"address": addr.to_string(), "amount": 1_000_000_000_000u64 }],
})),
)
.await
.unwrap();
let tx_hash = hex::decode(tx.tx_hash).unwrap().try_into().unwrap();
// TODO: Needs https://github.com/monero-project/monero/pull/9260
// let fee_rate = daemon_rpc
// .get_fee_rate(FeePriority::Unimportant)
// .await
// .unwrap();
// unlock it
let block = runner::mine_until_unlocked(&daemon_rpc, &wallet_rpc_addr, tx_hash).await;
// Create the scanner
let mut scanner = Scanner::new(view_pair);
if let AddressSpec::Subaddress(index) = spec {
scanner.register_subaddress(index);
}
// Retrieve it and scan it
let output =
scanner.scan(&daemon_rpc, &block).await.unwrap().not_additionally_locked().swap_remove(0);
assert_eq!(output.transaction(), tx_hash);
// TODO: Needs https://github.com/monero-project/monero/pull/9260
// runner::check_weight_and_fee(&tx, fee_rate);
match spec {
AddressSpec::Subaddress(index) => {
assert_eq!(output.subaddress(), Some(index));
assert_eq!(output.payment_id(), Some(PaymentId::Encrypted([0u8; 8])));
}
AddressSpec::LegacyIntegrated(payment_id) => {
assert_eq!(output.payment_id(), Some(PaymentId::Encrypted(payment_id)));
assert_eq!(output.subaddress(), None);
}
AddressSpec::Legacy => {
assert_eq!(output.subaddress(), None);
assert_eq!(output.payment_id(), Some(PaymentId::Encrypted([0u8; 8])));
}
}
assert_eq!(output.commitment().amount, 1000000000000);
}
async_sequential!(
async fn receipt_of_wallet_rpc_tx_standard() {
from_wallet_rpc_to_self(AddressSpec::Legacy).await;
}
async fn receipt_of_wallet_rpc_tx_subaddress() {
from_wallet_rpc_to_self(AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap())).await;
}
async fn receipt_of_wallet_rpc_tx_integrated() {
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
from_wallet_rpc_to_self(AddressSpec::LegacyIntegrated(payment_id)).await;
}
);
#[derive(PartialEq, Eq, Debug, Deserialize)]
struct Index {
major: u32,
minor: u32,
}
#[derive(Debug, Deserialize)]
struct Transfer {
payment_id: String,
subaddr_index: Index,
amount: u64,
}
#[derive(Debug, Deserialize)]
struct TransfersResponse {
transfer: Transfer,
transfers: Vec<Transfer>,
}
test!(
send_to_wallet_rpc_standard,
(
|_, mut builder: Builder, _| async move {
// initialize rpc
let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await;
// add destination
builder.add_payment(wallet_rpc_addr, 1000000);
(builder.build().unwrap(), wallet_rpc)
},
|_, _, tx: Transaction, _, data: SimpleRequestRpc| async move {
// confirm receipt
let _: EmptyResponse = data.json_rpc_call("refresh", None).await.unwrap();
let transfer: TransfersResponse = data
.json_rpc_call("get_transfer_by_txid", Some(json!({ "txid": hex::encode(tx.hash()) })))
.await
.unwrap();
assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 });
assert_eq!(transfer.transfer.amount, 1000000);
assert_eq!(transfer.transfer.payment_id, hex::encode([0u8; 8]));
},
),
);
test!(
send_to_wallet_rpc_subaddress,
(
|_, mut builder: Builder, _| async move {
// initialize rpc
let (wallet_rpc, _, _) = initialize_rpcs().await;
// make a new account, whose primary address is a subaddress
#[derive(Debug, Deserialize)]
struct AccountResponse {
address: String,
account_index: u32,
}
let addr: AccountResponse = wallet_rpc.json_rpc_call("create_account", None).await.unwrap();
assert!(addr.account_index != 0);
builder
.add_payment(MoneroAddress::from_str(Network::Mainnet, &addr.address).unwrap(), 1000000);
(builder.build().unwrap(), (wallet_rpc, addr.account_index))
},
|_, _, tx: Transaction, _, data: (SimpleRequestRpc, u32)| async move {
// confirm receipt
let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap();
let transfer: TransfersResponse = data
.0
.json_rpc_call(
"get_transfer_by_txid",
Some(json!({ "txid": hex::encode(tx.hash()), "account_index": data.1 })),
)
.await
.unwrap();
assert_eq!(transfer.transfer.subaddr_index, Index { major: data.1, minor: 0 });
assert_eq!(transfer.transfer.amount, 1000000);
assert_eq!(transfer.transfer.payment_id, hex::encode([0u8; 8]));
// Make sure only one R was included in TX extra
assert!(Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref())
.unwrap()
.keys()
.unwrap()
.1
.is_none());
},
),
);
test!(
send_to_wallet_rpc_subaddresses,
(
|_, mut builder: Builder, _| async move {
// initialize rpc
let (wallet_rpc, daemon_rpc, _) = initialize_rpcs().await;
// make the subaddresses
#[derive(Debug, Deserialize)]
struct AddressesResponse {
addresses: Vec<String>,
address_index: u32,
}
let addrs: AddressesResponse = wallet_rpc
.json_rpc_call("create_address", Some(json!({ "account_index": 0, "count": 2 })))
.await
.unwrap();
assert!(addrs.address_index != 0);
assert!(addrs.addresses.len() == 2);
builder.add_payments(&[
(MoneroAddress::from_str(Network::Mainnet, &addrs.addresses[0]).unwrap(), 1000000),
(MoneroAddress::from_str(Network::Mainnet, &addrs.addresses[1]).unwrap(), 2000000),
]);
(builder.build().unwrap(), (wallet_rpc, daemon_rpc, addrs.address_index))
},
|_, _, tx: Transaction, _, data: (SimpleRequestRpc, SimpleRequestRpc, u32)| async move {
// confirm receipt
let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap();
let transfer: TransfersResponse = data
.0
.json_rpc_call(
"get_transfer_by_txid",
Some(json!({ "txid": hex::encode(tx.hash()), "account_index": 0 })),
)
.await
.unwrap();
assert_eq!(transfer.transfers.len(), 2);
for t in transfer.transfers {
match t.amount {
1000000 => assert_eq!(t.subaddr_index, Index { major: 0, minor: data.2 }),
2000000 => assert_eq!(t.subaddr_index, Index { major: 0, minor: data.2 + 1 }),
_ => unreachable!(),
}
}
// Make sure 3 additional pub keys are included in TX extra
let keys =
Extra::read::<&[u8]>(&mut tx.prefix().extra.as_ref()).unwrap().keys().unwrap().1.unwrap();
assert_eq!(keys.len(), 3);
},
),
);
test!(
send_to_wallet_rpc_integrated,
(
|_, mut builder: Builder, _| async move {
// initialize rpc
let (wallet_rpc, _, _) = initialize_rpcs().await;
// make the addr
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
let addr = make_integrated_address(&wallet_rpc, payment_id).await;
builder.add_payment(MoneroAddress::from_str(Network::Mainnet, &addr).unwrap(), 1000000);
(builder.build().unwrap(), (wallet_rpc, payment_id))
},
|_, _, tx: Transaction, _, data: (SimpleRequestRpc, [u8; 8])| async move {
// confirm receipt
let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap();
let transfer: TransfersResponse = data
.0
.json_rpc_call("get_transfer_by_txid", Some(json!({ "txid": hex::encode(tx.hash()) })))
.await
.unwrap();
assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 });
assert_eq!(transfer.transfer.payment_id, hex::encode(data.1));
assert_eq!(transfer.transfer.amount, 1000000);
},
),
);
test!(
send_to_wallet_rpc_with_arb_data,
(
|_, mut builder: Builder, _| async move {
// initialize rpc
let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await;
// add destination
builder.add_payment(wallet_rpc_addr, 1000000);
// Add two pieces of arbitrary data, each at the full MAX_ARBITRARY_DATA_SIZE
for _ in 0 .. 2 {
let data = vec![b'a'; MAX_ARBITRARY_DATA_SIZE];
builder.add_data(data).unwrap();
}
(builder.build().unwrap(), wallet_rpc)
},
|_, _, tx: Transaction, _, data: SimpleRequestRpc| async move {
// confirm receipt
let _: EmptyResponse = data.json_rpc_call("refresh", None).await.unwrap();
let transfer: TransfersResponse = data
.json_rpc_call("get_transfer_by_txid", Some(json!({ "txid": hex::encode(tx.hash()) })))
.await
.unwrap();
assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 });
assert_eq!(transfer.transfer.amount, 1000000);
},
),
);

View File

@@ -0,0 +1,51 @@
[package]
name = "monero-wallet-util"
version = "0.1.0"
description = "Additional utility functions for monero-wallet"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/util"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }
monero-wallet = { path = "..", default-features = false }
monero-seed = { path = "../seed", default-features = false }
polyseed = { path = "../polyseed", default-features = false }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"rand_core/std",
"monero-wallet/std",
"monero-seed/std",
"polyseed/std",
]
compile-time-generators = ["monero-wallet/compile-time-generators"]
multisig = ["monero-wallet/multisig", "std"]
default = ["std", "compile-time-generators"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,25 @@
# Monero Wallet Utilities
Additional utility functions for monero-wallet.
This library is isolated because it adds a notable number of dependencies to
the tree, and so it can be subject to a distinct versioning policy. This
library may undergo breaking API changes more frequently.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Features
- Support for Monero's seed algorithm
- Support for Polyseed
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `compile-time-generators` (on by default): Derives the generators at
compile-time so they don't need to be derived at runtime. This is recommended
if program size doesn't need to be kept minimal.
- `multisig`: Adds support for creation of transactions using a threshold
multisignature wallet.
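### Example
A minimal sketch of generating a seed and restoring it from its mnemonic (a
sketch only: it assumes an RNG such as `OsRng` is available, which requires
enabling `rand_core`'s `getrandom` feature):
```rust,ignore
use rand_core::OsRng; // assumes rand_core's "getrandom" feature is enabled
use monero_wallet_util::seed::{polyseed, Seed, SeedType};

// Generate a fresh Polyseed-format seed
let seed_type = SeedType::Polyseed(polyseed::Language::English);
let seed = Seed::new(&mut OsRng, seed_type);

// Round-trip it through its mnemonic representation
let words = seed.to_string();
let restored = Seed::from_string(seed_type, words).expect("mnemonic didn't parse");
assert_eq!(*seed.key(), *restored.key());
```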

View File

@@ -0,0 +1,9 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
pub use monero_wallet::*;
/// Seed creation and parsing functionality.
pub mod seed;

View File

@@ -0,0 +1,150 @@
use core::fmt;
use std_shims::string::String;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};
pub use monero_seed as original;
pub use polyseed;
use original::{SeedError as OriginalSeedError, Seed as OriginalSeed};
use polyseed::{PolyseedError, Polyseed};
/// An error from working with seeds.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum SeedError {
/// The seed was invalid.
#[cfg_attr(feature = "std", error("invalid seed"))]
InvalidSeed,
/// The entropy was invalid.
#[cfg_attr(feature = "std", error("invalid entropy"))]
InvalidEntropy,
/// The checksum did not match the data.
#[cfg_attr(feature = "std", error("invalid checksum"))]
InvalidChecksum,
/// Unsupported features were enabled.
#[cfg_attr(feature = "std", error("unsupported features"))]
UnsupportedFeatures,
}
impl From<OriginalSeedError> for SeedError {
fn from(error: OriginalSeedError) -> SeedError {
match error {
OriginalSeedError::DeprecatedEnglishWithChecksum | OriginalSeedError::InvalidChecksum => {
SeedError::InvalidChecksum
}
OriginalSeedError::InvalidSeed => SeedError::InvalidSeed,
}
}
}
impl From<PolyseedError> for SeedError {
fn from(error: PolyseedError) -> SeedError {
match error {
PolyseedError::UnsupportedFeatures => SeedError::UnsupportedFeatures,
PolyseedError::InvalidEntropy => SeedError::InvalidEntropy,
PolyseedError::InvalidSeed => SeedError::InvalidSeed,
PolyseedError::InvalidChecksum => SeedError::InvalidChecksum,
}
}
}
/// The type of the seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SeedType {
/// The seed format originally used by Monero.
Original(monero_seed::Language),
/// Polyseed.
Polyseed(polyseed::Language),
}
/// A seed, internally either the original format or a Polyseed.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub enum Seed {
/// The originally formatted seed.
Original(OriginalSeed),
/// A Polyseed.
Polyseed(Polyseed),
}
impl fmt::Debug for Seed {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Seed::Original(_) => f.debug_struct("Seed::Original").finish_non_exhaustive(),
Seed::Polyseed(_) => f.debug_struct("Seed::Polyseed").finish_non_exhaustive(),
}
}
}
impl Seed {
/// Create a new seed.
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, seed_type: SeedType) -> Seed {
match seed_type {
SeedType::Original(lang) => Seed::Original(OriginalSeed::new(rng, lang)),
SeedType::Polyseed(lang) => Seed::Polyseed(Polyseed::new(rng, lang)),
}
}
/// Parse a seed from a string.
pub fn from_string(seed_type: SeedType, words: Zeroizing<String>) -> Result<Seed, SeedError> {
match seed_type {
SeedType::Original(lang) => Ok(OriginalSeed::from_string(lang, words).map(Seed::Original)?),
SeedType::Polyseed(lang) => Ok(Polyseed::from_string(lang, words).map(Seed::Polyseed)?),
}
}
/// Create a seed from entropy.
///
/// A birthday may be optionally provided, denoted in seconds since the epoch. For
/// SeedType::Original, it will be ignored. For SeedType::Polyseed, it'll be embedded into the
/// seed.
///
/// For SeedType::Polyseed, the last 13 bytes of `entropy` must be 0.
// TODO: Return Result, not Option
pub fn from_entropy(
seed_type: SeedType,
entropy: Zeroizing<[u8; 32]>,
birthday: Option<u64>,
) -> Option<Seed> {
match seed_type {
SeedType::Original(lang) => OriginalSeed::from_entropy(lang, entropy).map(Seed::Original),
SeedType::Polyseed(lang) => {
Polyseed::from(lang, 0, birthday.unwrap_or(0), entropy).ok().map(Seed::Polyseed)
}
}
}
/// Convert the seed to a string.
pub fn to_string(&self) -> Zeroizing<String> {
match self {
Seed::Original(seed) => seed.to_string(),
Seed::Polyseed(seed) => seed.to_string(),
}
}
/// Get the entropy for this seed.
pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
match self {
Seed::Original(seed) => seed.entropy(),
Seed::Polyseed(seed) => seed.entropy().clone(),
}
}
/// Get the key derived from this seed.
pub fn key(&self) -> Zeroizing<[u8; 32]> {
match self {
// Original does not differentiate between its entropy and its key
Seed::Original(seed) => seed.entropy(),
Seed::Polyseed(seed) => seed.key(),
}
}
/// Get the birthday of this seed, denoted in seconds since the epoch.
pub fn birthday(&self) -> u64 {
match self {
Seed::Original(_) => 0,
Seed::Polyseed(seed) => seed.birthday(),
}
}
}
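// A minimal usage sketch for `Seed::from_entropy`, kept as a comment (it assumes `OsRng` is
// available on the target; the birthday value is arbitrary, purely for illustration):
//
//   use rand_core::{RngCore, OsRng};
//   use zeroize::Zeroizing;
//
//   // Polyseed requires the last 13 bytes of entropy to be 0, per the documentation above,
//   // so only the leading bytes are randomized here
//   let mut entropy = Zeroizing::new([0; 32]);
//   OsRng.fill_bytes(&mut entropy[.. 16]);
//   let seed = Seed::from_entropy(
//     SeedType::Polyseed(polyseed::Language::English),
//     entropy,
//     Some(1_700_000_000), // birthday, in seconds since the epoch
//   )
//   .expect("entropy didn't meet Polyseed's requirements");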

View File

@@ -0,0 +1,3 @@
// TODO
#[test]
fn test() {}