mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00
commit e0a9e8825d6c22c797fb84e26ed6ef10136ca9c2 Author: Luke Parker <lukeparker5132@gmail.com> Date: Fri Jan 6 04:24:08 2023 -0500 Remove Scanner::address It either needed to return an Option, panic on misconfiguration, or return a distinct Scanner type based on burning bug immunity to offer this API properly. Panicking wouldn't be proper, and the Option<Address> would've been... awkward. The new register_subaddress function, maintaining the needed functionality, also provides further clarity on the intended side effect of the previously present Scanner::address function. commit 7359360ab2fc8c9255c6f58250c214252ce217a4 Author: Luke Parker <lukeparker5132@gmail.com> Date: Fri Jan 6 01:35:02 2023 -0500 fmt/clippy from last commit commit 80d912fc19cd268f3b019a9d9961a48b2c45e828 Author: Luke Parker <lukeparker5132@gmail.com> Date: Thu Jan 5 19:36:49 2023 -0500 Add Substrate "assets" pallet While over-engineered for our purposes, it's still usable. Also cleans the runtime a bit. commit 2ed2944b6598d75bdc3c995aaf39b717846207de Author: Luke Parker <lukeparker5132@gmail.com> Date: Wed Jan 4 23:09:58 2023 -0500 Remove the timestamp pallet It was needed for contracts, which has since been removed. We now no longer need it. commit 7fc1fc2dccecebe1d94cb7b4c00f2b5cb271c87b Author: Luke Parker <lukeparker5132@gmail.com> Date: Wed Jan 4 22:52:41 2023 -0500 Initial validator sets pallet (#187) * Initial work on a Validator Sets pallet * Update Validator Set docs per current discussions * Update validator-sets primitives and storage handling * Add validator set pallets to deny.toml * Remove Curve from primitives Since we aren't reusing keys across coins, there's no reason for it to be on-chain (as previously planned). * Update documentation on Validator Sets * Use Twox64Concat instead of Identity Ensures an even distribution of keys. While xxhash is breakable, these keys aren't manipulatable by users. 
* Add math ops on Amount and define a coin as 1e8 * Add validator-sets to the runtime and remove contracts Also removes the randomness pallet which was only required by the contracts runtime. Does not remove the contracts folder yet so they can still be referred to while validator-sets is under development. Does remove them from Cargo.toml. * Add vote function to validator-sets * Remove contracts folder * Create an event for the Validator Sets pallet * Remove old contracts crates from deny.toml * Remove line from staking branch * Remove staking from runtime * Correct VS Config in runtime * cargo update * Resolve a few PR comments on terminology * Create a serai-primitives crate Move types such as Amount/Coin out of validator-sets. Will be expanded in the future. * Fixes for last commit * Don't reserve set 0 * Further fixes * Add files meant for last commit * Remove Staking transfer commit 3309295911d22177bd68972d138aea2f8658eb5f Author: Luke Parker <lukeparker5132@gmail.com> Date: Wed Jan 4 06:17:00 2023 -0500 Reorder coins in README by market cap commit db5d19cad33ccf067d876b7f5b7cca47c228e2fc Author: Luke Parker <lukeparker5132@gmail.com> Date: Wed Jan 4 06:07:58 2023 -0500 Update README commit 606484d744b1c6cc408382994c77f1def25d3e7d Author: Luke Parker <lukeparker5132@gmail.com> Date: Wed Jan 4 03:17:36 2023 -0500 cargo update commit3a319b229fAuthor: akildemir <aeg_asd@hotmail.com> Date: Wed Jan 4 16:26:25 2023 +0300 update address public API design commitd9fa88fa76Author: akildemir <aeg_asd@hotmail.com> Date: Mon Jan 2 13:35:06 2023 +0300 fix clippy error commitcc722e897bMerge:cafa9b3eeca440Author: akildemir <aeg_asd@hotmail.com> Date: Mon Jan 2 11:39:04 2023 +0300 Merge https://github.com/serai-dex/serai into develop commitcafa9b361eAuthor: akildemir <aeg_asd@hotmail.com> Date: Mon Jan 2 11:38:26 2023 +0300 fix build errors commitce5b5f2b37Merge:f502d6749c4acfAuthor: akildemir <aeg_asd@hotmail.com> Date: Sun Jan 1 15:16:25 2023 +0300 Merge 
https://github.com/serai-dex/serai into develop commit f502d67282 Author: akildemir <aeg_asd@hotmail.com> Date: Thu Dec 22 13:13:09 2022 +0300 fix PR issues commit 26ffb226d4 Author: akildemir <aeg_asd@hotmail.com> Date: Thu Dec 22 13:11:43 2022 +0300 remove extraneous RPC call commit 0e829f8531 Author: akildemir <aeg_asd@hotmail.com> Date: Thu Dec 15 13:56:53 2022 +0300 add scan tests commit 5123c7f121 Author: akildemir <aeg_asd@hotmail.com> Date: Thu Dec 15 13:56:13 2022 +0300 add new address functions & comments
230 lines
7.4 KiB
Rust
230 lines
7.4 KiB
Rust
use core::ops::Deref;
|
|
use std::collections::{HashSet, HashMap};
|
|
|
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
|
|
|
use curve25519_dalek::{
|
|
constants::ED25519_BASEPOINT_TABLE,
|
|
scalar::Scalar,
|
|
edwards::{EdwardsPoint, CompressedEdwardsY},
|
|
};
|
|
|
|
use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};
|
|
|
|
mod extra;
|
|
pub(crate) use extra::{PaymentId, ExtraField, Extra};
|
|
|
|
/// Address encoding and decoding functionality.
|
|
pub mod address;
|
|
use address::{Network, AddressType, AddressSpec, AddressMeta, MoneroAddress};
|
|
|
|
mod scan;
|
|
pub use scan::{ReceivedOutput, SpendableOutput};
|
|
|
|
pub(crate) mod decoys;
|
|
pub(crate) use decoys::Decoys;
|
|
|
|
mod send;
|
|
pub use send::{Fee, TransactionError, SignableTransaction, SignableTransactionBuilder};
|
|
#[cfg(feature = "multisig")]
|
|
pub use send::TransactionMachine;
|
|
|
|
// Orders key images by their compressed (canonical) byte serialization, descending.
fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> std::cmp::Ordering {
  let x_bytes = x.compress().to_bytes();
  let y_bytes = y.compress().to_bytes();
  // Comparing y against x yields the descending order the original expressed via .reverse()
  y_bytes.cmp(&x_bytes)
}
|
|
|
|
// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
|
|
pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
|
|
let mut u = b"uniqueness".to_vec();
|
|
for input in inputs {
|
|
match input {
|
|
// If Gen, this should be the only input, making this loop somewhat pointless
|
|
// This works and even if there were somehow multiple inputs, it'd be a false negative
|
|
Input::Gen(height) => {
|
|
write_varint(height, &mut u).unwrap();
|
|
}
|
|
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
|
|
}
|
|
}
|
|
hash(&u)
|
|
}
|
|
|
|
// Hs("view_tag" || 8Ra || o), Hs(8Ra || o), and H(8Ra || 0x8d) with uniqueness inclusion in the
|
|
// Scalar as an option
|
|
#[allow(non_snake_case)]
|
|
pub(crate) fn shared_key(
|
|
uniqueness: Option<[u8; 32]>,
|
|
s: &Scalar,
|
|
P: &EdwardsPoint,
|
|
o: usize,
|
|
) -> (u8, Scalar, [u8; 8]) {
|
|
// 8Ra
|
|
let mut output_derivation = (s * P).mul_by_cofactor().compress().to_bytes().to_vec();
|
|
// || o
|
|
write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();
|
|
|
|
let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];
|
|
let mut payment_id_xor = [0; 8];
|
|
payment_id_xor
|
|
.copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
|
|
|
|
// uniqueness ||
|
|
let shared_key = if let Some(uniqueness) = uniqueness {
|
|
[uniqueness.as_ref(), &output_derivation].concat().to_vec()
|
|
} else {
|
|
output_derivation
|
|
};
|
|
|
|
(view_tag, hash_to_scalar(&shared_key), payment_id_xor)
|
|
}
|
|
|
|
pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
|
|
let mut amount_mask = b"amount".to_vec();
|
|
amount_mask.extend(key.to_bytes());
|
|
(amount ^ u64::from_le_bytes(hash(&amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
|
|
}
|
|
|
|
fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
|
|
u64::from_le_bytes(amount_encryption(u64::from_le_bytes(amount), key))
|
|
}
|
|
|
|
pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
|
|
let mut mask = b"commitment_mask".to_vec();
|
|
mask.extend(shared_key.to_bytes());
|
|
hash_to_scalar(&mask)
|
|
}
|
|
|
|
/// The private view key and public spend key, enabling scanning transactions.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct ViewPair {
  // Public spend key
  spend: EdwardsPoint,
  // Private view key, wrapped in Zeroizing so its bytes are wiped on drop
  view: Zeroizing<Scalar>,
}
|
|
|
|
impl ViewPair {
  /// Create a ViewPair from a public spend key and a private view key.
  pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> ViewPair {
    ViewPair { spend, view }
  }

  // Hs("SubAddr\0" || view key || account index || address index): the scalar offsetting the
  // spend key for this subaddress. (0, 0) is the root address, which has no offset.
  fn subaddress_derivation(&self, index: (u32, u32)) -> Scalar {
    if index == (0, 0) {
      return Scalar::zero();
    }

    // Zeroizing wrappers wipe the view-key-derived buffers once hashed
    hash_to_scalar(&Zeroizing::new(
      [
        b"SubAddr\0".as_ref(),
        Zeroizing::new(self.view.to_bytes()).as_ref(),
        &index.0.to_le_bytes(),
        &index.1.to_le_bytes(),
      ]
      .concat(),
    ))
  }

  // Derive the public (spend, view) keys for the specified subaddress.
  // Returns None for (0, 0), which is the root address rather than a subaddress.
  fn subaddress_keys(&self, index: (u32, u32)) -> Option<(EdwardsPoint, EdwardsPoint)> {
    if index == (0, 0) {
      return None;
    }

    let scalar = self.subaddress_derivation(index);
    // Subaddress spend key: base spend key plus the derivation scalar times the basepoint
    let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
    // Subaddress view key: private view scalar times the subaddress spend key
    let view = self.view.deref() * spend;
    Some((spend, view))
  }

  /// Returns an address with the provided specification.
  pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
    // Default to the root keys; subaddress specs overwrite these below
    let mut spend = self.spend;
    let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;

    // construct the address meta
    let meta = match spec {
      AddressSpec::Standard => AddressMeta::new(network, AddressType::Standard),
      AddressSpec::Integrated(payment_id) => {
        AddressMeta::new(network, AddressType::Integrated(payment_id))
      }
      AddressSpec::Subaddress(i1, i2) => {
        if let Some(keys) = self.subaddress_keys((i1, i2)) {
          (spend, view) = keys;
          AddressMeta::new(network, AddressType::Subaddress)
        } else {
          // subaddress_keys returns None only for (0, 0), which is the standard address
          AddressMeta::new(network, AddressType::Standard)
        }
      }
      AddressSpec::Featured(subaddress, payment_id, guaranteed) => {
        let mut is_subaddress = false;
        // The outer Option is whether a subaddress was requested; the inner is whether the
        // requested index is actually a subaddress (None for (0, 0))
        if let Some(Some(keys)) = subaddress.map(|subaddress| self.subaddress_keys(subaddress)) {
          (spend, view) = keys;
          is_subaddress = true;
        }
        AddressMeta::new(network, AddressType::Featured(is_subaddress, payment_id, guaranteed))
      }
    };

    MoneroAddress::new(meta, spend, view)
  }
}
|
|
|
|
/// Transaction scanner.
/// This scanner is capable of generating subaddresses, additionally scanning for them once they've
/// been explicitly generated. If the burning bug is attempted, any secondary outputs will be
/// ignored.
#[derive(Clone)]
pub struct Scanner {
  // Keys this scanner scans with
  pair: ViewPair,
  // Map of subaddress spend key (compressed) to the index it was registered under; from_view
  // seeds it with the root spend key at (0, 0)
  pub(crate) subaddresses: HashMap<CompressedEdwardsY, (u32, u32)>,
  // Previously-seen output keys, used to reject key reuse (the burning bug); None when the
  // scanner uses the burning-bug-immune derivation (see from_view)
  pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
}
|
|
|
|
impl Zeroize for Scanner {
  // Wipe the view pair, the subaddress table, and the burning-bug set.
  fn zeroize(&mut self) {
    self.pair.zeroize();

    // These may not be effective, unfortunately
    for (mut key, mut value) in self.subaddresses.drain() {
      key.zeroize();
      value.zeroize();
    }
    // take() yields an owned Option, so bind by value; the prior `ref mut` borrowed an owned
    // temporary for no benefit
    if let Some(mut burning_bug) = self.burning_bug.take() {
      for mut output in burning_bug.drain() {
        output.zeroize();
      }
    }
  }
}
|
|
|
|
impl Drop for Scanner {
  fn drop(&mut self) {
    // Wipe key material before the memory is released
    self.zeroize();
  }
}
|
|
|
|
// Marker impl: sound because the Drop impl above calls zeroize
impl ZeroizeOnDrop for Scanner {}
|
|
|
|
impl Scanner {
  /// Create a Scanner from a ViewPair.
  ///
  /// burning_bug is a HashSet of previously used output keys, present to prevent key reuse
  /// which would burn funds. Every output key successfully scanned MUST be saved to disk, and
  /// ALL saved output keys must be passed in whenever a new scanner is created, or the scanner
  /// is insecure. Passing None instead selects a modified shared key derivation immune to the
  /// burning bug (specifically the Guaranteed feature from Featured Addresses).
  // TODO: Should this take in a DB access handle to ensure output keys are saved?
  pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
    // The root address, (0, 0), is always scanned for
    let subaddresses = HashMap::from([(pair.spend.compress(), (0, 0))]);
    Scanner { pair, subaddresses, burning_bug }
  }

  /// Register a subaddress so this scanner will recognize outputs sent to it.
  // There used to be an address function here, yet it wasn't safe. It could generate addresses
  // incompatible with the Scanner. While we could return None for that, then we have the issue
  // of runtime failures to generate an address.
  // Removing that API was the simplest option.
  pub fn register_subaddress(&mut self, subaddress: (u32, u32)) {
    match self.pair.subaddress_keys(subaddress) {
      Some((spend, _)) => {
        self.subaddresses.insert(spend.compress(), subaddress);
      }
      // subaddress_keys returns None solely for (0, 0), which from_view already registered
      None => {}
    }
  }
}
|