mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 20:29:23 +00:00
* serai Dockerfile & Makefile fixed * added new bitcoin mod & bitcoinhram * couple changes * added odd&even check for bitcoin signing * sign message updated * print_keys commented out * fixed signing process * Added new bitcoin library & added most of bitcoin processor logic * added new crate and refactored the bitcoin coin library * added signing test function * moved signature.rs * publish set to false * tests moved back to the root * added new functions to rpc * added utxo test * added new rpc methods and refactored bitcoin processor * added spendable output & fixed errors & added new logic for sighash & opened port 18443 for bitcoin docker * changed tweak keys * added tweak_keys & publish transaction and refactored bitcoin processor * added new structs and fixed problems for testing purposes * reverted dockerfile back its original * reverted block generation of bitcoin to 5 seconds * deleted unnecessary test function * added new sighash & added new dbg messages & fixed couple errors * fixed couple issue & removed unused functions * fix for signing process * crypto file for bitcoin refactored * disabled test_send & removed some of the debug logs * signing implemented & transaction weight calculation added & change address logic added * refactored tweak_keys * refactored mine_block & fixed change_address logic * implemented new traits to bitcoin processor& refactored bitcoin processor * added new line to tests file * added new line to bitcoin's wallet.rs * deleted Cargo.toml from coins folder * edited bitcoin's Cargo.toml and added LICENSE * added new line to bitcoin's Cargo.toml * added spaces * added spaces * deleted unnecessary object * added spaces * deleted patch numbers * updated sha256 parameter for message * updated tag as const * deleted unnecessary brackets and imports * updated rpc.rs to 2 space indent * deleted unnecessary brackers * deleted unnecessary brackets * changed it to explicit * updated to explicit * deleted unnecessary parsing * added ? 
for easy return * updated imports * updated height to number * deleted unnecessary brackets * updated clsag to sig & to_vec to as_ref * updated _sig to schnorr_signature * deleted unnecessary variable * updated Cargo.toml of processor and bitcoin * updated imports of bitcoin processor * updated MBlock to BBlock * updated MSignable to BSignable * updated imports * deleted mask from Fee * updated get_block function return * updated comparison logic for scripts * updated assert to debug_assert * updated height to number * updated txid logic * updated tweak_keys definition * updated imports * deleted new line * delete HashMap from monero * deleted old test code parts * updated test amount to a round number * changed the test code part back to its original * updated imports of rpc.rs * deleted unnecessary return assignments * deleted get_fee_per_byte * deleted create_raw_transaction * deleted fund_raw_transaction * deleted sign transaction rpc * delete verify_message rpc * deleted get_balance * deleted decode_raw_transaction rpc * deleted list_transactions rpc * changed test_send to p2wpkh * updated imports of test_send * fixed imports of test_send * updated bitcoin's mine_block function * updated bitcoin's test_send * updated bitcoin's hram and test_signing * deleted 2 rpc function (is_confirmed & get_transaction_block_number) * deleted get_raw_transaction_hex * deleted get_raw_transaction_info * deleted new_address * deleted test_mempool_accept * updated remove(0) to remove(index) * deleted ger_raw_transaction * deleted RawTx trait and converted type to Transaction * reverted raw_hex feature back * added NotEnoughFunds to CoinError * changed Sighash to all * removed lifetime of RpcParams * changed pub to pub(crate) & changed sig_hash line * changed taproot_key_spend_signature_hash to internal * added Clone to RpcError & deleted get_utxo_for * changed to_hex to as_bytes for weight calculation * updated SpendableOutput * deleted unnecessary parentheses * updated 
serialize of Output s id field * deleted unused crate & added lazy_static * updated RPC init function * added lazy_static for TAG_HASH & updated imported crates * changed get_block_index to get_block_number * deleted get_block_info * updated get_height to get_latest_block_number * removed GetBlockWithDetailResult and get_block_with_transactions * deleted unnecessary imports from rpc_helper * removed lock and unlock_unspent * deleted get_transactions and get_transaction and renamed get_raw_transaction to get_transaction * updated opt_into_json * changed payment_address and amount to output_script and amount for transcript * refactored error logic for rpc & deleted anyhow crate * added a dedicated file for json helper functions * refactored imports and deleted unused code * added clippy::non_snake_case * removed unused Error items * added new line to Cargo * rekmoved Block and used bitcoin::Block direcetly * removed added println and futures.len check * removed HashMap from coin mod.rs * updated Testnet to Regtest * removed unnecessary variable * updated as_str to & * removed RawTx trait * added newline * changed test transaction to p2pkh * updated test_send * updated test_send * updated test_send * reformatted bitcoin processor * moved sighash logic into signmachine * removed generate_to_address * added test_address function to bitcoin processor * updated RpcResponse to enum and added Clone trait * removed old RpcResponse * updated shared_key to internal_key * updated fee part * updated test_send block logic * added a test function for getting spendables * updated tweaking keys logic * updated calculate_weight logic * added todo for BitcoinSchnorr Algorithm * updated calculate_weight * updated calculate_weight * updated calculate_weight * added a TODO for bitcoin's signing process * removed unused code * Finish merging develop * cargo fmt * cargo machete * Handle most clippy lints on bitcoin Doesn't handle the unused transcript due to pending cryptographic 
considerations. * Rearrange imports and clippy tests * Misc processor lint * Update deny.toml * Remove unnecessary RPC code * updated test_send * added bitcoin ci & updated test-dependencies yml * fixed bitcoin ci * updated bitcoin ci yml * Remove mining from the bitcoin/monero docker files The tests should control block production in order to test various circumstances. The automatic mining disrupts assumptions made in testing. Since we're now using the Bitcoin docker container for testing... * Multiple fixes to the Bitcoin processor Doesn't unwrap on RPC errors. Returns the expected connection error. Fee calculation has a random - 1. This has been removed. Supports the change address being an Option, as it is. This should not have been blindly unwrapped. * Remove unnecessary RPC code * Further RPC simplifications * Simplify Bitcoin action It should not be mining. * cargo fmt * Finish RPC simplifications * Run bitcoind as a daemon * Remove the requirement on txindex Saves tens of GB. Also has attempt_send no longer return a list of outputs. That's incompatible with this and only relevant to old scheduling designs. * Remove number from Bitcoin SignableTransaction Monero requires the current block number for decoy selection. Bitcoin doesn't have a use. * Ban coinbase transactions These are burdened by maturity, so it's critically flawed to support them. This causes the test_send function to fail as its working was premised on a coinbase output. While it does make an actual output, it had insufficient funds for the test's expectations due to regtest halving every 150 blocks. In order to workaround this, the test will invalidate any existing chain, offering a fresh start. Also removes test_get_spendables and simplifies test_send. * Various simplifications Modifies SpendableOutput further to not require RPC calls at time of sign. Removes the need to have get_transaction in the RPC. 
* Clean prepare_send * Update the Bitcoin TransactionMachine to output a Transaction * Bitcoin TransactionMachine simplifications * Update XOnly key handling * Use a single sighash cache * Move tweak_keys * Remove unnecessary PSBT sets * Restore removed newlines * Other newlines * Replace calculate_weight's custom math with a dummy TX serialize * Move BTC TX construction code from processor to bitcoin * Rename transactions.rs to wallet.rs * Remove unused crate * Note TODO * Clean bitcoin signature test * Make unit test out of BTC FROST signing test * Final lint * Remove usage of PartiallySignedTransaction --------- Co-authored-by: Luke Parker <lukeparker5132@gmail.com>
386 lines
12 KiB
Rust
386 lines
12 KiB
Rust
use std::collections::HashMap;
|
|
|
|
use rand_core::OsRng;
|
|
|
|
use group::GroupEncoding;
|
|
|
|
use transcript::{Transcript, RecommendedTranscript};
|
|
use frost::{
|
|
curve::{Ciphersuite, Curve},
|
|
FrostError, ThresholdKeys,
|
|
sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine},
|
|
};
|
|
|
|
use crate::{
|
|
coin::{CoinError, Output, Coin},
|
|
SignError, Network,
|
|
};
|
|
|
|
/// Threshold keys for a wallet, tagged with the canonical block they were
/// created at so the wallet knows when they become active for scanning.
pub struct WalletKeys<C: Curve> {
  keys: ThresholdKeys<C>,
  // Canonical block number at which these keys were created
  creation_block: usize,
}
|
|
|
|
impl<C: Curve> WalletKeys<C> {
  /// Wrap a set of threshold keys with the canonical block they were created at.
  pub fn new(keys: ThresholdKeys<C>, creation_block: usize) -> WalletKeys<C> {
    WalletKeys { keys, creation_block }
  }

  // Bind this key to a specific network by applying an additive offset
  // While it would be fine to just C::ID, including the group key creates distinct
  // offsets instead of static offsets. Under a statically offset system, a BTC key could
  // have X subtracted to find the potential group key, and then have Y added to find the
  // potential ETH group key. While this shouldn't be an issue, as this isn't a private
  // system, there are potentially other benefits to binding this to a specific group key
  // It's no longer possible to influence group key gen to key cancel without breaking the hash
  // function as well, although that degree of influence means key gen is broken already
  fn bind(&self, chain: &[u8]) -> ThresholdKeys<C> {
    // Domain separation tag for both the transcript and the hash-to-scalar
    const DST: &[u8] = b"Serai Processor Wallet Chain Bind";
    let mut transcript = RecommendedTranscript::new(DST);
    // Transcript the chain ID, curve ID, and group key; the append order here is part of
    // the derivation and must not change
    transcript.append_message(b"chain", chain);
    transcript.append_message(b"curve", C::ID);
    transcript.append_message(b"group_key", self.keys.group_key().to_bytes());
    // Hash the transcript challenge to a scalar and apply it as an additive offset
    self.keys.offset(<C as Ciphersuite>::hash_to_F(DST, &transcript.challenge(b"offset")))
  }
}
|
|
|
|
/// Database layer for a coin, tracking scan progress, canonical-block
/// acknowledgements, and received outputs.
pub trait CoinDb {
  /// Set a block as scanned to.
  fn scanned_to_block(&mut self, block: usize);
  /// Acknowledge a specific block number as part of a canonical block.
  fn acknowledge_block(&mut self, canonical: usize, block: usize);

  /// Adds an output to the DB. Returns false if the output was already added.
  fn add_output<O: Output>(&mut self, output: &O) -> bool;

  /// Block this coin has been scanned to (inclusive).
  fn scanned_block(&self) -> usize;
  /// Acknowledged block for a given canonical block.
  fn acknowledged_block(&self, canonical: usize) -> usize;
}
|
|
|
|
/// In-memory implementation of CoinDb. State is lost on drop, so this is
/// suitable for testing/ephemeral use rather than persistence.
pub struct MemCoinDb {
  // Block number of the block this coin has been scanned to
  scanned_block: usize,
  // Acknowledged block for a given canonical block
  acknowledged_blocks: HashMap<usize, usize>,
  // Outputs keyed by their ID bytes, stored in serialized form
  outputs: HashMap<Vec<u8>, Vec<u8>>,
}
|
|
|
|
impl MemCoinDb {
|
|
pub fn new() -> MemCoinDb {
|
|
MemCoinDb { scanned_block: 0, acknowledged_blocks: HashMap::new(), outputs: HashMap::new() }
|
|
}
|
|
}
|
|
|
|
impl CoinDb for MemCoinDb {
|
|
fn scanned_to_block(&mut self, block: usize) {
|
|
self.scanned_block = block;
|
|
}
|
|
|
|
fn acknowledge_block(&mut self, canonical: usize, block: usize) {
|
|
debug_assert!(!self.acknowledged_blocks.contains_key(&canonical));
|
|
self.acknowledged_blocks.insert(canonical, block);
|
|
}
|
|
|
|
fn add_output<O: Output>(&mut self, output: &O) -> bool {
|
|
// This would be insecure as we're indexing by ID and this will replace the output as a whole
|
|
// Multiple outputs may have the same ID in edge cases such as Monero, where outputs are ID'd
|
|
// by output key, not by hash + index
|
|
// self.outputs.insert(output.id(), output).is_some()
|
|
let id = output.id().as_ref().to_vec();
|
|
if self.outputs.contains_key(&id) {
|
|
return false;
|
|
}
|
|
self.outputs.insert(id, output.serialize());
|
|
true
|
|
}
|
|
|
|
fn scanned_block(&self) -> usize {
|
|
self.scanned_block
|
|
}
|
|
|
|
fn acknowledged_block(&self, canonical: usize) -> usize {
|
|
self.acknowledged_blocks[&canonical]
|
|
}
|
|
}
|
|
|
|
fn select_inputs<C: Coin>(inputs: &mut Vec<C::Output>) -> (Vec<C::Output>, u64) {
|
|
// Sort to ensure determinism. Inefficient, yet produces the most legible code to be optimized
|
|
// later
|
|
inputs.sort_by_key(|a| a.amount());
|
|
|
|
// Select the maximum amount of outputs possible
|
|
let res = inputs.split_off(inputs.len() - C::MAX_INPUTS.min(inputs.len()));
|
|
// Calculate their sum value, minus the fee needed to spend them
|
|
let sum = res.iter().map(|input| input.amount()).sum();
|
|
// sum -= C::MAX_FEE; // TODO
|
|
(res, sum)
|
|
}
|
|
|
|
/// Select which queued payments the given value can fund, largest-first.
///
/// Funded payments are removed from `payments` (unfunded ones remain) and
/// their amounts are deducted from `value`. Returns the funded payments.
fn select_outputs<C: Coin>(
  payments: &mut Vec<(C::Address, u64)>,
  value: &mut u64,
) -> Vec<(C::Address, u64)> {
  // Prioritize large payments which will most efficiently use large inputs
  payments.sort_by(|a, b| a.1.cmp(&b.1));

  // Grab the payments this will successfully fund
  let mut outputs = vec![];
  // Walk from the largest payment down; the index-based loop is required as the
  // Vec's length shrinks under swap_remove
  let mut p = payments.len();
  while p != 0 {
    p -= 1;
    if *value >= payments[p].1 {
      *value -= payments[p].1;
      // Swap remove will either pop the tail or insert an element that wouldn't fit, making it
      // always safe to move past
      outputs.push(payments.swap_remove(p));
    }
    // Doesn't break in this else case as a smaller payment may still fit
  }

  outputs
}
|
|
|
|
// Optimizes on the expectation selected/inputs are sorted from lowest value to highest
//
// `remaining` is the slack value: selected's total minus the payments being funded.
// First, inputs which are entirely unneeded are returned to the pool; then, large
// selected inputs are swapped for smaller pooled ones where the slack allows.
//
// NOTE(review): assumes `remaining` is strictly less than the total value of
// `selected` — otherwise the indexing below walks past the end and panics.
// Confirm this invariant at the call site (select_inputs_outputs).
fn refine_inputs<C: Coin>(
  selected: &mut Vec<C::Output>,
  inputs: &mut Vec<C::Output>,
  mut remaining: u64,
) {
  // Drop unused inputs
  let mut s = 0;
  while remaining > selected[s].amount() {
    remaining -= selected[s].amount();
    s += 1;
  }
  // Add them back to the inputs pool
  inputs.extend(selected.drain(.. s));

  // Replace large inputs with smaller ones
  for s in (0 .. selected.len()).rev() {
    for input in inputs.iter_mut() {
      // Doesn't break due to inputs no longer being sorted
      // This could be made faster if we prioritized small input usage over transaction size/fees
      // TODO: Consider. This would implicitly consolidate inputs which would be advantageous
      if selected[s].amount() < input.amount() {
        continue;
      }

      // If we can successfully replace this input, do so
      // diff is the value given up by the swap; it must fit within the slack
      let diff = selected[s].amount() - input.amount();
      if remaining > diff {
        remaining -= diff;

        // Swap the selected input with the pooled one
        let old = selected[s].clone();
        selected[s] = input.clone();
        *input = old;
      }
    }
  }
}
|
|
|
|
#[allow(clippy::type_complexity)]
|
|
fn select_inputs_outputs<C: Coin>(
|
|
inputs: &mut Vec<C::Output>,
|
|
outputs: &mut Vec<(C::Address, u64)>,
|
|
) -> (Vec<C::Output>, Vec<(C::Address, u64)>) {
|
|
if inputs.is_empty() {
|
|
return (vec![], vec![]);
|
|
}
|
|
|
|
let (mut selected, mut value) = select_inputs::<C>(inputs);
|
|
|
|
let outputs = select_outputs::<C>(outputs, &mut value);
|
|
if outputs.is_empty() {
|
|
inputs.extend(selected);
|
|
return (vec![], vec![]);
|
|
}
|
|
|
|
refine_inputs::<C>(&mut selected, inputs, value);
|
|
(selected, outputs)
|
|
}
|
|
|
|
/// A wallet over a single coin: owns the coin's DB, tracks per-key outputs,
/// and holds keys pending activation.
#[allow(clippy::type_complexity)]
pub struct Wallet<D: CoinDb, C: Coin> {
  db: D,
  coin: C,
  // Active keys, each paired with the spendable outputs discovered for them
  keys: Vec<(ThresholdKeys<C::Curve>, Vec<C::Output>)>,
  // Keys not yet active, paired with a block number (presumably the activation
  // block — see the TODO in poll's pending-key handling)
  pending: Vec<(usize, ThresholdKeys<C::Curve>)>,
}
|
|
|
|
impl<D: CoinDb, C: Coin> Wallet<D, C> {
  /// Construct a Wallet around the given DB and coin, with no keys.
  pub fn new(db: D, coin: C) -> Wallet<D, C> {
    Wallet { db, coin, keys: vec![], pending: vec![] }
  }

  /// Block this wallet has been scanned to (inclusive), per the DB.
  pub fn scanned_block(&self) -> usize {
    self.db.scanned_block()
  }
  /// Record the coin's block for a given canonical block in the DB.
  pub fn acknowledge_block(&mut self, canonical: usize, block: usize) {
    self.db.acknowledge_block(canonical, block);
  }
  /// The coin's block previously acknowledged for a given canonical block.
  pub fn acknowledged_block(&self, canonical: usize) -> usize {
    self.db.acknowledged_block(canonical)
  }

  /// Bind the keys to this coin, apply the coin's key tweaks, and queue them as
  /// pending against the block acknowledged for their creation block.
  pub fn add_keys(&mut self, keys: &WalletKeys<C::Curve>) {
    let creation_block = keys.creation_block;
    // Bind to the coin's ID first so the coin-specific tweak applies to the bound key
    let mut keys = keys.bind(C::ID);
    self.coin.tweak_keys(&mut keys);
    self.pending.push((self.acknowledged_block(creation_block), keys));
  }

  /// Address for the most recently activated keys.
  // NOTE(review): indexes keys[len - 1], which panics if no keys have activated
  // yet — confirm callers only invoke this after a successful poll
  pub fn address(&self) -> C::Address {
    self.coin.address(self.keys[self.keys.len() - 1].0.group_key())
  }

  /// Scan every not-yet-scanned block which meets the confirmation threshold,
  /// activating pending keys and collecting outputs for all active keys.
  pub async fn poll(&mut self) -> Result<(), CoinError> {
    // Too few blocks exist for any to meet the confirmation threshold
    if self.coin.get_latest_block_number().await? < (C::CONFIRMATIONS - 1) {
      return Ok(());
    }
    // The most recent block with C::CONFIRMATIONS confirmations
    let confirmed_block = self.coin.get_latest_block_number().await? - (C::CONFIRMATIONS - 1);

    // Will never scan the genesis block, which shouldn't be an issue
    for b in (self.scanned_block() + 1) ..= confirmed_block {
      // If any keys activated at this block, shift them over
      {
        let mut k = 0;
        while k < self.pending.len() {
          // TODO
          //if b < self.pending[k].0 {
          //} else if b == self.pending[k].0 {
          if b <= self.pending[k].0 {
            // Activation starts with an empty output set for the key
            self.keys.push((self.pending.swap_remove(k).1, vec![]));
          } else {
            k += 1;
          }
        }
      }

      // Collect this block's outputs for every active key
      let block = self.coin.get_block(b).await?;
      for (keys, outputs) in self.keys.iter_mut() {
        outputs.extend(
          self
            .coin
            .get_outputs(&block, keys.group_key())
            .await?
            .drain(..)
            // add_output returns false for already-recorded outputs, deduplicating them
            .filter(|output| self.db.add_output(output)),
        );
      }

      self.db.scanned_to_block(b);
    }

    Ok(())
  }

  // This should be called whenever new outputs are received, meaning there was a new block
  // If these outputs were received and sent to Substrate, it should be called after they're
  // included in a block and we have results to act on
  // If these outputs weren't sent to Substrate (change), it should be called immediately
  // with all payments still queued from the last call
  //
  // Returns the payments which couldn't be funded, plus the signable
  // transactions for those which could.
  pub async fn prepare_sends(
    &mut self,
    canonical: usize,
    mut payments: Vec<(C::Address, u64)>,
    fee: C::Fee,
  ) -> Result<(Vec<(C::Address, u64)>, Vec<C::SignableTransaction>), CoinError> {
    if payments.is_empty() {
      return Ok((vec![], vec![]));
    }

    let acknowledged_block = self.acknowledged_block(canonical);

    // TODO: Log schedule outputs when MAX_OUTPUTS is lower than payments.len()
    // Payments is the first set of TXs in the schedule
    // As each payment re-appears, let mut payments = schedule[payment] where the only input is
    // the source payment
    // let (mut payments, schedule) = schedule(payments);

    let mut txs = vec![];
    for (keys, outputs) in self.keys.iter_mut() {
      // Keep building transactions from this key's outputs until none remain or
      // no further payments can be funded
      while !outputs.is_empty() {
        let (inputs, outputs) = select_inputs_outputs::<C>(outputs, &mut payments);
        // If we can no longer process any payments, move to the next set of keys
        if outputs.is_empty() {
          debug_assert_eq!(inputs.len(), 0);
          break;
        }

        // Create the transcript for this transaction, binding the canonical block, the
        // acknowledged block, and this transaction's index within the batch
        let mut transcript = RecommendedTranscript::new(b"Serai Processor Wallet Send");
        transcript
          .append_message(b"canonical_block", u64::try_from(canonical).unwrap().to_le_bytes());
        transcript.append_message(
          b"acknowledged_block",
          u64::try_from(acknowledged_block).unwrap().to_le_bytes(),
        );
        transcript.append_message(b"index", u64::try_from(txs.len()).unwrap().to_le_bytes());

        // Change is sent back to this key's own address (the group key)
        let tx = self
          .coin
          .prepare_send(
            keys.clone(),
            transcript,
            acknowledged_block,
            inputs,
            &outputs,
            Some(keys.group_key()),
            fee,
          )
          .await?;
        // self.db.save_tx(tx) // TODO
        txs.push(tx);
      }
    }

    Ok((payments, txs))
  }

  /// Run the two-round FROST signing protocol over the network for a prepared
  /// transaction, then publish it.
  ///
  /// Returns publish_transaction's Vec<u8> result on success.
  // NOTE(review): whether that Vec<u8> is the TX ID or the serialized TX is
  // coin-defined — confirm against the Coin trait's implementations
  pub async fn attempt_send<N: Network>(
    &mut self,
    network: &mut N,
    prepared: C::SignableTransaction,
  ) -> Result<Vec<u8>, SignError> {
    let attempt = self.coin.attempt_send(prepared).await.map_err(SignError::CoinError)?;

    // Round 1: generate our preprocess/commitments and exchange them
    let (attempt, commitments) = attempt.preprocess(&mut OsRng);
    let commitments = network
      .round(commitments.serialize())
      .await
      .map_err(SignError::NetworkError)?
      .drain()
      .map(|(validator, preprocess)| {
        Ok((
          validator,
          attempt
            .read_preprocess::<&[u8]>(&mut preprocess.as_ref())
            // A malformed preprocess is attributed to the validator who sent it
            .map_err(|_| SignError::FrostError(FrostError::InvalidPreprocess(validator)))?,
        ))
      })
      .collect::<Result<HashMap<_, _>, _>>()?;

    // Round 2: produce our signature share and exchange shares
    let (attempt, share) = attempt.sign(commitments, b"").map_err(SignError::FrostError)?;
    let shares = network
      .round(share.serialize())
      .await
      .map_err(SignError::NetworkError)?
      .drain()
      .map(|(validator, share)| {
        Ok((
          validator,
          attempt
            .read_share::<&[u8]>(&mut share.as_ref())
            // A malformed share is attributed to the validator who sent it
            .map_err(|_| SignError::FrostError(FrostError::InvalidShare(validator)))?,
        ))
      })
      .collect::<Result<HashMap<_, _>, _>>()?;

    // Complete the signature and broadcast the resulting transaction
    let tx = attempt.complete(shares).map_err(SignError::FrostError)?;

    self.coin.publish_transaction(&tx).await.map_err(SignError::CoinError)
  }
}
|