22 Commits

Author SHA1 Message Date
Luke Parker
f75fe67493 Remove accidentally committed ETH files 2024-06-24 09:44:01 -04:00
Luke Parker
28edc6615e Finish merging develop 2024-06-24 09:40:13 -04:00
Luke Parker
ad70bce9f0 Merge branch 'develop' into HEAD 2024-06-24 07:51:44 -04:00
akildemir
3e99d68cfe fix total allocated stake update in wrong time (#518)
* fix total allocated stake update in wrong time

* Restore mid-set increases

* Correct typo I introduced

---------

Co-authored-by: Luke Parker <lukeparker5132@gmail.com>
2024-06-24 07:41:25 -04:00
akildemir
4d9c2df38c Add coordinator rotation test (#535)
* add node side unit test

* complete rotation test for all networks

* set up the fast-epoch docker file

* fix pr comments

* add coordinator side rotation test

* bug fixes

* Remove EPOCH_INTERVAL

* Minor nits

* Add note on origin of publish_tx function in tests/coordinator

* Correct ThresholdParams assert_eq

* fmt

* Correct detection of handover completion

* Restore key gen message match from develop

It was modified in response to the handover completion bug, which has now been
resolved.

* bug fixes

* Correct invalid constant

* Typo fixes

* remove selecting participant to remove at random

---------

Co-authored-by: Luke Parker <lukeparker5132@gmail.com>
2024-06-21 08:39:17 -04:00
Luke Parker
8ab6f9c36e alloy 0.1 2024-06-19 12:39:47 -04:00
Luke Parker
253cf3253d Correct hash for 1.79.0-slim-bookworm docker image 2024-06-13 19:00:01 -04:00
Luke Parker
03445b3020 Update httparse, as 1.9.2 was yanked 2024-06-13 16:49:58 -04:00
Luke Parker
9af111b4aa Rust 1.79, cargo update 2024-06-13 15:57:08 -04:00
Luke Parker
41ce5b1738 Use the serai_abi::Call in the actual Transaction type
We previously required that they have the same encoding; this ensures they do by
making them one and the same. It does require a large, ugly From/TryInto block,
which is deemed preferable for moving this more and more into syntax (from
semantics).

Further improvements (notably re: Extra) are possible, and this already lets us
strip some members from the Call enum.
2024-06-03 23:38:22 -04:00
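
As a rough illustration of the commit above, a hypothetical sketch (simplified stand-in types, not Serai's actual definitions) of embedding the ABI-level Call directly in the Transaction type, with the TryFrom conversion layer it requires:

// Hypothetical sketch, not Serai's actual definitions: the transaction embeds
// the ABI-level Call directly, so the two encodings cannot drift apart; the
// one-time cost is a conversion to/from the runtime's native call type.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Call {
  TransferCoins { to: [u8; 32], amount: u64 },
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Transaction {
  // Previously a separate, identically-encoded copy of the call data.
  pub call: Call,
  pub signature: Option<Vec<u8>>,
}

// Stand-in for the node's native call type.
pub enum RuntimeCall {
  TransferCoins { to: [u8; 32], amount: u64 },
  Internal,
}

// The "large, ugly" conversion block, reduced to a single arm here.
pub struct UnsupportedCall;
impl TryFrom<RuntimeCall> for Call {
  type Error = UnsupportedCall;
  fn try_from(call: RuntimeCall) -> Result<Self, Self::Error> {
    match call {
      RuntimeCall::TransferCoins { to, amount } => Ok(Call::TransferCoins { to, amount }),
      RuntimeCall::Internal => Err(UnsupportedCall),
    }
  }
}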
Luke Parker
2a05cf3225 June 2024 nightly update
Replaces #571.
2024-06-01 21:46:49 -04:00
Luke Parker
f4147c39b2 bitcoin 0.32.1 2024-05-31 01:02:43 -04:00
rlking
cd69f3b9d6 Check if wasm was built by container exit code and state instead of local mountpoint (#570)
* Check if the serai wasm was built successfully by verifying the build container's status code and state, instead of checking the volume mountpoint locally

* Use a log statement for which wasm is used

* Minor typo fix

---------

Co-authored-by: Luke Parker <lukeparker5132@gmail.com>
2024-05-25 20:33:23 -04:00
Luke Parker
1d2beb3ee4 Ethereum relayer server
Causes the send test to pass for the processor.
2024-05-22 18:50:11 -04:00
Luke Parker
ac709b2945 Correct processor docker tests encoding of Bitcoin addresses in OutInstructions 2024-05-21 08:49:57 -04:00
Luke Parker
a473800c26 More aggressive cargo update
Adds a few deps which are fine. Patches an old parking_lot(_core) version.
2024-05-21 08:07:32 -04:00
Luke Parker
09aac20293 Set the BufReader capacity to 0
Fixes issues with bitcoin.

We only use a BufReader as it's the only way to use a std::io::Read generic as
a bitcoin::io::Read object.
2024-05-21 07:06:13 -04:00
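
A minimal standalone sketch of the pattern this commit lands in the wallet code further down (assuming the bitcoin 0.32 crate with its std feature; the helper name is illustrative):

use std::io::{self, BufReader, Read};

use bitcoin::{consensus::encode::Decodable, TxOut};

// Bridge a generic std::io::Read into bitcoin's consensus decoding. The
// zero-capacity BufReader means the buffer never reads ahead, so bytes the
// caller still needs from `r` afterwards aren't silently consumed.
fn decode_txout<R: Read>(r: &mut R) -> io::Result<TxOut> {
  let mut buf_r = BufReader::with_capacity(0, r);
  TxOut::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid TxOut"))
}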
Luke Parker
f93214012d Use ScriptBuf over Address where possible 2024-05-21 06:44:59 -04:00
Luke Parker
400319cd29 cargo update
Also updates our gems
2024-05-21 06:09:04 -04:00
Luke Parker
a0a7d63dad bitcoin 0.32 2024-05-21 05:27:01 -04:00
Luke Parker
fb7d12ee6e Short-circuit test_no_deadlock_in_multisig_completed if preconditions not met 2024-05-21 03:20:44 -04:00
Luke Parker
11ec9e3535 Ethereum processor docker tests, barring send
We need the TX publication relayer for send to work (though that is the point at
which the test fails).
2024-05-21 00:29:33 -04:00
137 changed files with 3212 additions and 3192 deletions


@@ -1 +1 @@
-nightly-2024-05-01
+nightly-2024-06-01

Cargo.lock (generated, 1318 lines changed): diff suppressed because it is too large.


@@ -2,6 +2,8 @@
resolver = "2" resolver = "2"
members = [ members = [
# Version patches # Version patches
"patches/parking_lot_core",
"patches/parking_lot",
"patches/zstd", "patches/zstd",
"patches/rocksdb", "patches/rocksdb",
"patches/proc-macro-crate", "patches/proc-macro-crate",
@@ -36,8 +38,11 @@ members = [
"crypto/schnorrkel", "crypto/schnorrkel",
"coins/bitcoin", "coins/bitcoin",
"coins/ethereum/alloy-simple-request-transport", "coins/ethereum/alloy-simple-request-transport",
"coins/ethereum", "coins/ethereum",
"coins/ethereum/relayer",
"coins/monero/generators", "coins/monero/generators",
"coins/monero", "coins/monero",
@@ -112,6 +117,8 @@ lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s # Needed due to dockertest's usage of `Rc`s when we need `Arc`s
dockertest = { git = "https://github.com/orcalabs/dockertest-rs", rev = "4dd6ae24738aa6dc5c89444cc822ea4745517493" } dockertest = { git = "https://github.com/orcalabs/dockertest-rs", rev = "4dd6ae24738aa6dc5c89444cc822ea4745517493" }
parking_lot_core = { path = "patches/parking_lot_core" }
parking_lot = { path = "patches/parking_lot" }
# wasmtime pulls in an old version for this # wasmtime pulls in an old version for this
zstd = { path = "patches/zstd" } zstd = { path = "patches/zstd" }
# Needed for WAL compression # Needed for WAL compression


@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin" repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"] authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021" edition = "2021"
rust-version = "1.74" rust-version = "1.79"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -23,7 +23,7 @@ thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false } zeroize = { version = "^1.5", default-features = false }
rand_core = { version = "0.6", default-features = false } rand_core = { version = "0.6", default-features = false }
bitcoin = { version = "0.31", default-features = false, features = ["no-std"] } bitcoin = { version = "0.32", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] } k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
@@ -36,7 +36,7 @@ serde_json = { version = "1", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true } simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
[dev-dependencies] [dev-dependencies]
secp256k1 = { version = "0.28", default-features = false, features = ["std"] } secp256k1 = { version = "0.29", default-features = false, features = ["std"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] } frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }


@@ -195,13 +195,13 @@ impl Rpc {
// If this was already successfully published, consider this having succeeded
if let RpcError::RequestError(Error { code, .. }) = e {
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
-return Ok(tx.txid());
+return Ok(tx.compute_txid());
}
}
Err(e)?
}
};
-if txid != tx.txid() {
+if txid != tx.compute_txid() {
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
}
Ok(txid)
@@ -215,7 +215,7 @@ impl Rpc {
let tx: Transaction = encode::deserialize(&bytes)
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
-let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
+let mut tx_hash = *tx.compute_txid().as_raw_hash().as_byte_array();
tx_hash.reverse();
if hash != &tx_hash {
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;


@@ -39,7 +39,7 @@ fn test_algorithm() {
.verify_schnorr(
&Signature::from_slice(&sig)
.expect("couldn't convert produced signature to secp256k1::Signature"),
-&Message::from(Hash::hash(MESSAGE)),
+&Message::from_digest_slice(Hash::hash(MESSAGE).as_ref()).unwrap(),
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
)
.unwrap()


@@ -4,7 +4,7 @@ use std_shims::{
io::{self, Write},
};
#[cfg(feature = "std")]
-use std_shims::io::Read;
+use std::io::{Read, BufReader};
use k256::{
elliptic_curve::sec1::{Tag, ToEncodedPoint},
@@ -18,8 +18,8 @@ use frost::{
};
use bitcoin::{
-consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
-TxOut, Transaction, Block,
+consensus::encode::serialize, key::TweakedPublicKey, OutPoint, ScriptBuf, TxOut, Transaction,
+Block,
};
#[cfg(feature = "std")]
use bitcoin::consensus::encode::Decodable;
@@ -46,12 +46,12 @@ pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
/// Return the Taproot address payload for a public key.
///
/// If the key is odd, this will return None.
-pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
+pub fn p2tr_script_buf(key: ProjectivePoint) -> Option<ScriptBuf> {
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
return None;
}
-Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
+Some(ScriptBuf::new_p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
}
/// A spendable output.
@@ -89,11 +89,17 @@ impl ReceivedOutput {
/// Read a ReceivedOutput from a generic satisfying Read.
#[cfg(feature = "std")]
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
-Ok(ReceivedOutput {
-offset: Secp256k1::read_F(r)?,
-output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
-outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
-})
+let offset = Secp256k1::read_F(r)?;
+let output;
+let outpoint;
+{
+let mut buf_r = BufReader::with_capacity(0, r);
+output =
+TxOut::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid TxOut"))?;
+outpoint =
+OutPoint::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid OutPoint"))?;
+}
+Ok(ReceivedOutput { offset, output, outpoint })
}
/// Write a ReceivedOutput to a generic satisfying Write.
@@ -124,7 +130,7 @@ impl Scanner {
/// Returns None if this key can't be scanned for.
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
let mut scripts = HashMap::new();
-scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
+scripts.insert(p2tr_script_buf(key)?, Scalar::ZERO);
Some(Scanner { key, scripts })
}
@@ -141,9 +147,8 @@ impl Scanner {
// chance of being even
// That means this should terminate within a very small amount of iterations
loop {
-match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
-Some(address) => {
-let script = address.script_pubkey();
+match p2tr_script_buf(self.key + (ProjectivePoint::GENERATOR * offset)) {
+Some(script) => {
if self.scripts.contains_key(&script) {
None?;
}
@@ -166,7 +171,7 @@ impl Scanner {
res.push(ReceivedOutput {
offset: *offset,
output: output.clone(),
-outpoint: OutPoint::new(tx.txid(), vout),
+outpoint: OutPoint::new(tx.compute_txid(), vout),
});
}
}


@@ -18,12 +18,12 @@ use bitcoin::{
absolute::LockTime,
script::{PushBytesBuf, ScriptBuf},
transaction::{Version, Transaction},
-OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
+OutPoint, Sequence, Witness, TxIn, Amount, TxOut,
};
use crate::{
crypto::Schnorr,
-wallet::{ReceivedOutput, address_payload},
+wallet::{ReceivedOutput, p2tr_script_buf},
};
#[rustfmt::skip]
@@ -61,7 +61,11 @@ pub struct SignableTransaction {
}
impl SignableTransaction {
-fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
+fn calculate_weight(
+inputs: usize,
+payments: &[(ScriptBuf, u64)],
+change: Option<&ScriptBuf>,
+) -> u64 {
// Expand this a full transaction in order to use the bitcoin library's weight function
let mut tx = Transaction {
version: Version(2),
@@ -86,14 +90,14 @@ impl SignableTransaction {
// The script pub key is not of a fixed size and does have to be used here
.map(|payment| TxOut {
value: Amount::from_sat(payment.1),
-script_pubkey: payment.0.script_pubkey(),
+script_pubkey: payment.0.clone(),
})
.collect(),
};
if let Some(change) = change {
// Use a 0 value since we're currently unsure what the change amount will be, and since
// the value is fixed size (so any value could be used here)
-tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
+tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.clone() });
}
u64::from(tx.weight())
}
@@ -121,8 +125,8 @@ impl SignableTransaction {
/// If data is specified, an OP_RETURN output will be added with it.
pub fn new(
mut inputs: Vec<ReceivedOutput>,
-payments: &[(Address, u64)],
-change: Option<&Address>,
+payments: &[(ScriptBuf, u64)],
+change: Option<ScriptBuf>,
data: Option<Vec<u8>>,
fee_per_weight: u64,
) -> Result<SignableTransaction, TransactionError> {
@@ -159,10 +163,7 @@ impl SignableTransaction {
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
let mut tx_outs = payments
.iter()
-.map(|payment| TxOut {
-value: Amount::from_sat(payment.1),
-script_pubkey: payment.0.script_pubkey(),
-})
+.map(|payment| TxOut { value: Amount::from_sat(payment.1), script_pubkey: payment.0.clone() })
.collect::<Vec<_>>();
// Add the OP_RETURN output
@@ -213,12 +214,11 @@ impl SignableTransaction {
// If there's a change address, check if there's change to give it
if let Some(change) = change {
-let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
+let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(&change));
let fee_with_change = fee_per_weight * weight_with_change;
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
if value >= DUST {
-tx_outs
-.push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
+tx_outs.push(TxOut { value: Amount::from_sat(value), script_pubkey: change });
weight = weight_with_change;
needed_fee = fee_with_change;
}
@@ -248,7 +248,7 @@ impl SignableTransaction {
/// Returns the TX ID of the transaction this will create.
pub fn txid(&self) -> [u8; 32] {
-let mut res = self.tx.txid().to_byte_array();
+let mut res = self.tx.compute_txid().to_byte_array();
res.reverse();
res
}
@@ -288,7 +288,7 @@ impl SignableTransaction {
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
let offset = keys.clone().offset(self.offsets[i]);
-if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
+if p2tr_script_buf(offset.group_key())? != self.prevouts[i].script_pubkey {
None?;
}


@@ -22,11 +22,10 @@ use bitcoin_serai::{
hashes::Hash as HashTrait,
blockdata::opcodes::all::OP_RETURN,
script::{PushBytesBuf, Instruction, Instructions, Script},
-address::NetworkChecked,
OutPoint, Amount, TxOut, Transaction, Network, Address,
},
wallet::{
-tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
+tweak_keys, p2tr_script_buf, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
},
rpc::Rpc,
};
@@ -48,7 +47,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
"generatetoaddress",
serde_json::json!([
1,
-Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
+Address::from_script(&p2tr_script_buf(key).unwrap(), Network::Regtest).unwrap()
]),
)
.await
@@ -69,7 +68,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
assert_eq!(outputs.len(), 1);
-assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
+assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].compute_txid(), 0));
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
assert_eq!(
@@ -193,7 +192,7 @@ async_sequential! {
assert_eq!(output.offset(), Scalar::ZERO);
let inputs = vec![output];
-let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
+let addr = || p2tr_script_buf(key).unwrap();
let payments = vec![(addr(), 1000)];
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
@@ -206,7 +205,7 @@ async_sequential! {
// No change
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
// Consolidation TX
-assert!(SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, FEE).is_ok());
+assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
// Data
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
// No outputs
@@ -229,7 +228,7 @@ async_sequential! {
);
assert_eq!(
-SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, 0),
+SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0),
Err(TransactionError::TooLowFee),
);
@@ -261,20 +260,19 @@ async_sequential! {
// Declare payments, change, fee
let payments = [
-(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
-(Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
+(p2tr_script_buf(key).unwrap(), 1005),
+(p2tr_script_buf(offset_key).unwrap(), 1007)
];
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
-let change_addr =
-Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());
+let change_addr = p2tr_script_buf(change_key).unwrap();
// Create and sign the TX
let tx = SignableTransaction::new(
vec![output.clone(), offset_output.clone()],
&payments,
-Some(&change_addr),
+Some(change_addr.clone()),
None,
FEE
).unwrap();
@@ -287,7 +285,7 @@ async_sequential! {
// Ensure we can scan it
let outputs = scanner.scan_transaction(&tx);
for (o, output) in outputs.iter().enumerate() {
-assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
+assert_eq!(output.outpoint(), &OutPoint::new(tx.compute_txid(), u32::try_from(o).unwrap()));
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
}
@@ -299,7 +297,7 @@ async_sequential! {
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
assert_eq!(
output,
-&TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
+&TxOut { script_pubkey: payment.0.clone(), value: Amount::from_sat(payment.1) },
);
assert_eq!(scanned.value(), payment.1);
}
@@ -314,13 +312,13 @@ async_sequential! {
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
assert_eq!(
tx.output[2],
-TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
+TxOut { script_pubkey: change_addr, value: Amount::from_sat(change_amount) },
);
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
// effectively test
rpc.send_raw_transaction(&tx).await.unwrap();
-let mut hash = *tx.txid().as_raw_hash().as_byte_array();
+let mut hash = *tx.compute_txid().as_raw_hash().as_byte_array();
hash.reverse();
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
assert_eq!(expected_id, hash);
@@ -344,7 +342,7 @@ async_sequential! {
&SignableTransaction::new(
vec![output],
&[],
-Some(&Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
+Some(p2tr_script_buf(key).unwrap()),
Some(data.clone()),
FEE
).unwrap()


@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021" edition = "2021"
publish = false publish = false
rust-version = "1.74" rust-version = "1.79"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -29,21 +29,21 @@ frost = { package = "modular-frost", path = "../../crypto/frost", default-featur
alloy-core = { version = "0.7", default-features = false } alloy-core = { version = "0.7", default-features = false }
alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] } alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] }
alloy-consensus = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false, features = ["k256"] } alloy-consensus = { version = "0.1", default-features = false, features = ["k256"] }
alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false } alloy-network = { version = "0.1", default-features = false }
alloy-rpc-types = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false } alloy-rpc-types-eth = { version = "0.1", default-features = false }
alloy-rpc-client = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false } alloy-rpc-client = { version = "0.1", default-features = false }
alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false } alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false } alloy-provider = { version = "0.1", default-features = false }
alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false, optional = true } alloy-node-bindings = { version = "0.1", default-features = false, optional = true }
[dev-dependencies] [dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] } frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] }
tokio = { version = "1", features = ["macros"] } tokio = { version = "1", features = ["macros"] }
alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false } alloy-node-bindings = { version = "0.1", default-features = false }
[features] [features]
tests = ["alloy-node-bindings"] tests = ["alloy-node-bindings", "frost/tests"]


@@ -21,8 +21,8 @@ tower = "0.4"
serde_json = { version = "1", default-features = false }
simple-request = { path = "../../../common/request", default-features = false }
-alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false }
-alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "b79db21734cffddc11753fe62ba571565c896f42", default-features = false }
+alloy-json-rpc = { version = "0.1", default-features = false }
+alloy-transport = { version = "0.1", default-features = false }
[features]
default = ["tls"]


@@ -0,0 +1,30 @@
[package]
name = "serai-ethereum-relayer"
version = "0.1.0"
description = "A relayer for Serai's Ethereum transactions"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/relayer"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
publish = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
log = { version = "0.4", default-features = false, features = ["std"] }
env_logger = { version = "0.10", default-features = false, features = ["humantime"] }
tokio = { version = "1", default-features = false, features = ["rt", "time", "io-util", "net", "macros"] }
serai-env = { path = "../../../common/env" }
serai-db = { path = "../../../common/db" }
[features]
parity-db = ["serai-db/parity-db"]
rocksdb = ["serai-db/rocksdb"]


@@ -0,0 +1,15 @@
AGPL-3.0-only license
Copyright (c) 2023-2024 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.


@@ -0,0 +1,4 @@
# Ethereum Transaction Relayer
This server collects Ethereum router commands to be published, offering an RPC
to fetch them.


@@ -0,0 +1,100 @@
pub(crate) use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
net::TcpListener,
};
use serai_db::{Get, DbTxn, Db as DbTrait};
#[tokio::main(flavor = "current_thread")]
async fn main() {
// Override the panic handler with one which will panic if any tokio task panics
{
let existing = std::panic::take_hook();
std::panic::set_hook(Box::new(move |panic| {
existing(panic);
const MSG: &str = "exiting the process due to a task panicking";
println!("{MSG}");
log::error!("{MSG}");
std::process::exit(1);
}));
}
if std::env::var("RUST_LOG").is_err() {
std::env::set_var("RUST_LOG", serai_env::var("RUST_LOG").unwrap_or_else(|| "info".to_string()));
}
env_logger::init();
log::info!("Starting Ethereum relayer server...");
// Open the DB
#[allow(unused_variables, unreachable_code)]
let db = {
#[cfg(all(feature = "parity-db", feature = "rocksdb"))]
panic!("built with parity-db and rocksdb");
#[cfg(all(feature = "parity-db", not(feature = "rocksdb")))]
let db =
serai_db::new_parity_db(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
#[cfg(feature = "rocksdb")]
let db =
serai_db::new_rocksdb(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
db
};
// Start command recipience server
// This should not be publicly exposed
// TODO: Add auth
tokio::spawn({
let db = db.clone();
async move {
// 5132 ^ ((b'E' << 8) | b'R')
let server = TcpListener::bind("0.0.0.0:20830").await.unwrap();
loop {
let (mut socket, _) = server.accept().await.unwrap();
let db = db.clone();
tokio::spawn(async move {
let mut db = db.clone();
loop {
let Ok(msg_len) = socket.read_u32_le().await else { break };
let mut buf = vec![0; usize::try_from(msg_len).unwrap()];
let Ok(_) = socket.read_exact(&mut buf).await else { break };
if buf.len() < 5 {
break;
}
let nonce = u32::from_le_bytes(buf[.. 4].try_into().unwrap());
let mut txn = db.txn();
txn.put(nonce.to_le_bytes(), &buf[4 ..]);
txn.commit();
let Ok(()) = socket.write_all(&[1]).await else { break };
log::info!("received signed command #{nonce}");
}
});
}
}
});
// Start command fetch server
// 5132 ^ ((b'E' << 8) | b'R') + 1
let server = TcpListener::bind("0.0.0.0:20831").await.unwrap();
loop {
let (mut socket, _) = server.accept().await.unwrap();
let db = db.clone();
tokio::spawn(async move {
let db = db.clone();
loop {
// Nonce to get the router command for
let mut buf = vec![0; 4];
let Ok(_) = socket.read_exact(&mut buf).await else { break };
let command = db.get(&buf[.. 4]).unwrap_or(vec![]);
let Ok(()) = socket.write_all(&u32::try_from(command.len()).unwrap().to_le_bytes()).await
else {
break;
};
let Ok(()) = socket.write_all(&command).await else { break };
}
});
}
}
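
A minimal client sketch inferred from the two accept loops above (hypothetical helpers, not part of the commit): port 20830 expects a length-prefixed nonce || command frame and acknowledges with a single 0x01 byte, while port 20831 takes a 4-byte nonce and replies with a length-prefixed command (empty if unknown).

use tokio::{
  io::{AsyncReadExt, AsyncWriteExt},
  net::TcpStream,
};

async fn submit_command(nonce: u32, command: &[u8]) -> std::io::Result<()> {
  let mut socket = TcpStream::connect("127.0.0.1:20830").await?;
  // Frame: u32 LE length, then nonce (u32 LE) followed by the signed command.
  let mut msg = nonce.to_le_bytes().to_vec();
  msg.extend_from_slice(command);
  socket.write_u32_le(u32::try_from(msg.len()).unwrap()).await?;
  socket.write_all(&msg).await?;
  // The server acknowledges each accepted command with a single 0x01 byte.
  let mut ack = [0u8; 1];
  socket.read_exact(&mut ack).await?;
  assert_eq!(ack, [1]);
  Ok(())
}

async fn fetch_command(nonce: u32) -> std::io::Result<Vec<u8>> {
  let mut socket = TcpStream::connect("127.0.0.1:20831").await?;
  // The request is just the 4-byte nonce.
  socket.write_all(&nonce.to_le_bytes()).await?;
  // The response is a u32 LE length followed by the command bytes.
  let len = socket.read_u32_le().await?;
  let mut command = vec![0; usize::try_from(len).unwrap()];
  socket.read_exact(&mut command).await?;
  Ok(command)
}

#[tokio::main(flavor = "current_thread")]
async fn main() -> std::io::Result<()> {
  submit_command(0, b"example command").await?;
  println!("command 0: {} bytes", fetch_command(0).await?.len());
  Ok(())
}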

File diff suppressed because it is too large.


@@ -1,410 +0,0 @@
pub use schnorr::*;
/// This module was auto-generated with ethers-rs Abigen.
/// More information at: <https://github.com/gakonst/ethers-rs>
#[allow(
clippy::enum_variant_names,
clippy::too_many_arguments,
clippy::upper_case_acronyms,
clippy::type_complexity,
dead_code,
non_camel_case_types,
)]
pub mod schnorr {
#[allow(deprecated)]
fn __abi() -> ::ethers_core::abi::Abi {
::ethers_core::abi::ethabi::Contract {
constructor: ::core::option::Option::None,
functions: ::core::convert::From::from([
(
::std::borrow::ToOwned::to_owned("Q"),
::std::vec![
::ethers_core::abi::ethabi::Function {
name: ::std::borrow::ToOwned::to_owned("Q"),
inputs: ::std::vec![],
outputs: ::std::vec![
::ethers_core::abi::ethabi::Param {
name: ::std::string::String::new(),
kind: ::ethers_core::abi::ethabi::ParamType::Uint(256usize),
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("uint256"),
),
},
],
constant: ::core::option::Option::None,
state_mutability: ::ethers_core::abi::ethabi::StateMutability::View,
},
],
),
(
::std::borrow::ToOwned::to_owned("verify"),
::std::vec![
::ethers_core::abi::ethabi::Function {
name: ::std::borrow::ToOwned::to_owned("verify"),
inputs: ::std::vec![
::ethers_core::abi::ethabi::Param {
name: ::std::borrow::ToOwned::to_owned("parity"),
kind: ::ethers_core::abi::ethabi::ParamType::Uint(8usize),
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("uint8"),
),
},
::ethers_core::abi::ethabi::Param {
name: ::std::borrow::ToOwned::to_owned("px"),
kind: ::ethers_core::abi::ethabi::ParamType::FixedBytes(
32usize,
),
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("bytes32"),
),
},
::ethers_core::abi::ethabi::Param {
name: ::std::borrow::ToOwned::to_owned("message"),
kind: ::ethers_core::abi::ethabi::ParamType::FixedBytes(
32usize,
),
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("bytes32"),
),
},
::ethers_core::abi::ethabi::Param {
name: ::std::borrow::ToOwned::to_owned("c"),
kind: ::ethers_core::abi::ethabi::ParamType::FixedBytes(
32usize,
),
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("bytes32"),
),
},
::ethers_core::abi::ethabi::Param {
name: ::std::borrow::ToOwned::to_owned("s"),
kind: ::ethers_core::abi::ethabi::ParamType::FixedBytes(
32usize,
),
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("bytes32"),
),
},
],
outputs: ::std::vec![
::ethers_core::abi::ethabi::Param {
name: ::std::string::String::new(),
kind: ::ethers_core::abi::ethabi::ParamType::Bool,
internal_type: ::core::option::Option::Some(
::std::borrow::ToOwned::to_owned("bool"),
),
},
],
constant: ::core::option::Option::None,
state_mutability: ::ethers_core::abi::ethabi::StateMutability::View,
},
],
),
]),
events: ::std::collections::BTreeMap::new(),
errors: ::core::convert::From::from([
(
::std::borrow::ToOwned::to_owned("InvalidSOrA"),
::std::vec![
::ethers_core::abi::ethabi::AbiError {
name: ::std::borrow::ToOwned::to_owned("InvalidSOrA"),
inputs: ::std::vec![],
},
],
),
(
::std::borrow::ToOwned::to_owned("InvalidSignature"),
::std::vec![
::ethers_core::abi::ethabi::AbiError {
name: ::std::borrow::ToOwned::to_owned("InvalidSignature"),
inputs: ::std::vec![],
},
],
),
]),
receive: false,
fallback: false,
}
}
///The parsed JSON ABI of the contract.
pub static SCHNORR_ABI: ::ethers_contract::Lazy<::ethers_core::abi::Abi> = ::ethers_contract::Lazy::new(
__abi,
);
pub struct Schnorr<M>(::ethers_contract::Contract<M>);
impl<M> ::core::clone::Clone for Schnorr<M> {
fn clone(&self) -> Self {
Self(::core::clone::Clone::clone(&self.0))
}
}
impl<M> ::core::ops::Deref for Schnorr<M> {
type Target = ::ethers_contract::Contract<M>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<M> ::core::ops::DerefMut for Schnorr<M> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<M> ::core::fmt::Debug for Schnorr<M> {
fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
f.debug_tuple(::core::stringify!(Schnorr)).field(&self.address()).finish()
}
}
impl<M: ::ethers_providers::Middleware> Schnorr<M> {
/// Creates a new contract instance with the specified `ethers` client at
/// `address`. The contract derefs to a `ethers::Contract` object.
pub fn new<T: Into<::ethers_core::types::Address>>(
address: T,
client: ::std::sync::Arc<M>,
) -> Self {
Self(
::ethers_contract::Contract::new(
address.into(),
SCHNORR_ABI.clone(),
client,
),
)
}
///Calls the contract's `Q` (0xe493ef8c) function
pub fn q(
&self,
) -> ::ethers_contract::builders::ContractCall<M, ::ethers_core::types::U256> {
self.0
.method_hash([228, 147, 239, 140], ())
.expect("method not found (this should never happen)")
}
///Calls the contract's `verify` (0x9186da4c) function
pub fn verify(
&self,
parity: u8,
px: [u8; 32],
message: [u8; 32],
c: [u8; 32],
s: [u8; 32],
) -> ::ethers_contract::builders::ContractCall<M, bool> {
self.0
.method_hash([145, 134, 218, 76], (parity, px, message, c, s))
.expect("method not found (this should never happen)")
}
}
impl<M: ::ethers_providers::Middleware> From<::ethers_contract::Contract<M>>
for Schnorr<M> {
fn from(contract: ::ethers_contract::Contract<M>) -> Self {
Self::new(contract.address(), contract.client())
}
}
///Custom Error type `InvalidSOrA` with signature `InvalidSOrA()` and selector `0x4e99a12e`
#[derive(
Clone,
::ethers_contract::EthError,
::ethers_contract::EthDisplay,
Default,
Debug,
PartialEq,
Eq,
Hash
)]
#[etherror(name = "InvalidSOrA", abi = "InvalidSOrA()")]
pub struct InvalidSOrA;
///Custom Error type `InvalidSignature` with signature `InvalidSignature()` and selector `0x8baa579f`
#[derive(
Clone,
::ethers_contract::EthError,
::ethers_contract::EthDisplay,
Default,
Debug,
PartialEq,
Eq,
Hash
)]
#[etherror(name = "InvalidSignature", abi = "InvalidSignature()")]
pub struct InvalidSignature;
///Container type for all of the contract's custom errors
#[derive(Clone, ::ethers_contract::EthAbiType, Debug, PartialEq, Eq, Hash)]
pub enum SchnorrErrors {
InvalidSOrA(InvalidSOrA),
InvalidSignature(InvalidSignature),
/// The standard solidity revert string, with selector
/// Error(string) -- 0x08c379a0
RevertString(::std::string::String),
}
impl ::ethers_core::abi::AbiDecode for SchnorrErrors {
fn decode(
data: impl AsRef<[u8]>,
) -> ::core::result::Result<Self, ::ethers_core::abi::AbiError> {
let data = data.as_ref();
if let Ok(decoded) = <::std::string::String as ::ethers_core::abi::AbiDecode>::decode(
data,
) {
return Ok(Self::RevertString(decoded));
}
if let Ok(decoded) = <InvalidSOrA as ::ethers_core::abi::AbiDecode>::decode(
data,
) {
return Ok(Self::InvalidSOrA(decoded));
}
if let Ok(decoded) = <InvalidSignature as ::ethers_core::abi::AbiDecode>::decode(
data,
) {
return Ok(Self::InvalidSignature(decoded));
}
Err(::ethers_core::abi::Error::InvalidData.into())
}
}
impl ::ethers_core::abi::AbiEncode for SchnorrErrors {
fn encode(self) -> ::std::vec::Vec<u8> {
match self {
Self::InvalidSOrA(element) => {
::ethers_core::abi::AbiEncode::encode(element)
}
Self::InvalidSignature(element) => {
::ethers_core::abi::AbiEncode::encode(element)
}
Self::RevertString(s) => ::ethers_core::abi::AbiEncode::encode(s),
}
}
}
impl ::ethers_contract::ContractRevert for SchnorrErrors {
fn valid_selector(selector: [u8; 4]) -> bool {
match selector {
[0x08, 0xc3, 0x79, 0xa0] => true,
_ if selector
== <InvalidSOrA as ::ethers_contract::EthError>::selector() => true,
_ if selector
== <InvalidSignature as ::ethers_contract::EthError>::selector() => {
true
}
_ => false,
}
}
}
impl ::core::fmt::Display for SchnorrErrors {
fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
match self {
Self::InvalidSOrA(element) => ::core::fmt::Display::fmt(element, f),
Self::InvalidSignature(element) => ::core::fmt::Display::fmt(element, f),
Self::RevertString(s) => ::core::fmt::Display::fmt(s, f),
}
}
}
impl ::core::convert::From<::std::string::String> for SchnorrErrors {
fn from(value: String) -> Self {
Self::RevertString(value)
}
}
impl ::core::convert::From<InvalidSOrA> for SchnorrErrors {
fn from(value: InvalidSOrA) -> Self {
Self::InvalidSOrA(value)
}
}
impl ::core::convert::From<InvalidSignature> for SchnorrErrors {
fn from(value: InvalidSignature) -> Self {
Self::InvalidSignature(value)
}
}
///Container type for all input parameters for the `Q` function with signature `Q()` and selector `0xe493ef8c`
#[derive(
Clone,
::ethers_contract::EthCall,
::ethers_contract::EthDisplay,
Default,
Debug,
PartialEq,
Eq,
Hash
)]
#[ethcall(name = "Q", abi = "Q()")]
pub struct QCall;
///Container type for all input parameters for the `verify` function with signature `verify(uint8,bytes32,bytes32,bytes32,bytes32)` and selector `0x9186da4c`
#[derive(
Clone,
::ethers_contract::EthCall,
::ethers_contract::EthDisplay,
Default,
Debug,
PartialEq,
Eq,
Hash
)]
#[ethcall(name = "verify", abi = "verify(uint8,bytes32,bytes32,bytes32,bytes32)")]
pub struct VerifyCall {
pub parity: u8,
pub px: [u8; 32],
pub message: [u8; 32],
pub c: [u8; 32],
pub s: [u8; 32],
}
///Container type for all of the contract's call
#[derive(Clone, ::ethers_contract::EthAbiType, Debug, PartialEq, Eq, Hash)]
pub enum SchnorrCalls {
Q(QCall),
Verify(VerifyCall),
}
impl ::ethers_core::abi::AbiDecode for SchnorrCalls {
fn decode(
data: impl AsRef<[u8]>,
) -> ::core::result::Result<Self, ::ethers_core::abi::AbiError> {
let data = data.as_ref();
if let Ok(decoded) = <QCall as ::ethers_core::abi::AbiDecode>::decode(data) {
return Ok(Self::Q(decoded));
}
if let Ok(decoded) = <VerifyCall as ::ethers_core::abi::AbiDecode>::decode(
data,
) {
return Ok(Self::Verify(decoded));
}
Err(::ethers_core::abi::Error::InvalidData.into())
}
}
impl ::ethers_core::abi::AbiEncode for SchnorrCalls {
fn encode(self) -> Vec<u8> {
match self {
Self::Q(element) => ::ethers_core::abi::AbiEncode::encode(element),
Self::Verify(element) => ::ethers_core::abi::AbiEncode::encode(element),
}
}
}
impl ::core::fmt::Display for SchnorrCalls {
fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
match self {
Self::Q(element) => ::core::fmt::Display::fmt(element, f),
Self::Verify(element) => ::core::fmt::Display::fmt(element, f),
}
}
}
impl ::core::convert::From<QCall> for SchnorrCalls {
fn from(value: QCall) -> Self {
Self::Q(value)
}
}
impl ::core::convert::From<VerifyCall> for SchnorrCalls {
fn from(value: VerifyCall) -> Self {
Self::Verify(value)
}
}
///Container type for all return fields from the `Q` function with signature `Q()` and selector `0xe493ef8c`
#[derive(
Clone,
::ethers_contract::EthAbiType,
::ethers_contract::EthAbiCodec,
Default,
Debug,
PartialEq,
Eq,
Hash
)]
pub struct QReturn(pub ::ethers_core::types::U256);
///Container type for all return fields from the `verify` function with signature `verify(uint8,bytes32,bytes32,bytes32,bytes32)` and selector `0x9186da4c`
#[derive(
Clone,
::ethers_contract::EthAbiType,
::ethers_contract::EthAbiCodec,
Default,
Debug,
PartialEq,
Eq,
Hash
)]
pub struct VerifyReturn(pub bool);
}


@@ -5,7 +5,7 @@ use alloy_consensus::{Signed, TxLegacy};
use alloy_sol_types::{SolCall, SolEvent};
-use alloy_rpc_types::{BlockNumberOrTag, Filter};
+use alloy_rpc_types_eth::{BlockNumberOrTag, Filter};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
@@ -58,14 +58,7 @@ impl Deployer {
/// Construct a new view of the `Deployer`.
pub async fn new(provider: Arc<RootProvider<SimpleRequest>>) -> Result<Option<Self>, Error> {
let address = Self::address();
-#[cfg(not(test))]
-let required_block = BlockNumberOrTag::Finalized;
-#[cfg(test)]
-let required_block = BlockNumberOrTag::Latest;
-let code = provider
-.get_code_at(address.into(), required_block.into())
-.await
-.map_err(|_| Error::ConnectionError)?;
+let code = provider.get_code_at(address.into()).await.map_err(|_| Error::ConnectionError)?;
// Contract has yet to be deployed
if code.is_empty() {
return Ok(None);


@@ -4,7 +4,7 @@ use alloy_core::primitives::{Address, B256, U256};
use alloy_sol_types::{SolInterface, SolEvent};
-use alloy_rpc_types::Filter;
+use alloy_rpc_types_eth::Filter;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};


@@ -7,7 +7,7 @@ pub mod alloy {
pub use alloy_consensus as consensus;
pub use alloy_network as network;
-pub use alloy_rpc_types as rpc_types;
+pub use alloy_rpc_types_eth as rpc_types;
pub use alloy_simple_request_transport as simple_request_transport;
pub use alloy_rpc_client as rpc_client;
pub use alloy_provider as provider;


@@ -12,9 +12,9 @@ use alloy_consensus::TxLegacy;
use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};
-use alloy_rpc_types::Filter;
+use alloy_rpc_types_eth::Filter;
#[cfg(test)]
-use alloy_rpc_types::{BlockId, TransactionRequest, TransactionInput};
+use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};


@@ -11,7 +11,7 @@ use alloy_core::{
};
use alloy_consensus::{SignableTransaction, TxLegacy};
-use alloy_rpc_types::{BlockNumberOrTag, TransactionReceipt};
+use alloy_rpc_types_eth::TransactionReceipt;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
@@ -57,15 +57,14 @@ pub async fn send(
// let chain_id = provider.get_chain_id().await.unwrap();
// tx.chain_id = Some(chain_id);
tx.chain_id = None;
-tx.nonce =
-provider.get_transaction_count(address, BlockNumberOrTag::Latest.into()).await.unwrap();
+tx.nonce = provider.get_transaction_count(address).await.unwrap();
// 100 gwei
tx.gas_price = 100_000_000_000u128;
let sig = wallet.sign_prehash_recoverable(tx.signature_hash().as_ref()).unwrap();
assert_eq!(address, tx.clone().into_signed(sig.into()).recover_signer().unwrap());
assert!(
-provider.get_balance(address, BlockNumberOrTag::Latest.into()).await.unwrap() >
+provider.get_balance(address).await.unwrap() >
((U256::from(tx.gas_price) * U256::from(tx.gas_limit)) + tx.value)
);


@@ -14,6 +14,7 @@ use frost::{
use alloy_core::primitives::{Address, U256};
use alloy_simple_request_transport::SimpleRequest;
+use alloy_rpc_types_eth::BlockTransactionsKind;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
@@ -84,7 +85,7 @@ async fn setup_test() -> (
async fn latest_block_hash(client: &RootProvider<SimpleRequest>) -> [u8; 32] {
client
-.get_block(client.get_block_number().await.unwrap().into(), false)
+.get_block(client.get_block_number().await.unwrap().into(), BlockTransactionsKind::Hashes)
.await
.unwrap()
.unwrap()


@@ -15,7 +15,7 @@ use alloy_core::primitives::Address;
use alloy_sol_types::SolCall;
-use alloy_rpc_types::{TransactionInput, TransactionRequest};
+use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};


@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true


@@ -105,13 +105,13 @@ pub struct Metadata {
/// but the payment ID will be returned here anyway:
///
/// 1) If the payment ID is tied to an output received by a subaddress account
/// that spent Monero in the transaction (the received output is considered
/// "change" and is not considered a "payment" in this case). If there are multiple
/// spending subaddress accounts in a transaction, the highest index spent key image
/// is used to determine the spending subaddress account.
///
/// 2) If the payment ID is the unencrypted variant and the block's hf version is
/// v12 or higher (https://github.com/serai-dex/serai/issues/512)
pub payment_id: Option<PaymentId>,
/// Arbitrary data encoded in TX extra.
pub arbitrary_data: Vec<Vec<u8>>,


@@ -364,8 +364,8 @@ impl Change {
/// 1) The change in the tx is shunted to the fee (fingerprintable fee).
///
/// 2) If there are 2 outputs in the tx, there would be no payment ID as is the case when the
/// reference wallet creates 2 output txs, since monero-serai doesn't know which output
/// to tie the dummy payment ID to.
pub fn fingerprintable(address: Option<MoneroAddress>) -> Change {
Change { address, view: None }
}


@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/common/zalloc"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = [] keywords = []
edition = "2021" edition = "2021"
rust-version = "1.60" rust-version = "1.77.0"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -19,8 +19,10 @@ workspace = true
[dependencies] [dependencies]
zeroize = { version = "^1.5", default-features = false } zeroize = { version = "^1.5", default-features = false }
[build-dependencies]
rustversion = { version = "1", default-features = false }
[features] [features]
std = ["zeroize/std"] std = ["zeroize/std"]
default = ["std"] default = ["std"]
# Commented for now as it requires nightly and we don't use nightly allocator = []
# allocator = []

common/zalloc/build.rs (new file, 10 lines)

@@ -0,0 +1,10 @@
#[rustversion::nightly]
fn main() {
println!("cargo::rustc-check-cfg=cfg(zalloc_rustc_nightly)");
println!("cargo::rustc-cfg=zalloc_rustc_nightly");
}
#[rustversion::not(nightly)]
fn main() {
println!("cargo::rustc-check-cfg=cfg(zalloc_rustc_nightly)");
}


@@ -1,6 +1,6 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
-#![cfg_attr(feature = "allocator", feature(allocator_api))]
+#![cfg_attr(all(zalloc_rustc_nightly, feature = "allocator"), feature(allocator_api))]
//! Implementation of a Zeroizing Allocator, enabling zeroizing memory on deallocation.
//! This can either be used with Box (requires nightly and the "allocator" feature) to provide the
@@ -17,12 +17,12 @@ use zeroize::Zeroize;
/// An allocator wrapper which zeroizes its memory on dealloc.
pub struct ZeroizingAlloc<T>(pub T);
-#[cfg(feature = "allocator")]
+#[cfg(all(zalloc_rustc_nightly, feature = "allocator"))]
use core::{
ptr::NonNull,
alloc::{AllocError, Allocator},
};
-#[cfg(feature = "allocator")]
+#[cfg(all(zalloc_rustc_nightly, feature = "allocator"))]
unsafe impl<T: Allocator> Allocator for ZeroizingAlloc<T> {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.0.allocate(layout)


@@ -122,7 +122,7 @@ impl QueuedBatchesDb {
pub fn take(txn: &mut impl DbTxn, set: ValidatorSet) -> Vec<Transaction> {
let batches_vec = Self::get(txn, set).unwrap_or_default();
-txn.del(&Self::key(set));
+txn.del(Self::key(set));
let mut batches: &[u8] = &batches_vec;
let mut res = vec![];


@@ -133,7 +133,13 @@ mod impl_pst_for_serai {
key_pair: KeyPair, key_pair: KeyPair,
signature: Signature, signature: Signature,
) { ) {
let tx = SeraiValidatorSets::set_keys(set.network, removed, key_pair, signature); // TODO: BoundedVec as an arg to avoid this expect
let tx = SeraiValidatorSets::set_keys(
set.network,
removed.try_into().expect("removing more than allowed"),
key_pair,
signature,
);
async fn check(serai: SeraiValidatorSets<'_>, set: ValidatorSet, (): ()) -> bool { async fn check(serai: SeraiValidatorSets<'_>, set: ValidatorSet, (): ()) -> bool {
if matches!(serai.keys(set).await, Ok(Some(_))) { if matches!(serai.keys(set).await, Ok(Some(_))) {
log::info!("another coordinator set key pair for {:?}", set); log::info!("another coordinator set key pair for {:?}", set);

@@ -177,14 +177,14 @@ impl<N: Network> BlockData<N> {
     let new_block = last_block_or_round(&mut txn, LATEST_BLOCK_KEY, self.number.0)?;
     if new_block {
       // Delete the latest round key
-      txn.del(&key(LATEST_ROUND_KEY));
+      txn.del(key(LATEST_ROUND_KEY));
     }
     let new_round = last_block_or_round(&mut txn, LATEST_ROUND_KEY, round_number.0.into())?;
     if new_block || new_round {
       // Delete the messages for the old round
-      txn.del(&key(PROPOSE_KEY));
-      txn.del(&key(PEVOTE_KEY));
-      txn.del(&key(PRECOMMIT_KEY));
+      txn.del(key(PROPOSE_KEY));
+      txn.del(key(PEVOTE_KEY));
+      txn.del(key(PRECOMMIT_KEY));
     }
     // Check we haven't sent this message within this round

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["dkg", "multisig", "threshold", "ff", "group"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -64,10 +64,7 @@ pub struct GeneratorPromotion<C1: Ciphersuite, C2: Ciphersuite> {
   _c2: PhantomData<C2>,
 }
-impl<C1: Ciphersuite, C2: Ciphersuite> GeneratorPromotion<C1, C2>
-where
-  C2: Ciphersuite<F = C1::F, G = C1::G>,
-{
+impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<C1, C2> {
   /// Begin promoting keys from one generator to another. Returns a proof this share was properly
   /// promoted.
   pub fn promote<R: RngCore + CryptoRng>(

@@ -6,7 +6,7 @@ license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -53,11 +53,11 @@ pub(crate) struct Aos<G0: PrimeGroup + Zeroize, G1: PrimeGroup + Zeroize, const
   s: [(G0::Scalar, G1::Scalar); RING_LEN],
 }
-impl<G0: PrimeGroup + Zeroize, G1: PrimeGroup + Zeroize, const RING_LEN: usize>
-  Aos<G0, G1, RING_LEN>
-where
-  G0::Scalar: PrimeFieldBits + Zeroize,
-  G1::Scalar: PrimeFieldBits + Zeroize,
+impl<
+  G0: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  const RING_LEN: usize,
+> Aos<G0, G1, RING_LEN>
 {
   #[allow(non_snake_case)]
   fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {

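The `where`-clause rewrites here and throughout the following crypto crates rely on associated type bounds, which stabilized in Rust 1.79 (hence the `rust-version` bumps). A self-contained before/after illustration of the syntax, using a plain `Iterator` rather than this repository's traits:

```rust
// Before Rust 1.79: constraining an associated type required a `where` clause.
fn sum_where<I>(iter: I) -> u64
where
  I: Iterator,
  I::Item: Into<u64>,
{
  iter.map(Into::into).sum()
}

// With associated type bounds, the constraint sits inline on the generic,
// mirroring `G: PrimeGroup<Scalar: PrimeFieldBits + Zeroize>` above.
fn sum_inline<I: Iterator<Item: Into<u64>>>(iter: I) -> u64 {
  iter.map(Into::into).sum()
}

fn main() {
  assert_eq!(sum_where([1u32, 2, 3].into_iter()), sum_inline([1u32, 2, 3].into_iter()));
}
```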
@@ -76,14 +76,11 @@ pub(crate) struct Bits<
 }
 impl<
-  G0: PrimeGroup + Zeroize,
-  G1: PrimeGroup + Zeroize,
+  G0: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
   const SIGNATURE: u8,
   const RING_LEN: usize,
 > Bits<G0, G1, SIGNATURE, RING_LEN>
-where
-  G0::Scalar: PrimeFieldBits + Zeroize,
-  G1::Scalar: PrimeFieldBits + Zeroize,
 {
   fn transcript<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
     transcript.domain_separate(b"bits");

@@ -112,15 +112,12 @@ pub enum DLEqError {
 // anyone who wants it
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct __DLEqProof<
-  G0: PrimeGroup + Zeroize,
-  G1: PrimeGroup + Zeroize,
+  G0: PrimeGroup<Scalar: PrimeFieldBits> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits> + Zeroize,
   const SIGNATURE: u8,
   const RING_LEN: usize,
   const REMAINDER_RING_LEN: usize,
-> where
-  G0::Scalar: PrimeFieldBits,
-  G1::Scalar: PrimeFieldBits,
-{
+> {
   bits: Vec<Bits<G0, G1, SIGNATURE, RING_LEN>>,
   remainder: Option<Bits<G0, G1, SIGNATURE, REMAINDER_RING_LEN>>,
   poks: (SchnorrPoK<G0>, SchnorrPoK<G1>),
@@ -200,15 +197,12 @@ dleq!(
 );
 impl<
-  G0: PrimeGroup + Zeroize,
-  G1: PrimeGroup + Zeroize,
+  G0: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
   const SIGNATURE: u8,
   const RING_LEN: usize,
   const REMAINDER_RING_LEN: usize,
 > __DLEqProof<G0, G1, SIGNATURE, RING_LEN, REMAINDER_RING_LEN>
-where
-  G0::Scalar: PrimeFieldBits + Zeroize,
-  G1::Scalar: PrimeFieldBits + Zeroize,
 {
   pub(crate) fn transcript<T: Transcript>(
     transcript: &mut T,

@@ -28,10 +28,7 @@ pub(crate) struct SchnorrPoK<G: PrimeGroup + Zeroize> {
   s: G::Scalar,
 }
-impl<G: PrimeGroup + Zeroize> SchnorrPoK<G>
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+impl<G: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize> SchnorrPoK<G> {
   // Not HRAm due to the lack of m
   #[allow(non_snake_case)]
   fn hra<T: Transcript>(transcript: &mut T, generator: G, R: G, A: G) -> G::Scalar {

@@ -105,19 +105,13 @@ pub enum DLEqError {
 /// A proof that points have the same discrete logarithm across generators.
 #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
-pub struct DLEqProof<G: PrimeGroup>
-where
-  G::Scalar: Zeroize,
-{
+pub struct DLEqProof<G: PrimeGroup<Scalar: Zeroize>> {
   c: G::Scalar,
   s: G::Scalar,
 }
 #[allow(non_snake_case)]
-impl<G: PrimeGroup> DLEqProof<G>
-where
-  G::Scalar: Zeroize,
-{
+impl<G: PrimeGroup<Scalar: Zeroize>> DLEqProof<G> {
   fn transcript<T: Transcript>(transcript: &mut T, generator: G, nonce: G, point: G) {
     transcript.append_message(b"generator", generator.to_bytes());
     transcript.append_message(b"nonce", nonce.to_bytes());
@@ -213,20 +207,14 @@ where
 /// across some generators, yet with a smaller overall proof size.
 #[cfg(feature = "std")]
 #[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
-pub struct MultiDLEqProof<G: PrimeGroup>
-where
-  G::Scalar: Zeroize,
-{
+pub struct MultiDLEqProof<G: PrimeGroup<Scalar: Zeroize>> {
   c: G::Scalar,
   s: Vec<G::Scalar>,
 }
 #[cfg(feature = "std")]
 #[allow(non_snake_case)]
-impl<G: PrimeGroup> MultiDLEqProof<G>
-where
-  G::Scalar: Zeroize,
-{
+impl<G: PrimeGroup<Scalar: Zeroize>> MultiDLEqProof<G> {
   /// Prove for each scalar that the series of points created by multiplying it against its
   /// matching generators share a discrete logarithm.
   /// This function panics if `generators.len() != scalars.len()`.

@@ -14,10 +14,7 @@ use transcript::{Transcript, RecommendedTranscript};
 use crate::cross_group::schnorr::SchnorrPoK;
-fn test_schnorr<G: PrimeGroup + Zeroize>()
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+fn test_schnorr<G: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize>() {
   let transcript = RecommendedTranscript::new(b"Schnorr Test");
   let mut batch = BatchVerifier::new(10);

@@ -34,7 +34,7 @@ macro_rules! math_op {
     impl $Op<$Other> for $Value {
       type Output = $Value;
       fn $op_fn(self, other: $Other) -> Self::Output {
-        Self($function(self.0, other.0))
+        $Value($function(self.0, other.0))
       }
     }
     impl $Assign<$Other> for $Value {
@@ -45,7 +45,7 @@ macro_rules! math_op {
     impl<'a> $Op<&'a $Other> for $Value {
       type Output = $Value;
       fn $op_fn(self, other: &'a $Other) -> Self::Output {
-        Self($function(self.0, other.0))
+        $Value($function(self.0, other.0))
      }
     }
     impl<'a> $Assign<&'a $Other> for $Value {
@@ -60,7 +60,7 @@ macro_rules! from_wrapper {
   ($wrapper: ident, $inner: ident, $uint: ident) => {
     impl From<$uint> for $wrapper {
       fn from(a: $uint) -> $wrapper {
-        Self(Residue::new(&$inner::from(a)))
+        $wrapper(Residue::new(&$inner::from(a)))
       }
     }
   };
@@ -127,7 +127,7 @@ macro_rules! field {
     impl Neg for $FieldName {
       type Output = $FieldName;
       fn neg(self) -> $FieldName {
-        Self(self.0.neg())
+        $FieldName(self.0.neg())
      }
     }
@@ -141,13 +141,13 @@ macro_rules! field {
     impl $FieldName {
       /// Perform an exponentiation.
       pub fn pow(&self, other: $FieldName) -> $FieldName {
-        let mut table = [Self(Residue::ONE); 16];
+        let mut table = [$FieldName(Residue::ONE); 16];
         table[1] = *self;
         for i in 2 .. 16 {
           table[i] = table[i - 1] * self;
         }
-        let mut res = Self(Residue::ONE);
+        let mut res = $FieldName(Residue::ONE);
         let mut bits = 0;
         for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() {
           bits <<= 1;
@@ -170,8 +170,8 @@ macro_rules! field {
     }
     impl Field for $FieldName {
-      const ZERO: Self = Self(Residue::ZERO);
-      const ONE: Self = Self(Residue::ONE);
+      const ZERO: Self = $FieldName(Residue::ZERO);
+      const ONE: Self = $FieldName(Residue::ONE);
       fn random(mut rng: impl RngCore) -> Self {
         let mut bytes = [0; 112];
@@ -188,12 +188,12 @@ macro_rules! field {
       fn invert(&self) -> CtOption<Self> {
         const NEG_2: $FieldName =
-          Self($ResidueType::sub(&$ResidueType::ZERO, &$ResidueType::new(&U448::from_u8(2))));
+          $FieldName($ResidueType::sub(&$ResidueType::ZERO, &$ResidueType::new(&U448::from_u8(2))));
         CtOption::new(self.pow(NEG_2), !self.is_zero())
       }
       fn sqrt(&self) -> CtOption<Self> {
-        const MOD_1_4: $FieldName = Self($ResidueType::new(
+        const MOD_1_4: $FieldName = $FieldName($ResidueType::new(
           &$MODULUS.saturating_add(&U448::ONE).wrapping_div(&U448::from_u8(4)),
         ));
@@ -217,14 +217,14 @@ macro_rules! field {
       const TWO_INV: Self = $FieldName($ResidueType::new(&U448::from_u8(2)).invert().0);
       const MULTIPLICATIVE_GENERATOR: Self =
-        Self(Residue::new(&U448::from_u8($MULTIPLICATIVE_GENERATOR)));
+        $FieldName(Residue::new(&U448::from_u8($MULTIPLICATIVE_GENERATOR)));
       // True for both the Ed448 Scalar field and FieldElement field
       const S: u32 = 1;
       // Both fields have their root of unity as -1
       const ROOT_OF_UNITY: Self =
-        Self($ResidueType::sub(&$ResidueType::ZERO, &$ResidueType::new(&U448::ONE)));
-      const ROOT_OF_UNITY_INV: Self = Self(Self::ROOT_OF_UNITY.0.invert().0);
+        $FieldName($ResidueType::sub(&$ResidueType::ZERO, &$ResidueType::new(&U448::ONE)));
+      const ROOT_OF_UNITY_INV: Self = $FieldName(Self::ROOT_OF_UNITY.0.invert().0);
       const DELTA: Self = $FieldName(Residue::new(&U448::from_le_hex($DELTA)));

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ff-group-te
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["ff", "group", "ecc"]
 edition = "2021"
-rust-version = "1.60"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -178,10 +178,7 @@ pub fn test_prime_group<R: RngCore, G: PrimeGroup>(rng: &mut R) {
 }
 /// Run all tests offered by this crate on the group.
-pub fn test_prime_group_bits<R: RngCore, G: PrimeGroup>(rng: &mut R)
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub fn test_prime_group_bits<R: RngCore, G: PrimeGroup<Scalar: PrimeFieldBits>>(rng: &mut R) {
   test_prime_field_bits::<R, G::Scalar>(rng);
   test_prime_group::<R, G>(rng);
 }

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["frost", "multisig", "threshold"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -10,7 +10,7 @@ integrating with existing systems.
 This library offers ciphersuites compatible with the
 [IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version
-11 is supported.
+15 is supported.
 This library was
 [audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf),

@@ -362,9 +362,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
     rho_transcript.append_message(b"message", C::hash_msg(msg));
     rho_transcript.append_message(
       b"preprocesses",
-      &C::hash_commitments(
-        self.params.algorithm.transcript().challenge(b"preprocesses").as_ref(),
-      ),
+      C::hash_commitments(self.params.algorithm.transcript().challenge(b"preprocesses").as_ref()),
     );
     // Generate the per-signer binding factors

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/multiexp"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["multiexp", "ff", "group"]
 edition = "2021"
-rust-version = "1.70"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -12,27 +12,21 @@ use crate::{multiexp, multiexp_vartime};
 // Flatten the contained statements to a single Vec.
 // Wrapped in Zeroizing in case any of the included statements contain private values.
 #[allow(clippy::type_complexity)]
-fn flat<Id: Copy + Zeroize, G: Group + Zeroize>(
+fn flat<Id: Copy + Zeroize, G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>(
   slice: &[(Id, Vec<(G::Scalar, G)>)],
-) -> Zeroizing<Vec<(G::Scalar, G)>>
-where
-  <G as Group>::Scalar: PrimeFieldBits + Zeroize,
-{
+) -> Zeroizing<Vec<(G::Scalar, G)>> {
   Zeroizing::new(slice.iter().flat_map(|pairs| pairs.1.iter()).copied().collect::<Vec<_>>())
 }
 /// A batch verifier intended to verify a series of statements are each equivalent to zero.
 #[allow(clippy::type_complexity)]
 #[derive(Clone, Zeroize)]
-pub struct BatchVerifier<Id: Copy + Zeroize, G: Group + Zeroize>(
+pub struct BatchVerifier<Id: Copy + Zeroize, G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>(
   Zeroizing<Vec<(Id, Vec<(G::Scalar, G)>)>>,
-)
-where
-  <G as Group>::Scalar: PrimeFieldBits + Zeroize;
+);
-impl<Id: Copy + Zeroize, G: Group + Zeroize> BatchVerifier<Id, G>
-where
-  <G as Group>::Scalar: PrimeFieldBits + Zeroize,
+impl<Id: Copy + Zeroize, G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>
+  BatchVerifier<Id, G>
 {
   /// Create a new batch verifier, expected to verify the following amount of statements.
   ///

@@ -49,10 +49,10 @@ fn u8_from_bool(bit_ref: &mut bool) -> u8 {
 // Convert scalars to `window`-sized bit groups, as needed to index a table
 // This algorithm works for `window <= 8`
-pub(crate) fn prep_bits<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> Vec<Vec<u8>>
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn prep_bits<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> Vec<Vec<u8>> {
   let w_usize = usize::from(window);
   let mut groupings = vec![];
@@ -175,10 +175,7 @@ fn algorithm(len: usize) -> Algorithm {
 /// Performs a multiexponentiation, automatically selecting the optimal algorithm based on the
 /// amount of pairs.
-pub fn multiexp<G: Group>(pairs: &[(G::Scalar, G)]) -> G
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+pub fn multiexp<G: Group<Scalar: PrimeFieldBits + Zeroize>>(pairs: &[(G::Scalar, G)]) -> G {
   match algorithm(pairs.len()) {
     Algorithm::Null => Group::identity(),
     Algorithm::Single => pairs[0].1 * pairs[0].0,
@@ -190,10 +187,7 @@ where
 /// Performs a multiexponentiation in variable time, automatically selecting the optimal algorithm
 /// based on the amount of pairs.
-pub fn multiexp_vartime<G: Group>(pairs: &[(G::Scalar, G)]) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub fn multiexp_vartime<G: Group<Scalar: PrimeFieldBits>>(pairs: &[(G::Scalar, G)]) -> G {
   match algorithm(pairs.len()) {
     Algorithm::Null => Group::identity(),
     Algorithm::Single => pairs[0].1 * pairs[0].0,

@@ -7,10 +7,10 @@ use crate::prep_bits;
 // Pippenger's algorithm for multiexponentiation, as published in the SIAM Journal on Computing
 // DOI: 10.1137/0209022
-pub(crate) fn pippenger<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn pippenger<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let mut bits = prep_bits(pairs, window);
   let mut res = G::identity();
@@ -37,10 +37,10 @@ where
   res
 }
-pub(crate) fn pippenger_vartime<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn pippenger_vartime<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let bits = prep_bits(pairs, window);
   let mut res = G::identity();

@@ -24,10 +24,10 @@ fn prep_tables<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> Vec<Vec<G>> {
 // Straus's algorithm for multiexponentiation, as published in The American Mathematical Monthly
 // DOI: 10.2307/2310929
-pub(crate) fn straus<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+pub(crate) fn straus<G: Group<Scalar: PrimeFieldBits + Zeroize>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let mut groupings = prep_bits(pairs, window);
   let tables = prep_tables(pairs, window);
@@ -48,10 +48,10 @@ where
   res
 }
-pub(crate) fn straus_vartime<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn straus_vartime<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let groupings = prep_bits(pairs, window);
   let tables = prep_tables(pairs, window);

@@ -9,10 +9,7 @@ use group::Group;
 use crate::BatchVerifier;
-pub(crate) fn test_batch<G: Group + Zeroize>()
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+pub(crate) fn test_batch<G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>() {
   let valid = |batch: BatchVerifier<_, G>| {
     assert!(batch.verify());
     assert!(batch.verify_vartime());

@@ -18,10 +18,7 @@ mod batch;
 use batch::test_batch;
 #[allow(dead_code)]
-fn benchmark_internal<G: Group>(straus_bool: bool)
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+fn benchmark_internal<G: Group<Scalar: PrimeFieldBits + Zeroize>>(straus_bool: bool) {
   let runs: usize = 20;
   let mut start = 0;
@@ -86,10 +83,7 @@ where
   }
 }
-fn test_multiexp<G: Group>()
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+fn test_multiexp<G: Group<Scalar: PrimeFieldBits + Zeroize>>() {
   let test = |pairs: &[_], sum| {
     // These should automatically determine the best algorithm
     assert_eq!(multiexp(pairs), sum);

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["schnorr", "ff", "group"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -52,7 +52,7 @@ fn test_rfc8032() {
       SchnorrSignature::<Ed25519>::read::<&[u8]>(&mut hex::decode(vector.2).unwrap().as_ref())
         .unwrap();
     let hram = Sha512::new_with_prefix(
-      &[sig.R.to_bytes().as_ref(), &key.to_bytes(), &hex::decode(vector.1).unwrap()].concat(),
+      [sig.R.to_bytes().as_ref(), &key.to_bytes(), &hex::decode(vector.1).unwrap()].concat(),
     );
     assert!(sig.verify(key, Scalar::from_hash(hram)));
   }

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorrkel"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["frost", "multisig", "threshold", "schnorrkel"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/transcript"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["transcript"]
 edition = "2021"
-rust-version = "1.73"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -3,9 +3,9 @@
 Flexible Transcript is a crate offering:
 - `Transcript`, a trait offering functions transcripts should implement.
 - `DigestTranscript`, a competent transcript format instantiated against a
   provided hash function.
 - `MerlinTranscript`, a wrapper of `merlin` into the trait (available via the
   `merlin` feature).
 - `RecommendedTranscript`, a transcript recommended for usage in applications.
   Currently, this is `DigestTranscript<Blake2b512>` (available via the
   `recommended` feature).

@@ -1,10 +1,7 @@
 use crate::Transcript;
 /// Test the sanity of a transcript.
-pub fn test_transcript<T: Transcript>()
-where
-  T::Challenge: PartialEq,
-{
+pub fn test_transcript<T: Transcript<Challenge: PartialEq>>() {
   // Ensure distinct names cause distinct challenges
   {
     let mut t1 = T::new(b"1");

@@ -1,19 +1,19 @@
 [advisories]
+version = 2
 db-path = "~/.cargo/advisory-db"
 db-urls = ["https://github.com/rustsec/advisory-db"]
-vulnerability = "deny"
 yanked = "deny"
-notice = "warn"
-unmaintained = "warn"
 ignore = [
-  "RUSTSEC-2020-0168", # mach is unmaintained
   "RUSTSEC-2021-0139", # https://github.com/serai-dex/serai/228
   "RUSTSEC-2022-0061", # https://github.com/serai-dex/serai/227
 ]
 [licenses]
-unlicensed = "deny"
+version = 2
 allow = [
   # Effective public domain
@@ -36,14 +36,11 @@ allow = [
   "GPL-3.0 WITH Classpath-exception-2.0",
 ]
-copyleft = "deny"
-allow-osi-fsf-free = "neither"
-default = "deny"
 exceptions = [
   { allow = ["AGPL-3.0"], name = "serai-env" },
   { allow = ["AGPL-3.0"], name = "ethereum-serai" },
+  { allow = ["AGPL-3.0"], name = "serai-ethereum-relayer" },
   { allow = ["AGPL-3.0"], name = "serai-message-queue" },
@@ -55,7 +52,7 @@ exceptions = [
   { allow = ["AGPL-3.0"], name = "serai-coins-pallet" },
   { allow = ["AGPL-3.0"], name = "serai-dex-pallet" },
   { allow = ["AGPL-3.0"], name = "serai-genesis-liquidity-pallet" },
   { allow = ["AGPL-3.0"], name = "serai-in-instructions-pallet" },
@@ -101,7 +98,6 @@ allow-git = [
   "https://github.com/rust-lang-nursery/lazy-static.rs",
   "https://github.com/serai-dex/substrate-bip39",
   "https://github.com/serai-dex/substrate",
-  "https://github.com/alloy-rs/alloy",
   "https://github.com/monero-rs/base58-monero",
   "https://github.com/orcalabs/dockertest-rs",
 ]

@@ -13,7 +13,7 @@ GEM
     forwardable-extended (2.6.0)
     google-protobuf (3.25.3-x86_64-linux)
     http_parser.rb (0.8.0)
-    i18n (1.14.4)
+    i18n (1.14.5)
       concurrent-ruby (~> 1.0)
     jekyll (4.3.3)
       addressable (~> 2.4)
@@ -55,17 +55,19 @@ GEM
     mercenary (0.4.0)
     pathutil (0.16.2)
       forwardable-extended (~> 2.6)
-    public_suffix (5.0.4)
+    public_suffix (5.0.5)
-    rake (13.1.0)
+    rake (13.2.1)
     rb-fsevent (0.11.2)
-    rb-inotify (0.10.1)
+    rb-inotify (0.11.1)
       ffi (~> 1.0)
-    rexml (3.2.6)
+    rexml (3.2.8)
+      strscan (>= 3.0.9)
-    rouge (4.2.0)
+    rouge (4.2.1)
     safe_yaml (1.0.5)
     sass-embedded (1.63.6)
       google-protobuf (~> 3.23)
       rake (>= 13.0.0)
+    strscan (3.1.0)
     terminal-table (3.0.2)
       unicode-display_width (>= 1.1.1, < 3)
     unicode-display_width (2.5.0)

@@ -0,0 +1,11 @@
+#!/bin/sh
+
+RPC_USER="${RPC_USER:=serai}"
+RPC_PASS="${RPC_PASS:=seraidex}"
+
+# Run Monero
+monerod --non-interactive --regtest --offline --fixed-difficulty=1 \
+  --no-zmq --rpc-bind-ip=0.0.0.0 --rpc-bind-port=18081 --confirm-external-bind \
+  --rpc-access-control-origins "*" --disable-rpc-ban \
+  --rpc-login=$RPC_USER:$RPC_PASS \
+  $1

@@ -1,5 +1,5 @@
-# rust:1.77.0-slim-bookworm as of March 22nd, 2024 (GMT)
-FROM --platform=linux/amd64 rust@sha256:e785e4aa81f87bc1ee02fa2026ffbc491e0410bdaf6652cea74884373f452664 as deterministic
+# rust:1.79.0-slim-bookworm as of June 14th, 2024 (GMT)
+FROM --platform=linux/amd64 rust@sha256:fa189cd885739dd17fc6bb4e132687fce43f2bf42983c0ac39b60e4943201e9c as deterministic
 # Move to a Debian package snapshot
 RUN rm -rf /etc/apt/sources.list.d/debian.sources && \

@@ -17,6 +17,7 @@ pub fn coordinator(
   let longer_reattempts = if network == Network::Dev { "longer-reattempts" } else { "" };
   let setup = mimalloc(Os::Debian).to_string() +
     &build_serai_service(
+      "",
       network.release(),
       &format!("{db} {longer_reattempts}"),
       "serai-coordinator",

@@ -0,0 +1,39 @@
+use std::path::Path;
+
+use crate::{Network, Os, mimalloc, os, build_serai_service, write_dockerfile};
+
+pub fn ethereum_relayer(orchestration_path: &Path, network: Network) {
+  let setup = mimalloc(Os::Debian).to_string() +
+    &build_serai_service("", network.release(), network.db(), "serai-ethereum-relayer");
+
+  let env_vars = [
+    ("DB_PATH", "/volume/ethereum-relayer-db".to_string()),
+    ("RUST_LOG", "info,serai_ethereum_relayer=trace".to_string()),
+  ];
+  let mut env_vars_str = String::new();
+  for (env_var, value) in env_vars {
+    env_vars_str += &format!(r#"{env_var}=${{{env_var}:="{value}"}} "#);
+  }
+
+  let run_ethereum_relayer = format!(
+    r#"
+# Copy the relayer server binary and relevant license
+COPY --from=builder --chown=ethereumrelayer /serai/bin/serai-ethereum-relayer /bin
+
+# Run ethereum-relayer
+EXPOSE 20830
+EXPOSE 20831
+CMD {env_vars_str} serai-ethereum-relayer
+"#
+  );
+
+  let run = os(Os::Debian, "", "ethereumrelayer") + &run_ethereum_relayer;
+  let res = setup + &run;
+
+  let mut ethereum_relayer_path = orchestration_path.to_path_buf();
+  ethereum_relayer_path.push("coins");
+  ethereum_relayer_path.push("ethereum-relayer");
+  ethereum_relayer_path.push("Dockerfile");
+
+  write_dockerfile(ethereum_relayer_path, &res);
+}

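The interpolation loop above turns each (name, default) pair into a shell default-value expansion embedded in the generated Dockerfile's CMD line. A small standalone check (not part of this diff) of the exact string produced, using one of the relayer's own variables:

```rust
fn main() {
  // Mirrors the orchestrator's env var interpolation: each variable becomes a
  // shell ${VAR:="default"} expansion, so runtime overrides are respected.
  let (env_var, value) = ("DB_PATH", "/volume/ethereum-relayer-db");
  let rendered = format!(r#"{env_var}=${{{env_var}:="{value}"}} "#);
  assert_eq!(rendered, r#"DB_PATH=${DB_PATH:="/volume/ethereum-relayer-db"} "#);
}
```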
@@ -32,6 +32,9 @@ use mimalloc::mimalloc;
 mod coins;
 use coins::*;
+mod ethereum_relayer;
+use ethereum_relayer::ethereum_relayer;
 mod message_queue;
 use message_queue::message_queue;
@@ -137,13 +140,13 @@ WORKDIR /home/{user}
   }
 }
-fn build_serai_service(release: bool, features: &str, package: &str) -> String {
+fn build_serai_service(prelude: &str, release: bool, features: &str, package: &str) -> String {
   let profile = if release { "release" } else { "debug" };
   let profile_flag = if release { "--release" } else { "" };
   format!(
     r#"
-FROM rust:1.77-slim-bookworm as builder
+FROM rust:1.79-slim-bookworm as builder
 COPY --from=mimalloc-debian libmimalloc.so /usr/lib
 RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload
@@ -159,6 +162,8 @@ RUN apt install -y make protobuf-compiler
 # Add the wasm toolchain
 RUN rustup target add wasm32-unknown-unknown
+{prelude}
 # Add files for build
 ADD patches /serai/patches
 ADD common /serai/common
@@ -278,6 +283,8 @@ fn dockerfiles(network: Network) {
   let ethereum_key = infrastructure_keys.remove("ethereum").unwrap();
   let monero_key = infrastructure_keys.remove("monero").unwrap();
+  ethereum_relayer(&orchestration_path, network);
   message_queue(
     &orchestration_path,
     network,
@@ -361,6 +368,7 @@ fn start(network: Network, services: HashSet<String>) {
     let name = match service.as_ref() {
       "serai" => "serai",
       "coordinator" => "coordinator",
+      "ethereum-relayer" => "ethereum-relayer",
       "message-queue" => "message-queue",
       "bitcoin-daemon" => "bitcoin",
       "bitcoin-processor" => "bitcoin-processor",
@@ -374,23 +382,17 @@ fn start(network: Network, services: HashSet<String>) {
     let serai_runtime_volume = format!("serai-{}-runtime-volume", network.label());
     if name == "serai" {
       // Check if it's built by checking if the volume has the expected runtime file
+      let wasm_build_container_name = format!("serai-{}-runtime", network.label());
       let built = || {
-        if let Ok(path) = Command::new("docker")
-          .arg("volume")
+        if let Ok(state_and_status) = Command::new("docker")
           .arg("inspect")
           .arg("-f")
-          .arg("{{ .Mountpoint }}")
-          .arg(&serai_runtime_volume)
+          .arg("{{.State.Status}}:{{.State.ExitCode}}")
+          .arg(&wasm_build_container_name)
           .output()
         {
-          if let Ok(path) = String::from_utf8(path.stdout) {
-            if let Ok(iter) = std::fs::read_dir(PathBuf::from(path.trim())) {
-              for item in iter.flatten() {
-                if item.file_name() == "serai.wasm" {
-                  return true;
-                }
-              }
-            }
+          if let Ok(state_and_status) = String::from_utf8(state_and_status.stdout) {
+            return state_and_status.trim() == "exited:0";
           }
         }
        false
@@ -493,6 +495,10 @@ fn start(network: Network, services: HashSet<String>) {
           command
         }
       }
+      "ethereum-relayer" => {
+        // Expose the router command fetch server
+        command.arg("-p").arg("20831:20831")
+      }
       "monero" => {
         // Expose the RPC for tests
         if network == Network::Dev {
@@ -559,6 +565,9 @@ Commands:
   - `message-queue`
   - `bitcoin-daemon`
   - `bitcoin-processor`
+  - `ethereum-daemon`
+  - `ethereum-processor`
+  - `ethereum-relayer`
   - `monero-daemon`
   - `monero-processor`
   - `monero-wallet-rpc` (if "dev")
@@ -591,6 +600,9 @@ Commands:
     Some("start") => {
       let mut services = HashSet::new();
       for arg in args {
+        if arg == "ethereum-processor" {
+          services.insert("ethereum-relayer".to_string());
+        }
        if let Some(ext_network) = arg.strip_suffix("-processor") {
          services.insert(ext_network.to_string() + "-daemon");
        }

@@ -13,7 +13,7 @@ pub fn message_queue(
   monero_key: <Ristretto as Ciphersuite>::G,
 ) {
   let setup = mimalloc(Os::Debian).to_string() +
-    &build_serai_service(network.release(), network.db(), "serai-message-queue");
+    &build_serai_service("", network.release(), network.db(), "serai-message-queue");
   let env_vars = [
     ("COORDINATOR_KEY", hex::encode(coordinator_key.to_bytes())),

@@ -17,6 +17,15 @@ pub fn processor(
 ) {
   let setup = mimalloc(Os::Debian).to_string() +
     &build_serai_service(
+      if coin == "ethereum" {
+        r#"
+RUN cargo install svm-rs
+RUN svm install 0.8.25
+RUN svm use 0.8.25
+"#
+      } else {
+        ""
+      },
       network.release(),
       &format!("binaries {} {coin}", network.db()),
       "serai-processor",
@@ -32,24 +41,32 @@ RUN apt install -y ca-certificates
   const RPC_PASS: &str = "seraidex";
   // TODO: Isolate networks
   let hostname = format!("serai-{}-{coin}", network.label());
-  let port = match coin {
-    "bitcoin" => 8332,
-    "ethereum" => return, // TODO
-    "monero" => 18081,
-    _ => panic!("unrecognized external network"),
-  };
+  let port = format!(
+    "{}",
+    match coin {
+      "bitcoin" => 8332,
+      "ethereum" => 8545,
+      "monero" => 18081,
+      _ => panic!("unrecognized external network"),
+    }
+  );
-  let env_vars = [
+  let mut env_vars = vec![
     ("MESSAGE_QUEUE_RPC", format!("serai-{}-message-queue", network.label())),
     ("MESSAGE_QUEUE_KEY", hex::encode(coin_key.to_repr())),
     ("ENTROPY", hex::encode(entropy.as_ref())),
     ("NETWORK", coin.to_string()),
     ("NETWORK_RPC_LOGIN", format!("{RPC_USER}:{RPC_PASS}")),
     ("NETWORK_RPC_HOSTNAME", hostname),
-    ("NETWORK_RPC_PORT", format!("{port}")),
+    ("NETWORK_RPC_PORT", port),
     ("DB_PATH", "/volume/processor-db".to_string()),
     ("RUST_LOG", "info,serai_processor=debug".to_string()),
   ];
+  if coin == "ethereum" {
+    env_vars
+      .push(("ETHEREUM_RELAYER_HOSTNAME", format!("serai-{}-ethereum-relayer", network.label())));
+    env_vars.push(("ETHEREUM_RELAYER_PORT", "20830".to_string()));
+  }
   let mut env_vars_str = String::new();
   for (env_var, value) in env_vars {
     env_vars_str += &format!(r#"{env_var}=${{{env_var}:="{value}"}} "#);

@@ -11,9 +11,9 @@ pub fn serai(
   serai_key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
 ) {
   // Always builds in release for performance reasons
-  let setup = mimalloc(Os::Debian).to_string() + &build_serai_service(true, "", "serai-node");
+  let setup = mimalloc(Os::Debian).to_string() + &build_serai_service("", true, "", "serai-node");
   let setup_fast_epoch =
-    mimalloc(Os::Debian).to_string() + &build_serai_service(true, "fast-epoch", "serai-node");
+    mimalloc(Os::Debian).to_string() + &build_serai_service("", true, "fast-epoch", "serai-node");
   let env_vars = [("KEY", hex::encode(serai_key.to_repr()))];
   let mut env_vars_str = String::new();

@@ -0,0 +1,11 @@
+#!/bin/sh
+
+RPC_USER="${RPC_USER:=serai}"
+RPC_PASS="${RPC_PASS:=seraidex}"
+
+# Run Monero
+monerod --non-interactive --regtest --offline --fixed-difficulty=1 \
+  --no-zmq --rpc-bind-ip=0.0.0.0 --rpc-bind-port=18081 --confirm-external-bind \
+  --rpc-access-control-origins "*" --disable-rpc-ban \
+  --rpc-login=$RPC_USER:$RPC_PASS \
+  $1

@@ -0,0 +1,17 @@
+[package]
+name = "parking_lot"
+version = "0.11.2"
+description = "parking_lot which patches to the latest update"
+license = "MIT"
+repository = "https://github.com/serai-dex/serai/tree/develop/patches/parking_lot"
+authors = ["Luke Parker <lukeparker5132@gmail.com>"]
+keywords = []
+edition = "2021"
+rust-version = "1.70"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
+[dependencies]
+parking_lot = "0.12"

@@ -0,0 +1 @@
+pub use parking_lot::*;

@@ -0,0 +1,17 @@
+[package]
+name = "parking_lot_core"
+version = "0.8.6"
+description = "parking_lot_core which patches to the latest update"
+license = "MIT"
+repository = "https://github.com/serai-dex/serai/tree/develop/patches/parking_lot_core"
+authors = ["Luke Parker <lukeparker5132@gmail.com>"]
+keywords = []
+edition = "2021"
+rust-version = "1.70"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
+[dependencies]
+parking_lot_core = "0.9"

@@ -0,0 +1 @@
+pub use parking_lot_core::*;

@@ -45,7 +45,7 @@ frost-schnorrkel = { path = "../crypto/schnorrkel", default-features = false }
 k256 = { version = "^0.13.1", default-features = false, features = ["std"], optional = true }
 # Bitcoin
-secp256k1 = { version = "0.28", default-features = false, features = ["std", "global-context", "rand-std"], optional = true }
+secp256k1 = { version = "0.29", default-features = false, features = ["std", "global-context", "rand-std"], optional = true }
 bitcoin-serai = { path = "../coins/bitcoin", default-features = false, features = ["std"], optional = true }
 # Ethereum

@@ -512,6 +512,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
         ProcessorMessage::GeneratedKeyPair {
           id,
           substrate_key: generated_substrate_key.unwrap().to_bytes(),
+          // TODO: This can be made more efficient since tweaked keys may be a subset of keys
           network_key: generated_network_key.unwrap().to_bytes().as_ref().to_vec(),
         }
       }

@@ -748,7 +748,15 @@ async fn main() {
     #[cfg(feature = "bitcoin")]
     NetworkId::Bitcoin => run(db, Bitcoin::new(url).await, coordinator).await,
     #[cfg(feature = "ethereum")]
-    NetworkId::Ethereum => run(db.clone(), Ethereum::new(db, url).await, coordinator).await,
+    NetworkId::Ethereum => {
+      let relayer_hostname = env::var("ETHEREUM_RELAYER_HOSTNAME")
+        .expect("ethereum relayer hostname wasn't specified")
+        .to_string();
+      let relayer_port =
+        env::var("ETHEREUM_RELAYER_PORT").expect("ethereum relayer port wasn't specified");
+      let relayer_url = relayer_hostname + ":" + &relayer_port;
+      run(db.clone(), Ethereum::new(db, url, relayer_url).await, coordinator).await
+    }
     #[cfg(feature = "monero")]
     NetworkId::Monero => run(db, Monero::new(url).await, coordinator).await,
     _ => panic!("spawning a processor for an unsupported network"),

@@ -231,7 +231,7 @@ impl ForwardedOutputDb {
     let res = InInstructionWithBalance::decode(&mut outputs_ref).unwrap();
     assert!(outputs_ref.len() < outputs.len());
     if outputs_ref.is_empty() {
-      txn.del(&Self::key(balance));
+      txn.del(Self::key(balance));
     } else {
       Self::set(txn, balance, &outputs);
     }

@@ -63,9 +63,22 @@ fn instruction_from_output<N: Network>(
     return (presumed_origin, None);
   }
-  let Ok(shorthand) = Shorthand::decode(&mut data) else { return (presumed_origin, None) };
-  let Ok(instruction) = RefundableInInstruction::try_from(shorthand) else {
-    return (presumed_origin, None);
+  let shorthand = match Shorthand::decode(&mut data) {
+    Ok(shorthand) => shorthand,
+    Err(e) => {
+      info!("data in output {} wasn't valid shorthand: {e:?}", hex::encode(output.id()));
+      return (presumed_origin, None);
+    }
+  };
+  let instruction = match RefundableInInstruction::try_from(shorthand) {
+    Ok(instruction) => instruction,
+    Err(e) => {
+      info!(
+        "shorthand in output {} wasn't convertible to a RefundableInInstruction: {e:?}",
+        hex::encode(output.id())
+      );
+      return (presumed_origin, None);
+    }
   };
   let mut balance = output.balance();
View File

@@ -279,6 +279,8 @@ impl<N: Network, D: Db> ScannerHandle<N, D> {
activation_number: usize, activation_number: usize,
key: <N::Curve as Ciphersuite>::G, key: <N::Curve as Ciphersuite>::G,
) { ) {
info!("Registering key {} in scanner at {activation_number}", hex::encode(key.to_bytes()));
let mut scanner_lock = self.scanner.write().await; let mut scanner_lock = self.scanner.write().await;
let scanner = scanner_lock.as_mut().unwrap(); let scanner = scanner_lock.as_mut().unwrap();
assert!( assert!(
@@ -286,8 +288,6 @@ impl<N: Network, D: Db> ScannerHandle<N, D> {
"activation block of new keys was already scanned", "activation block of new keys was already scanned",
); );
info!("Registering key {} in scanner at {activation_number}", hex::encode(key.to_bytes()));
if scanner.keys.is_empty() { if scanner.keys.is_empty() {
assert!(scanner.ram_scanned.is_none()); assert!(scanner.ram_scanned.is_none());
scanner.ram_scanned = Some(activation_number); scanner.ram_scanned = Some(activation_number);

@@ -116,7 +116,7 @@ impl<N: Network<Scheduler = Self>> SchedulerTrait<N> for Scheduler<N> {
       assert!(self.coins.contains(&utxo.balance().coin));
     }
-    let mut nonce = LastNonce::get(txn).map_or(1, |nonce| nonce + 1);
+    let mut nonce = LastNonce::get(txn).unwrap_or(1);
     let mut plans = vec![];
     for chunk in payments.as_slice().chunks(N::MAX_OUTPUTS) {
       // Once we rotate, all further payments should be scheduled via the new multisig

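The one-line nonce change above is a semantic change, not a cleanup: when a value is stored, the two expressions differ by one. A standalone check of the mechanics (whether `LastNonce` now stores the next nonce rather than the last used one isn't visible in this hunk):

```rust
fn main() {
  let stored: Option<u64> = Some(5);
  // Previous expression: one past the stored value.
  assert_eq!(stored.map_or(1, |nonce| nonce + 1), 6);
  // New expression: the stored value itself.
  assert_eq!(stored.unwrap_or(1), 5);
  // Both start at 1 when nothing has been stored yet.
  assert_eq!(None::<u64>.map_or(1, |nonce| nonce + 1), 1);
  assert_eq!(None::<u64>.unwrap_or(1), 1);
}
```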
@@ -432,7 +432,7 @@ impl<N: UtxoNetwork<Scheduler = Self>> Scheduler<N> {
     }
     // If there's a UTXO to restore, restore it
-    // This is down now as if there is a to_restore output, and it was inserted into self.utxos
+    // This is done now as if there is a to_restore output, and it was inserted into self.utxos
     // earlier, self.utxos.len() may become `N::MAX_INPUTS + 1`
     // The prior block requires the len to be `<= N::MAX_INPUTS`
     if let Some(to_restore) = to_restore {
@@ -442,9 +442,10 @@ impl<N: UtxoNetwork<Scheduler = Self>> Scheduler<N> {
     txn.put(scheduler_key::<D, _>(&self.key), self.serialize());
     log::info!(
-      "created {} plans containing {} payments to sign",
+      "created {} plans containing {} payments to sign, with {} payments pending scheduling",
       plans.len(),
       payments_at_start - self.payments.len(),
+      self.payments.len(),
     );
     plans
   }
@@ -589,7 +590,8 @@ impl<N: UtxoNetwork<Scheduler = Self>> SchedulerTrait<N> for Scheduler<N> {
     output: N::Output,
     refund_to: N::Address,
   ) -> Plan<N> {
-    Plan {
+    let output_id = output.id().as_ref().to_vec();
+    let res = Plan {
       key: output.key(),
       // Uses a payment as this will still be successfully sent due to fee amortization,
       // and because change is currently always a Serai key
@@ -597,7 +599,9 @@ impl<N: UtxoNetwork<Scheduler = Self>> SchedulerTrait<N> for Scheduler<N> {
       inputs: vec![output],
       change: None,
       scheduler_addendum: (),
-    }
+    };
+    log::info!("refund plan for {} has ID {}", hex::encode(output_id), hex::encode(res.id()));
+    res
   }
   fn shim_forward_plan(output: N::Output, to: <N::Curve as Ciphersuite>::G) -> Option<Plan<N>> {

View File

@@ -20,12 +20,11 @@ use bitcoin_serai::{
key::{Parity, XOnlyPublicKey}, key::{Parity, XOnlyPublicKey},
consensus::{Encodable, Decodable}, consensus::{Encodable, Decodable},
script::Instruction, script::Instruction,
address::{NetworkChecked, Address as BAddress}, Transaction, Block, ScriptBuf,
Transaction, Block, Network as BNetwork, ScriptBuf,
opcodes::all::{OP_SHA256, OP_EQUALVERIFY}, opcodes::all::{OP_SHA256, OP_EQUALVERIFY},
}, },
wallet::{ wallet::{
- tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError,
+ tweak_keys, p2tr_script_buf, ReceivedOutput, Scanner, TransactionError,
SignableTransaction as BSignableTransaction, TransactionMachine,
},
rpc::{RpcError, Rpc},
@@ -175,7 +174,7 @@ pub struct Fee(u64);
impl TransactionTrait<Bitcoin> for Transaction {
type Id = [u8; 32];
fn id(&self) -> Self::Id {
- let mut hash = *self.txid().as_raw_hash().as_byte_array();
+ let mut hash = *self.compute_txid().as_raw_hash().as_byte_array();
hash.reverse();
hash
}
@@ -243,7 +242,8 @@ impl EventualityTrait for Eventuality {
buf
}
fn read_completion<R: io::Read>(reader: &mut R) -> io::Result<Transaction> {
- Transaction::consensus_decode(reader).map_err(|e| io::Error::other(format!("{e}")))
+ Transaction::consensus_decode(&mut io::BufReader::with_capacity(0, reader))
+ .map_err(|e| io::Error::other(format!("{e}")))
}
}
@@ -453,7 +453,7 @@ impl Bitcoin {
match BSignableTransaction::new(
inputs.iter().map(|input| input.output.clone()).collect(),
&payments,
- change.as_ref().map(AsRef::as_ref),
+ change.clone().map(Into::into),
None,
fee.0,
) {
@@ -534,12 +534,14 @@ impl Bitcoin {
input_index: usize,
private_key: &PrivateKey,
) -> ScriptBuf {
+ use bitcoin_serai::bitcoin::{Network as BNetwork, Address as BAddress};
let public_key = PublicKey::from_private_key(SECP256K1, private_key);
- let main_addr = BAddress::p2pkh(&public_key, BNetwork::Regtest);
+ let main_addr = BAddress::p2pkh(public_key, BNetwork::Regtest);
let mut der = SECP256K1
.sign_ecdsa_low_r(
- &Message::from(
+ &Message::from_digest_slice(
SighashCache::new(tx)
.legacy_signature_hash(
input_index,
@@ -547,8 +549,10 @@ impl Bitcoin {
EcdsaSighashType::All.to_u32(),
)
.unwrap()
- .to_raw_hash(),
- ),
+ .to_raw_hash()
+ .as_ref(),
+ )
+ .unwrap(),
&private_key.inner,
)
.serialize_der()
@@ -577,8 +581,10 @@ const MAX_INPUTS: usize = 520;
const MAX_OUTPUTS: usize = 520;
fn address_from_key(key: ProjectivePoint) -> Address {
- Address::new(BAddress::<NetworkChecked>::new(BNetwork::Bitcoin, address_payload(key).unwrap()))
- .unwrap()
+ Address::new(
+ p2tr_script_buf(key).expect("creating address from key which isn't properly tweaked"),
+ )
+ .expect("couldn't create Serai-representable address for P2TR script")
}
#[async_trait]
@@ -724,9 +730,7 @@ impl Network for Bitcoin {
}
tx.unwrap().output.swap_remove(usize::try_from(input.previous_output.vout).unwrap())
};
- BAddress::from_script(&spent_output.script_pubkey, BNetwork::Bitcoin)
- .ok()
- .and_then(Address::new)
+ Address::new(spent_output.script_pubkey)
};
let data = Self::extract_serai_data(tx);
for output in &mut outputs {
@@ -858,7 +862,7 @@ impl Network for Bitcoin {
Err(RpcError::ConnectionError) => Err(NetworkError::ConnectionError)?,
// TODO: Distinguish already in pool vs double spend (other signing attempt succeeded) vs
// invalid transaction
- Err(e) => panic!("failed to publish TX {}: {e}", tx.txid()),
+ Err(e) => panic!("failed to publish TX {}: {e}", tx.compute_txid()),
}
Ok(())
}
@@ -894,6 +898,8 @@ impl Network for Bitcoin {
#[cfg(test)]
async fn mine_block(&self) {
+ use bitcoin_serai::bitcoin::{Network as BNetwork, Address as BAddress};
self
.rpc
.rpc_call::<Vec<String>>(
@@ -906,10 +912,12 @@ impl Network for Bitcoin {
#[cfg(test)]
async fn test_send(&self, address: Address) -> Block {
+ use bitcoin_serai::bitcoin::{Network as BNetwork, Address as BAddress};
let secret_key = SecretKey::new(&mut rand_core::OsRng);
let private_key = PrivateKey::new(secret_key, BNetwork::Regtest);
let public_key = PublicKey::from_private_key(SECP256K1, &private_key);
- let main_addr = BAddress::p2pkh(&public_key, BNetwork::Regtest);
+ let main_addr = BAddress::p2pkh(public_key, BNetwork::Regtest);
let new_block = self.get_latest_block_number().await.unwrap() + 1;
self
@@ -923,14 +931,14 @@ impl Network for Bitcoin {
version: Version(2),
lock_time: LockTime::ZERO,
input: vec![TxIn {
- previous_output: OutPoint { txid: tx.txid(), vout: 0 },
+ previous_output: OutPoint { txid: tx.compute_txid(), vout: 0 },
script_sig: Script::new().into(),
sequence: Sequence(u32::MAX),
witness: Witness::default(),
}],
output: vec![TxOut {
value: tx.output[0].value - BAmount::from_sat(10000),
- script_pubkey: address.as_ref().script_pubkey(),
+ script_pubkey: address.clone().into(),
}],
};
tx.input[0].script_sig = Self::sign_btc_input_for_p2pkh(&tx, 0, &private_key);
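A note on the `id()` hunk above: the rename to `compute_txid()` returns the same double-SHA256 hash, whose raw bytes are in internal order, and txids are conventionally displayed byte-reversed, so the reversal yields the familiar explorer-style ID. A minimal sketch of that logic, assuming the `bitcoin` crate is in scope; the helper name is illustrative, not the processor's API:

use bitcoin::{hashes::Hash, Transaction};

// Illustrative helper mirroring Transaction::id() above: take the raw 32-byte hash,
// then reverse it so the bytes match the display (block-explorer) order of the txid.
fn display_order_txid(tx: &Transaction) -> [u8; 32] {
  let mut hash = *tx.compute_txid().as_raw_hash().as_byte_array();
  hash.reverse();
  hash
}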

View File

@@ -13,7 +13,7 @@ use frost::ThresholdKeys;
use ethereum_serai::{
alloy::{
primitives::U256,
- rpc_types::{BlockNumberOrTag, Transaction},
+ rpc_types::{BlockTransactionsKind, BlockNumberOrTag, Transaction},
simple_request_transport::SimpleRequest,
rpc_client::ClientBuilder,
provider::{Provider, RootProvider},
@@ -31,6 +31,11 @@ use tokio::{
time::sleep,
sync::{RwLock, RwLockReadGuard},
};
+ #[cfg(not(test))]
+ use tokio::{
+ io::{AsyncReadExt, AsyncWriteExt},
+ net::TcpStream,
+ };
use serai_client::{
primitives::{Coin, Amount, Balance, NetworkId},
@@ -290,6 +295,8 @@ pub struct Ethereum<D: Db> {
// address. Accordingly, all methods present are consistent to a Serai chain with a finalized
// first key (regardless of local state), and this is safe.
db: D,
+ #[cfg_attr(test, allow(unused))]
+ relayer_url: String,
provider: Arc<RootProvider<SimpleRequest>>,
deployer: Deployer,
router: Arc<RwLock<Option<Router>>>,
@@ -309,9 +316,9 @@ impl<D: Db> fmt::Debug for Ethereum<D> {
}
}
impl<D: Db> Ethereum<D> {
- pub async fn new(db: D, url: String) -> Self {
+ pub async fn new(db: D, daemon_url: String, relayer_url: String) -> Self {
let provider = Arc::new(RootProvider::new(
- ClientBuilder::default().transport(SimpleRequest::new(url), true),
+ ClientBuilder::default().transport(SimpleRequest::new(daemon_url), true),
));
let mut deployer = Deployer::new(provider.clone()).await;
@@ -322,7 +329,9 @@ impl<D: Db> Ethereum<D> {
}
let deployer = deployer.unwrap().unwrap();
- Ethereum { db, provider, deployer, router: Arc::new(RwLock::new(None)) }
+ dbg!(&relayer_url);
+ dbg!(relayer_url.len());
+ Ethereum { db, relayer_url, provider, deployer, router: Arc::new(RwLock::new(None)) }
}
// Obtain a reference to the Router, sleeping until it's deployed if it hasn't already been.
@@ -423,10 +432,10 @@ impl<D: Db> Network for Ethereum<D> {
async fn get_latest_block_number(&self) -> Result<usize, NetworkError> {
let actual_number = self
.provider
- .get_block(BlockNumberOrTag::Finalized.into(), false)
+ .get_block(BlockNumberOrTag::Finalized.into(), BlockTransactionsKind::Hashes)
.await
.map_err(|_| NetworkError::ConnectionError)?
- .expect("no blocks were finalized")
+ .ok_or(NetworkError::ConnectionError)?
.header
.number
.unwrap();
@@ -451,7 +460,7 @@ impl<D: Db> Network for Ethereum<D> {
} else {
self
.provider
- .get_block(u64::try_from(start - 1).unwrap().into(), false)
+ .get_block(u64::try_from(start - 1).unwrap().into(), BlockTransactionsKind::Hashes)
.await
.ok()
.flatten()
@@ -464,7 +473,7 @@ impl<D: Db> Network for Ethereum<D> {
let end_header = self
.provider
- .get_block(u64::try_from(start + 31).unwrap().into(), false)
+ .get_block(u64::try_from(start + 31).unwrap().into(), BlockTransactionsKind::Hashes)
.await
.ok()
.flatten()
@@ -714,8 +723,32 @@ impl<D: Db> Network for Ethereum<D> {
// Publish this to the dedicated TX server for a solver to actually publish
#[cfg(not(test))]
{
- let _ = completion;
- todo!("TODO");
+ let mut msg = vec![];
+ match completion.command() {
+ RouterCommand::UpdateSeraiKey { nonce, .. } | RouterCommand::Execute { nonce, .. } => {
+ msg.extend(&u32::try_from(nonce).unwrap().to_le_bytes());
+ }
+ }
+ completion.write(&mut msg).unwrap();
+ let Ok(mut socket) = TcpStream::connect(&self.relayer_url).await else {
+ log::warn!("couldn't connect to the relayer server");
+ Err(NetworkError::ConnectionError)?
+ };
+ let Ok(()) = socket.write_all(&u32::try_from(msg.len()).unwrap().to_le_bytes()).await else {
+ log::warn!("couldn't send the message's len to the relayer server");
+ Err(NetworkError::ConnectionError)?
+ };
+ let Ok(()) = socket.write_all(&msg).await else {
+ log::warn!("couldn't write the message to the relayer server");
+ Err(NetworkError::ConnectionError)?
+ };
+ if socket.read_u8().await.ok() != Some(1) {
+ log::warn!("didn't get the ack from the relayer server");
+ Err(NetworkError::ConnectionError)?;
+ }
+ Ok(())
}
// Publish this using a dummy account we fund with magic RPC commands
@@ -774,7 +807,7 @@ impl<D: Db> Network for Ethereum<D> {
async fn get_block_number(&self, id: &<Self::Block as Block<Self>>::Id) -> usize {
self
.provider
- .get_block(B256::from(*id).into(), false)
+ .get_block(B256::from(*id).into(), BlockTransactionsKind::Hashes)
.await
.unwrap()
.unwrap()
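For context on the TCP exchange added above (a little-endian u32 length prefix, the serialized completion prefixed by its nonce, then a one-byte ack): below is a minimal sketch of what the receiving side of that framing could look like, assuming the same prefix and ack convention and tokio with its rt, net, io, and macros features. The function name and bind address are illustrative, not the repository's relayer implementation.

use tokio::{
  io::{AsyncReadExt, AsyncWriteExt},
  net::{TcpListener, TcpStream},
};

async fn handle_conn(mut socket: TcpStream) -> std::io::Result<()> {
  // Read the length prefix, then exactly that many bytes of message.
  let len = socket.read_u32_le().await?;
  let mut msg = vec![0; usize::try_from(len).unwrap()];
  socket.read_exact(&mut msg).await?;
  // The first four bytes are the command's nonce; the rest is the serialized completion.
  // A real relayer would queue this for a solver; here we only acknowledge receipt.
  socket.write_all(&[1]).await?;
  Ok(())
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
  let listener = TcpListener::bind("0.0.0.0:20830").await?; // port is illustrative
  loop {
    let (socket, _) = listener.accept().await?;
    tokio::spawn(handle_conn(socket));
  }
}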

View File

@@ -70,7 +70,7 @@ mod bitcoin {
// btc key pair to send from
let private_key = PrivateKey::new(SecretKey::new(&mut rand_core::OsRng), BNetwork::Regtest);
let public_key = PublicKey::from_private_key(SECP256K1, &private_key);
- let main_addr = BAddress::p2pkh(&public_key, BNetwork::Regtest);
+ let main_addr = BAddress::p2pkh(public_key, BNetwork::Regtest);
// get unlocked coins
let new_block = btc.get_latest_block_number().await.unwrap() + 1;
@@ -107,7 +107,7 @@ mod bitcoin {
version: Version(2),
lock_time: LockTime::ZERO,
input: vec![TxIn {
- previous_output: OutPoint { txid: tx.txid(), vout: 0 },
+ previous_output: OutPoint { txid: tx.compute_txid(), vout: 0 },
script_sig: Script::new().into(),
sequence: Sequence(u32::MAX),
witness: Witness::default(),
@@ -128,14 +128,14 @@ mod bitcoin {
version: Version(2),
lock_time: LockTime::ZERO,
input: vec![TxIn {
- previous_output: OutPoint { txid: tx.txid(), vout: 0 },
+ previous_output: OutPoint { txid: tx.compute_txid(), vout: 0 },
script_sig: Script::new().into(),
sequence: Sequence(u32::MAX),
witness: Witness::new(),
}],
output: vec![TxOut {
value: tx.output[0].value - BAmount::from_sat(10000),
- script_pubkey: serai_btc_address.as_ref().script_pubkey(),
+ script_pubkey: serai_btc_address.into(),
}],
};
@@ -143,12 +143,14 @@ mod bitcoin {
// This is the standard script with an extra argument of the InInstruction
let mut sig = SECP256K1
.sign_ecdsa_low_r(
- &Message::from(
+ &Message::from_digest_slice(
SighashCache::new(&tx)
.p2wsh_signature_hash(0, &script, initial_output_value, EcdsaSighashType::All)
.unwrap()
- .to_raw_hash(),
- ),
+ .to_raw_hash()
+ .as_ref(),
+ )
+ .unwrap(),
&private_key.inner,
)
.serialize_der()
@@ -421,7 +423,7 @@ mod ethereum {
});
}
- Ethereum::new(db, url.clone()).await
+ Ethereum::new(db, url.clone(), String::new()).await
})
}
}

View File

@@ -115,6 +115,12 @@ pub async fn test_scanner<N: Network>(
pub async fn test_no_deadlock_in_multisig_completed<N: Network>(
new_network: impl Fn(MemDb) -> Pin<Box<dyn Send + Future<Output = N>>>,
) {
+ // This test scans two blocks then acknowledges one, yet a network with one confirm won't scan
+ // two blocks before the first is acknowledged (due to the look-ahead limit)
+ if N::CONFIRMATIONS <= 1 {
+ return;
+ }
let mut db = MemDb::new();
let network = new_network(db.clone()).await;
@@ -139,6 +145,10 @@ pub async fn test_no_deadlock_in_multisig_completed<N: Network>(
let mut txn = db.txn();
NetworkKeyDb::set(&mut txn, Session(0), &key.to_bytes().as_ref().to_vec());
txn.commit();
+ // Sleep for 5 seconds as setting the Network key value will trigger an async task for
+ // Ethereum
+ tokio::time::sleep(Duration::from_secs(5)).await;
}
key
};
@@ -158,6 +168,7 @@ pub async fn test_no_deadlock_in_multisig_completed<N: Network>(
network.mine_block().await;
}
+ // Block for the second set of keys registered
let block_id =
match timeout(Duration::from_secs(30), scanner.events.recv()).await.unwrap().unwrap() {
ScannerEvent::Block { is_retirement_block, block, outputs: _ } => {
@@ -170,6 +181,7 @@ pub async fn test_no_deadlock_in_multisig_completed<N: Network>(
}
};
+ // Block for the third set of keys registered
match timeout(Duration::from_secs(30), scanner.events.recv()).await.unwrap().unwrap() {
ScannerEvent::Block { .. } => {}
ScannerEvent::Completed(_, _, _, _, _) => {

View File

@@ -1,5 +1,5 @@
[toolchain]
- channel = "1.77"
+ channel = "1.79"
targets = ["wasm32-unknown-unknown"]
profile = "minimal"
components = ["rust-src", "rustfmt", "clippy"]

View File

@@ -16,28 +16,50 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true
[dependencies]
- scale = { package = "parity-scale-codec", version = "3", features = ["derive"] }
+ scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] }
- scale-info = { version = "2", features = ["derive"] }
+ scale-info = { version = "2", default-features = false, features = ["derive"] }
- borsh = { version = "1", features = ["derive", "de_strict_order"], optional = true }
+ borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true }
- serde = { version = "1", features = ["derive", "alloc"], optional = true }
+ serde = { version = "1", default-features = false, features = ["derive", "alloc"], optional = true }
- sp-core = { git = "https://github.com/serai-dex/substrate" }
+ sp-core = { git = "https://github.com/serai-dex/substrate", default-features = false }
- sp-runtime = { git = "https://github.com/serai-dex/substrate" }
+ sp-runtime = { git = "https://github.com/serai-dex/substrate", default-features = false }
- sp-consensus-babe = { git = "https://github.com/serai-dex/substrate" }
+ sp-consensus-babe = { git = "https://github.com/serai-dex/substrate", default-features = false }
- sp-consensus-grandpa = { git = "https://github.com/serai-dex/substrate" }
+ sp-consensus-grandpa = { git = "https://github.com/serai-dex/substrate", default-features = false }
- serai-primitives = { path = "../primitives", version = "0.1" }
- serai-coins-primitives = { path = "../coins/primitives", version = "0.1" }
- serai-validator-sets-primitives = { path = "../validator-sets/primitives", version = "0.1" }
- serai-genesis-liquidity-primitives = { path = "../genesis-liquidity/primitives", version = "0.1" }
- serai-in-instructions-primitives = { path = "../in-instructions/primitives", version = "0.1" }
- serai-signals-primitives = { path = "../signals/primitives", version = "0.1" }
- frame-support = { git = "https://github.com/serai-dex/substrate" }
+ frame-support = { git = "https://github.com/serai-dex/substrate", default-features = false }
+ serai-primitives = { path = "../primitives", version = "0.1", default-features = false }
+ serai-coins-primitives = { path = "../coins/primitives", version = "0.1", default-features = false }
+ serai-validator-sets-primitives = { path = "../validator-sets/primitives", version = "0.1", default-features = false }
+ serai-genesis-liquidity-primitives = { path = "../genesis-liquidity/primitives", version = "0.1", default-features = false }
+ serai-in-instructions-primitives = { path = "../in-instructions/primitives", version = "0.1", default-features = false }
+ serai-signals-primitives = { path = "../signals/primitives", version = "0.1", default-features = false }
[features]
+ std = [
+ "scale/std",
+ "scale-info/std",
+ "borsh?/std",
+ "serde?/std",
+ "sp-core/std",
+ "sp-runtime/std",
+ "sp-consensus-babe/std",
+ "sp-consensus-grandpa/std",
+ "frame-support/std",
+ "serai-primitives/std",
+ "serai-coins-primitives/std",
+ "serai-validator-sets-primitives/std",
+ "serai-genesis-liquidity-primitives/std",
+ "serai-in-instructions-primitives/std",
+ "serai-signals-primitives/std",
+ ]
borsh = [
"dep:borsh",
"serai-primitives/borsh",
@@ -56,3 +78,4 @@ serde = [
"serai-in-instructions-primitives/serde",
"serai-signals-primitives/serde",
]
+ default = ["std"]

View File

@@ -4,7 +4,7 @@ use serai_primitives::{Header, SeraiAddress};
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
pub struct ReportEquivocation {
- pub equivocation_proof: Box<EquivocationProof<Header>>,
+ pub equivocation_proof: alloc::boxed::Box<EquivocationProof<Header>>,
pub key_owner_proof: SeraiAddress,
}

View File

@@ -5,7 +5,8 @@ use primitives::OutInstructionWithBalance;
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Call {
transfer { to: SeraiAddress, balance: Balance },
burn { balance: Balance },
@@ -14,7 +15,17 @@ pub enum Call {
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
+ pub enum LiquidityTokensCall {
+ transfer { to: SeraiAddress, balance: Balance },
+ burn { balance: Balance },
+ }
+ #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
+ #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Event {
Mint { to: SeraiAddress, balance: Balance },
Burn { from: SeraiAddress, balance: Balance },
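A note on the attribute split these hunks apply repeatedly: Serialize is derived whenever the "serde" feature is enabled, while Deserialize additionally requires "std", mirroring the std-gated manual Deserialize in the new tx.rs later in this diff. A minimal sketch of the pattern; `MockEvent` is a stand-in type, not part of the crate, and the `scale`/`scale_info` derives assume the same dependency renames this workspace uses:

#[derive(Clone, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
// Serialization is available in no_std builds with the "serde" feature...
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
// ...while deserialization is only derived when "std" is also enabled.
#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub struct MockEvent {
  pub id: u32,
}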

View File

@@ -6,7 +6,8 @@ type PoolId = Coin;
type MaxSwapPathLength = sp_core::ConstU32<3>;
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Call {
add_liquidity {
coin: Coin,
@@ -38,7 +39,8 @@ pub enum Call {
}
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Event {
PoolCreated {
pool_id: PoolId,

View File

@@ -4,7 +4,7 @@ use serai_primitives::{BlockNumber, SeraiAddress};
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
pub struct ReportEquivocation {
- pub equivocation_proof: Box<EquivocationProof<[u8; 32], BlockNumber>>,
+ pub equivocation_proof: alloc::boxed::Box<EquivocationProof<[u8; 32], BlockNumber>>,
pub key_owner_proof: SeraiAddress,
}
@@ -15,10 +15,10 @@ pub enum Call {
}
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
- #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Event {
- NewAuthorities { authority_set: Vec<(SeraiAddress, u64)> },
+ NewAuthorities { authority_set: alloc::vec::Vec<(SeraiAddress, u64)> },
// TODO: Remove these
Paused,
Resumed,
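The fully qualified `alloc::boxed::Box` and `alloc::vec::Vec` paths above go hand in hand with the `no_std` gating added to lib.rs later in this diff: without std, these containers come from the `alloc` crate. A minimal sketch of that setup with an illustrative type, assuming a "std" cargo feature like the one added to substrate/abi's Cargo.toml above:

// Illustrative lib.rs skeleton; MockReport is a stand-in, not the crate's type.
#![cfg_attr(not(feature = "std"), no_std)]

extern crate alloc;

pub struct MockReport {
  // Without std, heap types are spelled out from alloc.
  pub proof: alloc::boxed::Box<[u8; 32]>,
  pub authorities: alloc::vec::Vec<(u8, u64)>,
}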

View File

@@ -5,14 +5,16 @@ use primitives::SignedBatch;
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Call {
execute_batch { batch: SignedBatch },
}
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Event {
Batch { network: NetworkId, id: u32, block: BlockHash, instructions_hash: [u8; 32] },
InstructionFailure { network: NetworkId, id: u32, index: u32 },

View File

@@ -1,5 +1,12 @@
+ #![cfg_attr(docsrs, feature(doc_cfg))]
+ #![cfg_attr(docsrs, feature(doc_auto_cfg))]
+ #![cfg_attr(not(feature = "std"), no_std)]
#![allow(non_camel_case_types)]
+ extern crate alloc;
+ pub use serai_primitives as primitives;
pub mod system;
pub mod timestamp;
@@ -16,15 +23,13 @@ pub mod genesis_liquidity;
pub mod babe;
pub mod grandpa;
- pub use serai_primitives as primitives;
+ pub mod tx;
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
pub enum Call {
- System,
Timestamp(timestamp::Call),
- TransactionPayment,
Coins(coins::Call),
- LiquidityTokens(coins::Call),
+ LiquidityTokens(coins::LiquidityTokensCall),
Dex(dex::Call),
GenesisLiquidity(genesis_liquidity::Call),
ValidatorSets(validator_sets::Call),
@@ -57,16 +62,20 @@ pub enum Event {
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub struct Extra {
pub era: sp_runtime::generic::Era,
- pub nonce: scale::Compact<u32>,
- pub tip: scale::Compact<u64>,
+ #[codec(compact)]
+ pub nonce: u32,
+ #[codec(compact)]
+ pub tip: u64,
}
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub struct SignedPayloadExtra {
pub spec_version: u32,
pub tx_version: u32,
@@ -74,4 +83,4 @@ pub struct SignedPayloadExtra {
pub mortality_checkpoint: [u8; 32],
}
- pub type Transaction = primitives::Transaction<Call, Extra>;
+ pub type Transaction = tx::Transaction<Call, Extra>;
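The `Extra` change above swaps `scale::Compact<u32>` / `scale::Compact<u64>` fields for plain integers annotated with `#[codec(compact)]`; the wire format is unchanged and only the Rust-side field types get simpler. A small sketch of that equivalence, assuming parity-scale-codec is available under the `scale` rename this workspace uses; `MockExtra` is hypothetical:

use scale::{Compact, Decode, Encode};

#[derive(Clone, Debug, PartialEq, Encode, Decode)]
struct MockExtra {
  #[codec(compact)]
  nonce: u32,
  #[codec(compact)]
  tip: u64,
}

fn main() {
  let extra = MockExtra { nonce: 5, tip: 10 };
  // Same bytes as the old representation built from explicit Compact wrappers.
  assert_eq!(extra.encode(), (Compact(5u32), Compact(10u64)).encode());
  // Round-trips back to plain integers.
  assert_eq!(MockExtra::decode(&mut &extra.encode()[..]).unwrap(), extra);
}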

View File

@@ -7,7 +7,8 @@ use primitives::SignalId;
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Call {
register_retirement_signal { in_favor_of: [u8; 32] },
revoke_retirement_signal { retirement_signal_id: [u8; 32] },
@@ -18,7 +19,8 @@ pub enum Call {
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Event {
RetirementSignalRegistered {
signal_id: [u8; 32],

View File

@@ -3,7 +3,6 @@ use frame_support::dispatch::{DispatchInfo, DispatchError};
use serai_primitives::SeraiAddress;
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Event {
ExtrinsicSuccess { dispatch_info: DispatchInfo },
ExtrinsicFailed { dispatch_error: DispatchError, dispatch_info: DispatchInfo },

View File

@@ -1,5 +1,9 @@
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+ #[cfg_attr(feature = "serde", derive(serde::Serialize))]
+ #[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum Call {
- set { now: scale::Compact<u64> },
+ set {
+ #[codec(compact)]
+ now: u64,
+ },
}

substrate/abi/src/tx.rs (new file, 183 lines)
View File

@@ -0,0 +1,183 @@
use scale::Encode;
use sp_core::sr25519::{Public, Signature};
use sp_runtime::traits::Verify;
use serai_primitives::SeraiAddress;
use frame_support::dispatch::GetDispatchInfo;
pub trait TransactionMember:
Clone + PartialEq + Eq + core::fmt::Debug + scale::Encode + scale::Decode + scale_info::TypeInfo
{
}
impl<
T: Clone
+ PartialEq
+ Eq
+ core::fmt::Debug
+ scale::Encode
+ scale::Decode
+ scale_info::TypeInfo,
> TransactionMember for T
{
}
type TransactionEncodeAs<'a, Extra> =
(&'a crate::Call, &'a Option<(SeraiAddress, Signature, Extra)>);
type TransactionDecodeAs<Extra> = (crate::Call, Option<(SeraiAddress, Signature, Extra)>);
// We use our own Transaction struct, over UncheckedExtrinsic, for more control, a bit more
// simplicity, and in order to be immune to https://github.com/paritytech/polkadot-sdk/issues/2947
#[allow(private_bounds)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Transaction<
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember,
> {
call: crate::Call,
mapped_call: Call,
signature: Option<(SeraiAddress, Signature, Extra)>,
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
Transaction<Call, Extra>
{
pub fn new(call: crate::Call, signature: Option<(SeraiAddress, Signature, Extra)>) -> Self {
Self { call: call.clone(), mapped_call: call.into(), signature }
}
pub fn call(&self) -> &crate::Call {
&self.call
}
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
scale::Encode for Transaction<Call, Extra>
{
fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
let tx: TransactionEncodeAs<Extra> = (&self.call, &self.signature);
tx.using_encoded(f)
}
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
scale::Decode for Transaction<Call, Extra>
{
fn decode<I: scale::Input>(input: &mut I) -> Result<Self, scale::Error> {
let (call, signature) = TransactionDecodeAs::decode(input)?;
let mapped_call = Call::from(call.clone());
Ok(Self { call, mapped_call, signature })
}
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
scale_info::TypeInfo for Transaction<Call, Extra>
{
type Identity = TransactionDecodeAs<Extra>;
// Define the type info as the info of the type equivalent to what we encode as
fn type_info() -> scale_info::Type {
TransactionDecodeAs::<Extra>::type_info()
}
}
#[cfg(feature = "serde")]
mod _serde {
use scale::Encode;
use serde::{ser::*, de::*};
use super::*;
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
Serialize for Transaction<Call, Extra>
{
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let encoded = self.encode();
serializer.serialize_bytes(&encoded)
}
}
#[cfg(feature = "std")]
impl<
'a,
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember,
> Deserialize<'a> for Transaction<Call, Extra>
{
fn deserialize<D: Deserializer<'a>>(de: D) -> Result<Self, D::Error> {
let bytes = sp_core::bytes::deserialize(de)?;
<Self as scale::Decode>::decode(&mut &bytes[..])
.map_err(|e| serde::de::Error::custom(format!("invalid transaction: {e}")))
}
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call> + TryInto<crate::Call>,
Extra: 'static + TransactionMember,
> sp_runtime::traits::Extrinsic for Transaction<Call, Extra>
{
type Call = Call;
type SignaturePayload = (SeraiAddress, Signature, Extra);
fn is_signed(&self) -> Option<bool> {
Some(self.signature.is_some())
}
fn new(call: Call, signature: Option<Self::SignaturePayload>) -> Option<Self> {
Some(Self { call: call.clone().try_into().ok()?, mapped_call: call, signature })
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call> + TryInto<crate::Call>,
Extra: 'static + TransactionMember,
> frame_support::traits::ExtrinsicCall for Transaction<Call, Extra>
{
fn call(&self) -> &Call {
&self.mapped_call
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember + sp_runtime::traits::SignedExtension,
> sp_runtime::traits::ExtrinsicMetadata for Transaction<Call, Extra>
{
type SignedExtensions = Extra;
const VERSION: u8 = 0;
}
impl<
Call: 'static + TransactionMember + From<crate::Call> + GetDispatchInfo,
Extra: 'static + TransactionMember,
> GetDispatchInfo for Transaction<Call, Extra>
{
fn get_dispatch_info(&self) -> frame_support::dispatch::DispatchInfo {
self.mapped_call.get_dispatch_info()
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember + sp_runtime::traits::SignedExtension,
> sp_runtime::traits::BlindCheckable for Transaction<Call, Extra>
{
type Checked = sp_runtime::generic::CheckedExtrinsic<Public, Call, Extra>;
fn check(
self,
) -> Result<Self::Checked, sp_runtime::transaction_validity::TransactionValidityError> {
Ok(match self.signature {
Some((signer, signature, extra)) => {
if !signature.verify(
(&self.call, &extra, extra.additional_signed()?).encode().as_slice(),
&signer.into(),
) {
Err(sp_runtime::transaction_validity::InvalidTransaction::BadProof)?
}
sp_runtime::generic::CheckedExtrinsic {
signed: Some((signer.into(), extra)),
function: self.mapped_call,
}
}
None => sp_runtime::generic::CheckedExtrinsic { signed: None, function: self.mapped_call },
})
}
}
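tx.rs above encodes the struct as a tuple of its canonical fields and rebuilds `mapped_call` during decode rather than serializing it. Below is a self-contained sketch of that pattern with stand-in types (`MockTx` and its derived `doubled` field are illustrative, not the crate's), again assuming the `scale` rename for parity-scale-codec:

use scale::{Decode, Encode, Error, Input};

#[derive(Clone, Debug, PartialEq)]
struct MockTx {
  value: u32,
  signed: Option<bool>,
  doubled: u64, // derived from `value`, never encoded (mirrors `mapped_call`)
}

impl Encode for MockTx {
  fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
    // Only the canonical data goes over the wire, as a tuple.
    (&self.value, &self.signed).using_encoded(f)
  }
}

impl Decode for MockTx {
  fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
    let (value, signed) = <(u32, Option<bool>)>::decode(input)?;
    // Rebuild the derived field, as Transaction does with Call::from(call.clone()).
    Ok(Self { value, signed, doubled: u64::from(value) * 2 })
  }
}

fn main() {
  let tx = MockTx { value: 7, signed: Some(true), doubled: 14 };
  let bytes = tx.encode();
  assert_eq!(MockTx::decode(&mut &bytes[..]).unwrap(), tx);
}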

Some files were not shown because too many files have changed in this diff.