7 Commits

Author | SHA1 | Message | Date
Luke Parker | 0b30ac175e | Restore workspace-wide clippy | 2025-01-19 02:27:35 -05:00
    Fixes accumulated errors in the Substrate code. Modifies the runtime build to
    work with a modern clippy. Removes e2e tests from the workspace.
Luke Parker | 47560fa9a9 | Test manually implemented serializations in the Router lib | 2025-01-19 00:45:26 -05:00
Luke Parker | 9d57c4eb4d | Downscope dependencies in serai-processor-ethereum-primitives, const-hex decode bytecode in ethereum-schnorr-contract | 2025-01-19 00:16:50 -05:00
Luke Parker | 642ba00952 | Update Deployer README, 80-character line length | 2025-01-19 00:03:56 -05:00
Luke Parker | 3c9c12d320 | Test the Deployer contract | 2025-01-18 23:58:38 -05:00
Luke Parker | f6b52b3fd3 | Maximum line length of 80 in Deployer.sol | 2025-01-18 15:22:58 -05:00
Luke Parker | 0d906363a0 | Simplify and test deterministically_sign | 2025-01-18 15:13:39 -05:00
39 changed files with 3718 additions and 396 deletions

Cargo.lock (generated, 3339 changed lines)

File diff suppressed because it is too large.

View File

@@ -144,9 +144,9 @@ members = [
"tests/docker", "tests/docker",
"tests/message-queue", "tests/message-queue",
"tests/processor", # TODO "tests/processor",
"tests/coordinator", # TODO "tests/coordinator",
"tests/full-stack", # TODO "tests/full-stack",
"tests/reproducible-runtime", "tests/reproducible-runtime",
] ]

View File

@@ -180,7 +180,7 @@ impl<D: Db> ContinuallyRan for CanonicalEventStream<D> {
         batch = Some(ExecutedBatch {
           id: *id,
           publisher: *publishing_session,
-          external_network_block_hash: *external_network_block_hash,
+          external_network_block_hash: external_network_block_hash.0,
           in_instructions_hash: *in_instructions_hash,
           in_instruction_results: in_instruction_results
             .iter()

View File

@@ -16,6 +16,8 @@ rustdoc-args = ["--cfg", "docsrs"]
 workspace = true
 [dependencies]
+const-hex = { version = "1", default-features = false, features = ["std", "core-error"] }
 subtle = { version = "2", default-features = false, features = ["std"] }
 sha3 = { version = "0.10", default-features = false, features = ["std"] }
 group = { version = "0.13", default-features = false, features = ["alloc"] }

View File

@@ -2,4 +2,5 @@
 An Ethereum contract to verify Schnorr signatures.
-This crate will fail to build if `solc` is not installed and available.
+This crate will fail to build if the expected version of `solc` is not
+installed and available.

View File

@@ -4,8 +4,16 @@
 #![allow(non_snake_case)]
 /// The initialization bytecode of the Schnorr library.
-pub const INIT_BYTECODE: &str =
-  include_str!(concat!(env!("OUT_DIR"), "/ethereum-schnorr-contract/Schnorr.bin"));
+pub const BYTECODE: &[u8] = {
+  const BYTECODE_HEX: &[u8] =
+    include_bytes!(concat!(env!("OUT_DIR"), "/ethereum-schnorr-contract/Schnorr.bin"));
+  const BYTECODE: [u8; BYTECODE_HEX.len() / 2] =
+    match const_hex::const_decode_to_array::<{ BYTECODE_HEX.len() / 2 }>(BYTECODE_HEX) {
+      Ok(bytecode) => bytecode,
+      Err(_) => panic!("Schnorr.bin did not contain valid hex"),
+    };
+  &BYTECODE
+};
 mod public_key;
 pub use public_key::PublicKey;

View File

@@ -18,11 +18,7 @@ use crate::{Signature, tests::test_key};
 fn ecrecover(message: Scalar, odd_y: bool, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
   let sig = ecdsa::Signature::from_scalars(r, s).ok()?;
   let message: [u8; 32] = message.to_repr().into();
-  alloy_core::primitives::Signature::from_signature_and_parity(
-    sig,
-    alloy_core::primitives::Parity::Parity(odd_y),
-  )
-  .ok()?
+  alloy_core::primitives::PrimitiveSignature::from_signature_and_parity(sig, odd_y)
     .recover_address_from_prehash(&alloy_core::primitives::B256::from(message))
     .ok()
     .map(Into::into)

View File

@@ -56,7 +56,7 @@ impl AggregateRangeWitness {
   }
 }
-impl<'a> AggregateRangeStatement<'a> {
+impl AggregateRangeStatement<'_> {
   fn initial_transcript(&self) -> (Scalar, Vec<EdwardsPoint>) {
     let V = self.commitments.iter().map(|c| c * INV_EIGHT()).collect::<Vec<_>>();
     (keccak256_to_scalar(V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
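This lifetime change is the sort of fix demanded by a lint such as clippy's `needless_lifetimes` once workspace-wide clippy runs on a modern toolchain. A minimal sketch of the pattern, with a hypothetical `Statement` type rather than the actual generators struct:

```rust
// Sketch of the lint being addressed; `Statement` is illustrative, not from the codebase.
struct Statement<'a> {
  commitments: &'a [u64],
}

// Before: `impl<'a> Statement<'a>` declares a lifetime only to repeat it verbatim.
// After: the anonymous lifetime states the same bound, which is what the lint asks for.
impl Statement<'_> {
  fn sum(&self) -> u64 {
    self.commitments.iter().copied().sum()
  }
}

fn main() {
  let commitments = [1, 2, 3];
  assert_eq!(Statement { commitments: &commitments }.sum(), 6);
}
```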

View File

@@ -9,6 +9,7 @@ use crate::{
 // https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
 //   tests/unit_tests/test_tx_utils.cpp
 // which is licensed
+#[allow(clippy::empty_line_after_outer_attr)] // rustfmt is for the comment, not for the const
 #[rustfmt::skip]
 /*
 Copyright (c) 2014-2022, The Monero Project

View File

@@ -26,7 +26,7 @@ TODO
   };
   tx.gas_limit = 1_000_000u64.into();
   tx.gas_price = 1_000_000_000u64.into();
-  let tx = ethereum_serai::crypto::deterministically_sign(&tx);
+  let tx = ethereum_serai::crypto::deterministically_sign(tx);
   if self.provider.get_transaction_by_hash(*tx.hash()).await.unwrap().is_none() {
     self

View File

@@ -109,7 +109,7 @@ pub async fn deploy_contract(
     input: bin,
   };
-  let deployment_tx = deterministically_sign(&deployment_tx);
+  let deployment_tx = deterministically_sign(deployment_tx);
   // Fund the deployer address
   fund_account(

View File

@@ -33,3 +33,11 @@ ethereum-primitives = { package = "serai-processor-ethereum-primitives", path =
 [build-dependencies]
 build-solidity-contracts = { path = "../../../networks/ethereum/build-contracts", default-features = false }
+[dev-dependencies]
+alloy-rpc-client = { version = "0.9", default-features = false }
+alloy-node-bindings = { version = "0.9", default-features = false }
+tokio = { version = "1.0", default-features = false, features = ["rt-multi-thread", "macros"] }
+ethereum-test-primitives = { package = "serai-ethereum-test-primitives", path = "../test-primitives" }

View File

@@ -4,20 +4,26 @@ The deployer for Serai's Ethereum contracts.
 ## Goals

-It should be possible to efficiently locate the Serai Router on an blockchain with the EVM, without
-relying on any centralized (or even federated) entities. While deploying and locating an instance of
-the Router would be trivial, by using a fixed signature for the deployment transaction, the Router
-must be constructed with the correct key for the Serai network (or set to have the correct key
-post-construction). Since this cannot be guaranteed to occur, the process must be retryable and the
-first successful invocation must be efficiently findable.
+It should be possible to efficiently locate the Serai Router on a blockchain
+with the EVM, without relying on any centralized (or even federated) entities.
+While deploying and locating an instance of the Router would be trivial, by
+using a fixed signature for the deployment transaction, the Router must be
+constructed with the correct key for the Serai network (or set to have the
+correct key post-construction). Since this cannot be guaranteed to occur, the
+process must be retryable and the first successful invocation must be
+efficiently findable.

 ## Methodology

-We define a contract, the Deployer, to deploy the router. This contract could use `CREATE2` with the
-key representing Serai as the salt, yet this would be open to collision attacks with just 2**80
-complexity. Instead, we use `CREATE` which would require 2**80 on-chain transactions (infeasible) to
-use as the basis of a collision.
+We define a contract, the Deployer, to deploy the Router. This contract could
+use `CREATE2` with the key representing Serai as the salt, yet this would be
+open to collision attacks with just 2\*\*80 complexity. Instead, we use
+`CREATE` which would require 2\*\*80 on-chain transactions (infeasible) to use
+as the basis of a collision.

-In order to efficiently find the contract for a key, the Deployer contract saves the addresses of
-deployed contracts (indexed by the initialization code hash). This allows using a single call to a
-contract with a known address to find the proper Router.
+In order to efficiently find the contract for a key, the Deployer contract
+saves the addresses of deployed contracts (indexed by the initialization code's
+hash). This allows using a single call to a contract with a known address to
+find the proper Router. Saving the address to the state enables finding the
+Router's address even if the connected-to node's logs have been pruned for
+historical blocks.
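As a rough illustration of the trade-off described in the README, the two derivations differ in what an attacker can grind over offline. A sketch of both address formulas, using the `sha3` crate for Keccak256 (the helper names are illustrative, not from the repository):

```rust
// Illustrative sketch of the two derivations discussed in the README; not repository code.
use sha3::{Digest, Keccak256};

fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

// CREATE: address = keccak256(rlp([sender, nonce]))[12 ..]. With a fixed sender (pinned by the
// deterministic deployment signature) and a sequential nonce, grinding a collision requires
// on-chain transactions, which is the property relied upon above.
fn create_address_nonce_0(sender: [u8; 20]) -> [u8; 20] {
  let mut rlp = Vec::with_capacity(23);
  rlp.push(0xd6); // RLP list prefix for a 22-byte payload
  rlp.push(0x94); // RLP string prefix for the 20-byte sender
  rlp.extend_from_slice(&sender);
  rlp.push(0x80); // nonce 0
  keccak256(&rlp)[12 ..].try_into().unwrap()
}

// CREATE2: address = keccak256(0xff ++ deployer ++ salt ++ keccak256(init_code))[12 ..].
// The salt (here, the key representing Serai) can be ground offline, enabling the roughly
// 2**80 birthday-style collision the README warns about.
fn create2_address(deployer: [u8; 20], salt: [u8; 32], init_code: &[u8]) -> [u8; 20] {
  let mut preimage = Vec::with_capacity(85);
  preimage.push(0xff);
  preimage.extend_from_slice(&deployer);
  preimage.extend_from_slice(&salt);
  preimage.extend_from_slice(&keccak256(init_code));
  keccak256(&preimage)[12 ..].try_into().unwrap()
}
```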

View File

@@ -4,29 +4,30 @@ pragma solidity ^0.8.26;
 /*
   The expected deployment process of Serai's Router is as follows:

-  1) A transaction deploying Deployer is made. Then, a deterministic signature is
-     created such that an account with an unknown private key is the creator of
-     the contract. Anyone can fund this address, and once anyone does, the
-     transaction deploying Deployer can be published by anyone. No other
-     transaction may be made from that account.
+  1) A transaction deploying Deployer is made. Then, a deterministic signature
+     is created such that an account with an unknown private key is the creator
+     of the contract. Anyone can fund this address, and once anyone does, the
+     transaction deploying Deployer can be published by anyone. No other
+     transaction may be made from that account.

-  2) Anyone deploys the Router through the Deployer. This uses a sequential nonce
-     such that meet-in-the-middle attacks, with complexity 2**80, aren't feasible.
-     While such attacks would still be feasible if the Deployer's address was
-     controllable, the usage of a deterministic signature with a NUMS method
-     prevents that.
+  2) Anyone deploys the Router through the Deployer. This uses a sequential
+     nonce such that meet-in-the-middle attacks, with complexity 2**80, aren't
+     feasible. While such attacks would still be feasible if the Deployer's
+     address was controllable, the usage of a deterministic signature with a
+     NUMS method prevents that.

-  This doesn't have any denial-of-service risks and will resolve once anyone steps
-  forward as deployer. This does fail to guarantee an identical address across
-  every chain, though it enables letting anyone efficiently ask the Deployer for
-  the address (with the Deployer having an identical address on every chain).
+  This doesn't have any denial-of-service risks and will resolve once anyone
+  steps forward as deployer. This does fail to guarantee an identical address
+  for the Router across every chain, though it enables anyone to efficiently
+  ask the Deployer for the address (with the Deployer having an identical
+  address on every chain).

-  Unfortunately, guaranteeing identical addresses aren't feasible. We'd need the
-  Deployer contract to use a consistent salt for the Router, yet the Router must
-  be deployed with a specific public key for Serai. Since Ethereum isn't able to
-  determine a valid public key (one the result of a Serai DKG) from a dishonest
-  public key, we have to allow multiple deployments with Serai being the one to
-  determine which to use.
+  Unfortunately, guaranteeing identical addresses for the Router isn't
+  feasible. We'd need the Deployer contract to use a consistent salt for the
+  Router, yet the Router must be deployed with a specific public key for Serai.
+  Since Ethereum isn't able to determine a valid public key (one the result of
+  a Serai DKG) from a dishonest public key (one arbitrary), we have to allow
+  multiple deployments with Serai being the one to determine which to use.

   The alternative would be to have a council publish the Serai key on-Ethereum,
   with Serai verifying the published result. This would introduce a DoS risk in

@@ -68,15 +69,18 @@ contract Deployer {
     /*
       Check this wasn't prior deployed.

-      This is a post-check, not a pre-check (in violation of the CEI pattern). If we used a
-      pre-check, a deployed contract could re-enter the Deployer to deploy the same contract
-      multiple times due to the inner call updating state and then the outer call overwriting it.
-      The post-check causes the outer call to error once the inner call updates state.
+      This is a post-check, not a pre-check (in violation of the CEI pattern).
+      If we used a pre-check, a deployed contract could re-enter the Deployer
+      to deploy the same contract multiple times due to the inner call updating
+      state and then the outer call overwriting it. The post-check causes the
+      outer call to error once the inner call updates state.

-      This does mean contract deployment may fail if deployment causes arbitrary execution which
-      maliciously nests deployment of the being-deployed contract. Such an inner call won't fail,
-      yet the outer call would. The usage of a re-entrancy guard would call the inner call to fail
-      while the outer call succeeds. This is considered so edge-case it isn't worth handling.
+      This does mean contract deployment may fail if deployment causes
+      arbitrary execution which maliciously nests deployment of the
+      being-deployed contract. Such an inner call won't fail, yet the outer
+      call would. The usage of a re-entrancy guard would cause the inner call
+      to fail while the outer call succeeds. This is considered so edge-case it
+      isn't worth handling.
     */
     if (deployments[initCodeHash] != address(0)) {
       revert PriorDeployed();

View File

@@ -4,7 +4,7 @@
 use std::sync::Arc;

-use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
+use alloy_core::primitives::{hex, Address, U256, Bytes, TxKind};
 use alloy_consensus::{Signed, TxLegacy};
 use alloy_sol_types::SolCall;

@@ -14,6 +14,9 @@ use alloy_transport::{TransportErrorKind, RpcError};
 use alloy_simple_request_transport::SimpleRequest;
 use alloy_provider::{Provider, RootProvider};

+#[cfg(test)]
+mod tests;
+
 #[rustfmt::skip]
 #[expect(warnings)]
 #[expect(needless_pass_by_value)]

@@ -24,6 +27,17 @@ mod abi {
   alloy_sol_macro::sol!("contracts/Deployer.sol");
 }

+const BYTECODE: &[u8] = {
+  const BYTECODE_HEX: &[u8] =
+    include_bytes!(concat!(env!("OUT_DIR"), "/serai-processor-ethereum-deployer/Deployer.bin"));
+  const BYTECODE: [u8; BYTECODE_HEX.len() / 2] =
+    match hex::const_decode_to_array::<{ BYTECODE_HEX.len() / 2 }>(BYTECODE_HEX) {
+      Ok(bytecode) => bytecode,
+      Err(_) => panic!("Deployer.bin did not contain valid hex"),
+    };
+  &BYTECODE
+};
+
 /// The Deployer contract for the Serai Router contract.
 ///
 /// This Deployer has a deterministic address, letting it be immediately identified on any instance

@@ -38,24 +52,45 @@ impl Deployer {
   /// funded for this transaction to be submitted. This account has no known private key to anyone
   /// so ETH sent can be neither misappropriated nor returned.
   pub fn deployment_tx() -> Signed<TxLegacy> {
-    pub const BYTECODE: &[u8] =
-      include_bytes!(concat!(env!("OUT_DIR"), "/serai-processor-ethereum-deployer/Deployer.bin"));
-    let bytecode =
-      Bytes::from_hex(BYTECODE).expect("compiled-in Deployer bytecode wasn't valid hex");
+    let bytecode = Bytes::from(BYTECODE);

+    // Legacy transactions are used to ensure the widest possible degree of support across EVMs
     let tx = TxLegacy {
       chain_id: None,
       nonce: 0,
-      // 100 gwei
+      /*
+        This needs to use a fixed gas price to achieve a deterministic address. The gas price is
+        fixed to 100 gwei, which should be generous, in order to make this unlikely to get stuck.
+        While potentially expensive, this only has to occur per chain this is deployed on.
+
+        If this is too low of a gas price, private mempools can be used, with other transactions in
+        the bundle raising the gas price to acceptable levels. While this strategy could be
+        entirely relied upon, allowing the gas price paid to reflect the network's actual gas
+        price, that wouldn't work for EVM networks without private mempools.
+
+        That leaves this as failing only if it violates a protocol constant, or if the gas price is
+        too low on a network without private mempools to publish via. In that case, this code
+        should to be forked to accept an enum of which network the deployment is for (with the gas
+        price derivative of that, as common as possible across networks to minimize the amount of
+        addresses representing the Deployer).
+      */
       gas_price: 100_000_000_000u128,
-      // TODO: Use a more accurate gas limit
-      gas_limit: 1_000_000u64,
+      /*
+        This is twice the cost of deployment as of Ethereum's Cancun upgrade. The wide margin is to
+        increase the likelihood of surviving changes to the cost of contract deployment (notably
+        the gas cost of calldata). While wasteful, this only has to be done once per chain and is
+        accepted accordingly.
+
+        If this is ever unacceptable, the parameterization suggested in case the `gas_price` is
+        unacceptable should be implemented.
+      */
+      gas_limit: 300_698,
       to: TxKind::Create,
       value: U256::ZERO,
       input: bytecode,
     };

-    ethereum_primitives::deterministically_sign(&tx)
+    ethereum_primitives::deterministically_sign(tx)
   }

   /// Obtain the deterministic address for this contract.
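Because the signature, nonce, gas price, and gas limit above are all constants, the recovered sender, and with it the Deployer's `CREATE` address, is fixed across every chain. A minimal sketch of that derivation, assuming alloy's `recover_signer` and `Address::create` helpers rather than the crate's actual `address()` implementation:

```rust
// Sketch only: derives the deterministic Deployer address from the fixed deployment
// transaction. Assumes alloy_consensus / alloy_primitives as used elsewhere in this PR.
use alloy_consensus::{Signed, TxLegacy};
use alloy_primitives::Address;

fn deployer_address(deployment_tx: &Signed<TxLegacy>) -> Address {
  // The signature and payload are fixed, so the recovered sender is fixed.
  let sender =
    deployment_tx.recover_signer().expect("deployment tx must have a recoverable signer");
  // The transaction uses nonce 0, so the resulting CREATE address is fixed as well.
  sender.create(0)
}
```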

View File

@@ -0,0 +1,107 @@
use std::sync::Arc;
use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
use alloy_node_bindings::Anvil;
use crate::{
abi::Deployer::{PriorDeployed, DeploymentFailed, DeployerErrors},
Deployer,
};
#[tokio::test]
async fn test_deployer() {
const CANCUN: &str = "cancun";
const LATEST: &str = "latest";
for network in [CANCUN, LATEST] {
let anvil = Anvil::new().arg("--hardfork").arg(network).spawn();
let provider = Arc::new(RootProvider::new(
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
));
// Deploy the Deployer
{
let deployment_tx = Deployer::deployment_tx();
let gas_programmed = deployment_tx.tx().gas_limit;
let receipt = ethereum_test_primitives::publish_tx(&provider, deployment_tx).await;
assert!(receipt.status());
assert_eq!(receipt.contract_address.unwrap(), Deployer::address());
if network == CANCUN {
// Check the gas programmed was twice the gas used
// We only check this for cancun as the constant was programmed per cancun's gas pricing
assert_eq!(2 * receipt.gas_used, gas_programmed);
}
}
// Deploy the deployer with the deployer
let mut deploy_tx = Deployer::deploy_tx(crate::BYTECODE.to_vec());
deploy_tx.gas_price = 100_000_000_000u128;
deploy_tx.gas_limit = 1_000_000;
{
let deploy_tx = ethereum_primitives::deterministically_sign(deploy_tx.clone());
let receipt = ethereum_test_primitives::publish_tx(&provider, deploy_tx).await;
assert!(receipt.status());
}
// Verify we can now find the deployer
{
let deployer = Deployer::new(provider.clone()).await.unwrap().unwrap();
let deployed_deployer = deployer
.find_deployment(ethereum_primitives::keccak256(crate::BYTECODE))
.await
.unwrap()
.unwrap();
assert_eq!(
provider.get_code_at(deployed_deployer).await.unwrap(),
provider.get_code_at(Deployer::address()).await.unwrap(),
);
assert!(deployed_deployer != Deployer::address());
}
// Verify deploying the same init code multiple times fails
{
let mut deploy_tx = deploy_tx;
// Change the gas price to cause a distinct message, and with it, a distinct signer
deploy_tx.gas_price += 1;
let deploy_tx = ethereum_primitives::deterministically_sign(deploy_tx);
let receipt = ethereum_test_primitives::publish_tx(&provider, deploy_tx.clone()).await;
assert!(!receipt.status());
let call = TransactionRequest::default()
.to(Deployer::address())
.input(TransactionInput::new(deploy_tx.tx().input.clone()));
let call_err = provider.call(&call).await.unwrap_err();
assert!(matches!(
call_err.as_error_resp().unwrap().as_decoded_error::<DeployerErrors>(true).unwrap(),
DeployerErrors::PriorDeployed(PriorDeployed {}),
));
}
// Verify deployment failures yield errors properly
{
// 0xfe is an invalid opcode which is guaranteed to remain invalid
let mut deploy_tx = Deployer::deploy_tx(vec![0xfe]);
deploy_tx.gas_price = 100_000_000_000u128;
deploy_tx.gas_limit = 1_000_000;
let deploy_tx = ethereum_primitives::deterministically_sign(deploy_tx);
let receipt = ethereum_test_primitives::publish_tx(&provider, deploy_tx.clone()).await;
assert!(!receipt.status());
let call = TransactionRequest::default()
.to(Deployer::address())
.input(TransactionInput::new(deploy_tx.tx().input.clone()));
let call_err = provider.call(&call).await.unwrap_err();
assert!(matches!(
call_err.as_error_resp().unwrap().as_decoded_error::<DeployerErrors>(true).unwrap(),
DeployerErrors::DeploymentFailed(DeploymentFailed {}),
));
}
}
}

View File

@@ -20,5 +20,5 @@ workspace = true
 group = { version = "0.13", default-features = false }
 k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic"] }

-alloy-core = { version = "0.8", default-features = false }
+alloy-primitives = { version = "0.8", default-features = false }
 alloy-consensus = { version = "0.9", default-features = false, features = ["k256"] }

View File

@@ -5,44 +5,76 @@
 use group::ff::PrimeField;
 use k256::Scalar;

-use alloy_core::primitives::PrimitiveSignature;
+use alloy_primitives::PrimitiveSignature;
 use alloy_consensus::{SignableTransaction, Signed, TxLegacy};

 /// The Keccak256 hash function.
 pub fn keccak256(data: impl AsRef<[u8]>) -> [u8; 32] {
-  alloy_core::primitives::keccak256(data.as_ref()).into()
+  alloy_primitives::keccak256(data.as_ref()).into()
 }

 /// Deterministically sign a transaction.
 ///
-/// This signs a transaction via setting `r = 1, s = 1`, and incrementing `r` until a signer is
-/// recoverable from the signature for this transaction. The purpose of this is to be able to send
-/// a transaction from a known account which no one knows the private key for.
+/// This signs a transaction via setting a signature of `r = 1, s = 1`. The purpose of this is to
+/// be able to send a transaction from an account which no one knows the private key for and no
+/// other messages may be signed for from.
 ///
 /// This function panics if passed a transaction with a non-None chain ID. This is because the
 /// signer for this transaction is only singular across any/all EVM instances if it isn't binding
 /// to an instance.
-pub fn deterministically_sign(tx: &TxLegacy) -> Signed<TxLegacy> {
+pub fn deterministically_sign(tx: TxLegacy) -> Signed<TxLegacy> {
   assert!(
     tx.chain_id.is_none(),
     "chain ID was Some when deterministically signing a TX (causing a non-singular signer)"
   );

-  let mut r = Scalar::ONE;
+  /*
+    ECDSA signatures are:
+    - x = private key
+    - k = rand()
+    - R = k * G
+    - r = R.x()
+    - s = (H(m) + (r * x)) * k.invert()
+
+    Key recovery is performed via:
+    - a = s * R = (H(m) + (r * x)) * G
+    - b = a - (H(m) * G) = (r * x) * G
+    - X = b / r = x * G
+    - X = ((s * R) - (H(m) * G)) * r.invert()
+
+    This requires `r` be non-zero and `R` be recoverable from `r` and the parity byte. For
+    `r = 1, s = 1`, this sets `X` to `R - (H(m) * G)`. Since there is an `R` recoverable for
+    `r = 1`, since the `R` is a point with an unknown discrete logarithm w.r.t. the generator, and
+    since the resulting key is dependent on the message signed for, this will always work to
+    the specification.
+  */
+  let r = Scalar::ONE;
   let s = Scalar::ONE;
-  loop {
-    // Create the signature
-    let r_bytes: [u8; 32] = r.to_repr().into();
-    let s_bytes: [u8; 32] = s.to_repr().into();
-    let signature =
-      PrimitiveSignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), false);
+  let r_bytes: [u8; 32] = r.to_repr().into();
+  let s_bytes: [u8; 32] = s.to_repr().into();
+  let signature =
+    PrimitiveSignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), false);

-    // Check if this is a valid signature
-    let tx = tx.clone().into_signed(signature);
-    if tx.recover_signer().is_ok() {
-      return tx;
-    }
-
-    r += Scalar::ONE;
-  }
+  let res = tx.into_signed(signature);
+  debug_assert!(res.recover_signer().is_ok());
+  res
+}
+
+#[test]
+fn test_deterministically_sign() {
+  let tx = TxLegacy { chain_id: None, ..Default::default() };
+  let signed = deterministically_sign(tx.clone());
+  assert!(signed.recover_signer().is_ok());
+
+  let one = alloy_primitives::U256::from(1u64);
+  assert_eq!(signed.signature().r(), one);
+  assert_eq!(signed.signature().s(), one);
+
+  let mut other_tx = tx.clone();
+  other_tx.nonce += 1;
+  // Signing a distinct message should yield a distinct signer
+  assert!(
+    signed.recover_signer().unwrap() != deterministically_sign(other_tx).recover_signer().unwrap()
+  );
 }
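The recovery algebra in the new comment can be sanity-checked with k256's arithmetic: for any non-zero `r`, `s`, nonce point `R`, and message hash `h`, the recovered key `X = (s*R - h*G) * r^-1` satisfies ECDSA's verification equation. A sketch of that check (illustrative, not part of the crate, which fixes `r = s = 1`):

```rust
// Illustrative check of the key-recovery algebra documented above; not crate code.
// It exercises the group algebra only, not the x-coordinate comparison of full ECDSA.
use k256::{ProjectivePoint, Scalar};
use k256::elliptic_curve::{Field, Group};
use rand_core::OsRng;

fn main() {
  let g = ProjectivePoint::GENERATOR;

  // Arbitrary signature scalars and message hash (deterministically_sign fixes r = s = 1).
  let r = Scalar::random(&mut OsRng);
  let s = Scalar::random(&mut OsRng);
  let h = Scalar::random(&mut OsRng);

  // A nonce point R; its discrete logarithm is irrelevant to the algebra below.
  let big_r = ProjectivePoint::random(&mut OsRng);

  // Key recovery: X = (s*R - h*G) * r^-1.
  let x = (big_r * s - g * h) * r.invert().unwrap();

  // ECDSA verification recomputes R as (h * s^-1)*G + (r * s^-1)*X; it must match.
  let s_inv = s.invert().unwrap();
  assert_eq!(g * (h * s_inv) + x * (r * s_inv), big_r);
}
```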

View File

@@ -207,7 +207,7 @@ impl From<&[(SeraiAddress, U256)]> for OutInstructions {
 /// An action which was executed by the Router.
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub enum Executed {
-  /// Set a new key.
+  /// New key was set.
   SetKey {
     /// The nonce this was done with.
     nonce: u64,

View File

@@ -22,6 +22,8 @@ use ethereum_deployer::Deployer;
 use crate::{Coin, OutInstructions, Router};

+mod read_write;
+
 #[test]
 fn execute_reentrancy_guard() {
   let hash = alloy_core::primitives::keccak256(b"ReentrancyGuard Router.execute");

@@ -84,11 +86,11 @@ async fn setup_test(
   // Set a gas price (100 gwei)
   tx.gas_price = 100_000_000_000;
   // Sign it
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);
   // Publish it
   let receipt = ethereum_test_primitives::publish_tx(&provider, tx).await;
   assert!(receipt.status());
-  assert_eq!(u128::from(Router::DEPLOYMENT_GAS), ((receipt.gas_used + 1000) / 1000) * 1000);
+  assert_eq!(Router::DEPLOYMENT_GAS, ((receipt.gas_used + 1000) / 1000) * 1000);

   let router = Router::new(provider.clone(), &public_key).await.unwrap().unwrap();

@@ -123,13 +125,10 @@ async fn confirm_next_serai_key(
   let mut tx = router.confirm_next_serai_key(&sig);
   tx.gas_price = 100_000_000_000;
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);
   let receipt = ethereum_test_primitives::publish_tx(provider, tx).await;
   assert!(receipt.status());
-  assert_eq!(
-    u128::from(Router::CONFIRM_NEXT_SERAI_KEY_GAS),
-    ((receipt.gas_used + 1000) / 1000) * 1000
-  );
+  assert_eq!(Router::CONFIRM_NEXT_SERAI_KEY_GAS, ((receipt.gas_used + 1000) / 1000) * 1000);

   receipt
 }

@@ -164,10 +163,10 @@ async fn test_update_serai_key() {
   let mut tx = router.update_serai_key(&update_to, &sig);
   tx.gas_price = 100_000_000_000;
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);
   let receipt = ethereum_test_primitives::publish_tx(&provider, tx).await;
   assert!(receipt.status());
-  assert_eq!(u128::from(Router::UPDATE_SERAI_KEY_GAS), ((receipt.gas_used + 1000) / 1000) * 1000);
+  assert_eq!(Router::UPDATE_SERAI_KEY_GAS, ((receipt.gas_used + 1000) / 1000) * 1000);

   assert_eq!(router.key(receipt.block_hash.unwrap().into()).await.unwrap(), Some(key.1));
   assert_eq!(router.next_key(receipt.block_hash.unwrap().into()).await.unwrap(), Some(update_to));

@@ -199,7 +198,7 @@ async fn test_eth_in_instruction() {
       .abi_encode()
       .into(),
   };
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);

   let signer = tx.recover_signer().unwrap();
   let receipt = ethereum_test_primitives::publish_tx(&provider, tx).await;

@@ -250,7 +249,7 @@ async fn publish_outs(
   let mut tx = router.execute(coin, fee, outs, &sig);
   tx.gas_price = 100_000_000_000;
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);
   ethereum_test_primitives::publish_tx(provider, tx).await
 }

@@ -270,7 +269,7 @@ async fn test_eth_address_out_instruction() {
   let instructions = OutInstructions::from([].as_slice());
   let receipt = publish_outs(&provider, &router, key, 2, Coin::Ether, fee, instructions).await;
   assert!(receipt.status());
-  assert_eq!(u128::from(Router::EXECUTE_BASE_GAS), ((receipt.gas_used + 1000) / 1000) * 1000);
+  assert_eq!(Router::EXECUTE_BASE_GAS, ((receipt.gas_used + 1000) / 1000) * 1000);

   assert_eq!(router.next_nonce(receipt.block_hash.unwrap().into()).await.unwrap(), 3);
 }

@@ -307,10 +306,10 @@ async fn escape_hatch(
   let mut tx = router.escape_hatch(escape_to, &sig);
   tx.gas_price = 100_000_000_000;
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);
   let receipt = ethereum_test_primitives::publish_tx(provider, tx).await;
   assert!(receipt.status());
-  assert_eq!(u128::from(Router::ESCAPE_HATCH_GAS), ((receipt.gas_used + 1000) / 1000) * 1000);
+  assert_eq!(Router::ESCAPE_HATCH_GAS, ((receipt.gas_used + 1000) / 1000) * 1000);

   receipt
 }

@@ -321,7 +320,7 @@ async fn escape(
 ) -> TransactionReceipt {
   let mut tx = router.escape(coin.address());
   tx.gas_price = 100_000_000_000;
-  let tx = ethereum_primitives::deterministically_sign(&tx);
+  let tx = ethereum_primitives::deterministically_sign(tx);
   let receipt = ethereum_test_primitives::publish_tx(provider, tx).await;
   assert!(receipt.status());
   receipt

View File

@@ -0,0 +1,85 @@
use rand_core::{RngCore, OsRng};
use alloy_core::primitives::U256;
use crate::{Coin, InInstruction, Executed};
fn coins() -> [Coin; 2] {
[Coin::Ether, {
let mut erc20 = [0; 20];
OsRng.fill_bytes(&mut erc20);
Coin::Erc20(erc20.into())
}]
}
#[test]
fn test_coin_read_write() {
for coin in coins() {
let mut res = vec![];
coin.write(&mut res).unwrap();
assert_eq!(coin, Coin::read(&mut res.as_slice()).unwrap());
}
}
#[test]
fn test_in_instruction_read_write() {
for coin in coins() {
let instruction = InInstruction {
id: (
{
let mut tx_id = [0; 32];
OsRng.fill_bytes(&mut tx_id);
tx_id
},
OsRng.next_u64(),
),
from: {
let mut from = [0; 20];
OsRng.fill_bytes(&mut from);
from
},
coin,
amount: U256::from_le_bytes({
let mut amount = [0; 32];
OsRng.fill_bytes(&mut amount);
amount
}),
data: {
let len = usize::try_from(OsRng.next_u64() % 65536).unwrap();
let mut data = vec![0; len];
OsRng.fill_bytes(&mut data);
data
},
};
let mut buf = vec![];
instruction.write(&mut buf).unwrap();
assert_eq!(InInstruction::read(&mut buf.as_slice()).unwrap(), instruction);
}
}
#[test]
fn test_executed_read_write() {
for executed in [
Executed::SetKey {
nonce: OsRng.next_u64(),
key: {
let mut key = [0; 32];
OsRng.fill_bytes(&mut key);
key
},
},
Executed::Batch {
nonce: OsRng.next_u64(),
message_hash: {
let mut message_hash = [0; 32];
OsRng.fill_bytes(&mut message_hash);
message_hash
},
},
] {
let mut res = vec![];
executed.write(&mut res).unwrap();
assert_eq!(executed, Executed::read(&mut res.as_slice()).unwrap());
}
}

View File

@@ -76,7 +76,7 @@ pub async fn deploy_contract(
     input: bin.into(),
   };
-  let deployment_tx = deterministically_sign(&deployment_tx);
+  let deployment_tx = deterministically_sign(deployment_tx);

   let receipt = publish_tx(provider, deployment_tx).await;
   assert!(receipt.status());

View File

@@ -5,6 +5,7 @@ use blake2::{digest::typenum::U32, Digest, Blake2b};
 use scale::Encode;
 use serai_db::{DbTxn, Db};

+use serai_primitives::BlockHash;
 use serai_in_instructions_primitives::{MAX_BATCH_SIZE, Batch};

 use primitives::{

@@ -106,7 +107,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
       // If this block is notable, create the Batch(s) for it
       if notable {
         let network = S::NETWORK;
-        let external_network_block_hash = index::block_id(&txn, block_number);
+        let external_network_block_hash = BlockHash(index::block_id(&txn, block_number));
         let mut batch_id = BatchDb::<S>::acquire_batch_id(&mut txn);

         // start with empty batch

View File

@@ -20,7 +20,7 @@ pub enum Event {
     network: NetworkId,
     publishing_session: Session,
     id: u32,
-    external_network_block_hash: [u8; 32],
+    external_network_block_hash: BlockHash,
     in_instructions_hash: [u8; 32],
     in_instruction_results: bitvec::vec::BitVec<u8, bitvec::order::Lsb0>,
   },

View File

@@ -8,12 +8,13 @@ use blake2::{
 use scale::Encode;

 use serai_client::{
-  primitives::{Amount, NetworkId, Coin, Balance, BlockHash, SeraiAddress},
+  primitives::{BlockHash, NetworkId, Coin, Amount, Balance, SeraiAddress},
+  coins::CoinsEvent,
+  validator_sets::primitives::Session,
   in_instructions::{
     primitives::{InInstruction, InInstructionWithBalance, Batch},
     InInstructionsEvent,
   },
-  coins::CoinsEvent,
   Serai,
 };

@@ -32,9 +33,13 @@
     let amount = Amount(OsRng.next_u64().saturating_add(1));
     let balance = Balance { coin, amount };

+    let mut external_network_block_hash = BlockHash([0; 32]);
+    OsRng.fill_bytes(&mut external_network_block_hash.0);
+
     let batch = Batch {
       network,
       id,
+      external_network_block_hash,
       instructions: vec![InInstructionWithBalance {
         instruction: InInstruction::Transfer(address),
         balance,

@@ -51,8 +56,11 @@
       batches,
       vec![InInstructionsEvent::Batch {
         network,
+        publishing_session: Session(0),
         id,
-        instructions_hash: Blake2b::<U32>::digest(batch.instructions.encode()).into(),
+        external_network_block_hash,
+        in_instructions_hash: Blake2b::<U32>::digest(batch.instructions.encode()).into(),
+        in_instruction_results: bitvec::bitvec![u8, bitvec::order::Lsb0; 1; 1],
       }]
     );
   }

View File

@@ -7,19 +7,22 @@ use blake2::{
 use scale::Encode;

+use serai_abi::coins::primitives::OutInstructionWithBalance;
+
 use sp_core::Pair;

 use serai_client::{
   primitives::{
-    Amount, NetworkId, Coin, Balance, BlockHash, SeraiAddress, ExternalAddress,
+    BlockHash, NetworkId, Coin, Amount, Balance, SeraiAddress, ExternalAddress,
     insecure_pair_from_name,
   },
-  coins::{
-    primitives::{OutInstruction, OutInstructionWithBalance},
-    CoinsEvent,
-  },
+  validator_sets::primitives::Session,
   in_instructions::{
     InInstructionsEvent,
     primitives::{InInstruction, InInstructionWithBalance, Batch},
   },
+  coins::{primitives::OutInstruction, CoinsEvent},
   Serai, SeraiCoins,
 };

@@ -45,7 +48,7 @@ serai_test!(
     let batch = Batch {
       network,
       id,
-      block: block_hash,
+      external_network_block_hash: block_hash,
       instructions: vec![InInstructionWithBalance {
         instruction: InInstruction::Transfer(address),
         balance,

@@ -61,9 +64,11 @@ serai_test!(
       batches,
       vec![InInstructionsEvent::Batch {
         network,
+        publishing_session: Session(0),
         id,
-        block: block_hash,
-        instructions_hash: Blake2b::<U32>::digest(batch.instructions.encode()).into(),
+        external_network_block_hash: block_hash,
+        in_instructions_hash: Blake2b::<U32>::digest(batch.instructions.encode()).into(),
+        in_instruction_results: bitvec::bitvec![u8, bitvec::order::Lsb0; 1; 1],
       }]
     );

View File

@@ -10,12 +10,12 @@ use schnorrkel::Schnorrkel;
 use sp_core::{sr25519::Signature, Pair as PairTrait};

 use serai_abi::{
-  genesis_liquidity::primitives::{oraclize_values_message, Values},
-  validator_sets::primitives::{musig_context, Session, ValidatorSet},
-  in_instructions::primitives::{InInstruction, InInstructionWithBalance, Batch},
   primitives::{
-    Amount, NetworkId, Coin, Balance, BlockHash, SeraiAddress, insecure_pair_from_name,
+    BlockHash, NetworkId, Coin, Amount, Balance, SeraiAddress, insecure_pair_from_name,
   },
+  validator_sets::primitives::{musig_context, Session, ValidatorSet},
+  genesis_liquidity::primitives::{oraclize_values_message, Values},
+  in_instructions::primitives::{InInstruction, InInstructionWithBalance, Batch},
 };

 use serai_client::{Serai, SeraiGenesisLiquidity};

@@ -53,7 +53,7 @@
     })
     .collect::<Vec<_>>();

-  // set up bloch hash
+  // set up block hash
   let mut block = BlockHash([0; 32]);
   OsRng.fill_bytes(&mut block.0);

@@ -65,7 +65,12 @@
       })
       .or_insert(0);

-    let batch = Batch { network: coin.network(), id: batch_ids[&coin.network()], instructions };
+    let batch = Batch {
+      network: coin.network(),
+      external_network_block_hash: block,
+      id: batch_ids[&coin.network()],
+      instructions,
+    };

     provide_batch(serai, batch).await;
   }

View File

@@ -9,7 +9,7 @@ use scale::Encode;
 use sp_core::Pair;

 use serai_client::{
-  primitives::{insecure_pair_from_name, BlockHash, NetworkId, Balance, SeraiAddress},
+  primitives::{BlockHash, NetworkId, Balance, SeraiAddress, insecure_pair_from_name},
   validator_sets::primitives::{ValidatorSet, KeyPair},
   in_instructions::{
     primitives::{Batch, SignedBatch, batch_message, InInstruction, InInstructionWithBalance},

@@ -45,16 +45,29 @@ pub async fn provide_batch(serai: &Serai, batch: Batch) -> [u8; 32] {
   )
   .await;

-  let batches = serai.as_of(block).in_instructions().batch_events().await.unwrap();
-  // TODO: impl From<Batch> for BatchEvent?
-  assert_eq!(
-    batches,
-    vec![InInstructionsEvent::Batch {
-      network: batch.network,
-      id: batch.id,
-      instructions_hash: Blake2b::<U32>::digest(batch.instructions.encode()).into(),
-    }],
-  );
+  {
+    let mut batches = serai.as_of(block).in_instructions().batch_events().await.unwrap();
+    assert_eq!(batches.len(), 1);
+    let InInstructionsEvent::Batch {
+      network,
+      publishing_session,
+      id,
+      external_network_block_hash,
+      in_instructions_hash,
+      in_instruction_results: _,
+    } = batches.swap_remove(0)
+    else {
+      panic!("Batch event wasn't Batch event")
+    };
+    assert_eq!(network, batch.network);
+    assert_eq!(publishing_session, session);
+    assert_eq!(id, batch.id);
+    assert_eq!(external_network_block_hash, batch.external_network_block_hash);
+    assert_eq!(
+      in_instructions_hash,
+      <[u8; 32]>::from(Blake2b::<U32>::digest(batch.instructions.encode()))
+    );
+  }

   // TODO: Check the tokens events

@@ -75,7 +88,7 @@
   let batch = Batch {
     network,
     id: batch_id,
-    block: block_hash,
+    external_network_block_hash: block_hash,
     instructions: vec![InInstructionWithBalance {
       instruction: InInstruction::Transfer(address),
       balance,

View File

@@ -6,8 +6,8 @@ use serai_abi::in_instructions::primitives::DexCall;
 use serai_client::{
   primitives::{
-    Amount, NetworkId, Coin, Balance, BlockHash, insecure_pair_from_name, ExternalAddress,
-    SeraiAddress,
+    BlockHash, NetworkId, Coin, Amount, Balance, SeraiAddress, ExternalAddress,
+    insecure_pair_from_name,
   },
   in_instructions::primitives::{
     InInstruction, InInstructionWithBalance, Batch, IN_INSTRUCTION_EXECUTOR, OutAddress,

@@ -229,7 +229,7 @@ serai_test!(
     let batch = Batch {
       network: NetworkId::Bitcoin,
       id: batch_id,
-      block: block_hash,
+      external_network_block_hash: block_hash,
      instructions: vec![InInstructionWithBalance {
        instruction: InInstruction::Dex(DexCall::SwapAndAddLiquidity(pair.public().into())),
        balance: Balance { coin: Coin::Bitcoin, amount: Amount(20_000_000_000_000) },

@@ -313,7 +313,7 @@ serai_test!(
     let batch = Batch {
       network: NetworkId::Monero,
       id: coin1_batch_id,
-      block: block_hash,
+      external_network_block_hash: block_hash,
       instructions: vec![InInstructionWithBalance {
         instruction: InInstruction::Dex(DexCall::Swap(out_balance, out_address)),
         balance: Balance { coin: coin1, amount: Amount(200_000_000_000_000) },

@@ -353,7 +353,7 @@ serai_test!(
     let batch = Batch {
       network: NetworkId::Ethereum,
       id: coin2_batch_id,
-      block: block_hash,
+      external_network_block_hash: block_hash,
       instructions: vec![InInstructionWithBalance {
         instruction: InInstruction::Dex(DexCall::Swap(out_balance, out_address.clone())),
         balance: Balance { coin: coin2, amount: Amount(200_000_000_000) },

@@ -391,7 +391,7 @@ serai_test!(
     let batch = Batch {
       network: NetworkId::Monero,
       id: coin1_batch_id,
-      block: block_hash,
+      external_network_block_hash: block_hash,
       instructions: vec![InInstructionWithBalance {
         instruction: InInstruction::Dex(DexCall::Swap(out_balance, out_address.clone())),
         balance: Balance { coin: coin1, amount: Amount(100_000_000_000_000) },

View File

@@ -4,13 +4,13 @@ use rand_core::{RngCore, OsRng};
 use serai_client::TemporalSerai;

 use serai_abi::{
-  emissions::primitives::{INITIAL_REWARD_PER_BLOCK, SECURE_BY},
-  in_instructions::primitives::Batch,
   primitives::{
-    BlockHash, Coin, COINS, FAST_EPOCH_DURATION, FAST_EPOCH_INITIAL_PERIOD, NETWORKS,
-    TARGET_BLOCK_TIME,
+    NETWORKS, COINS, TARGET_BLOCK_TIME, FAST_EPOCH_DURATION, FAST_EPOCH_INITIAL_PERIOD, BlockHash,
+    Coin,
   },
   validator_sets::primitives::Session,
+  emissions::primitives::{INITIAL_REWARD_PER_BLOCK, SECURE_BY},
+  in_instructions::primitives::Batch,
 };

 use serai_client::{

@@ -42,7 +42,16 @@ async fn send_batches(serai: &Serai, ids: &mut HashMap<NetworkId, u32>) {
       let mut block = BlockHash([0; 32]);
       OsRng.fill_bytes(&mut block.0);

-      provide_batch(serai, Batch { network, id: ids[&network], block, instructions: vec![] }).await;
+      provide_batch(
+        serai,
+        Batch {
+          network,
+          id: ids[&network],
+          external_network_block_hash: block,
+          instructions: vec![],
+        },
+      )
+      .await;
     }
   }
 }

View File

@@ -7,7 +7,7 @@ use sp_core::{
 use serai_client::{
   primitives::{
-    FAST_EPOCH_DURATION, TARGET_BLOCK_TIME, NETWORKS, EmbeddedEllipticCurve, NetworkId, BlockHash,
+    FAST_EPOCH_DURATION, TARGET_BLOCK_TIME, NETWORKS, BlockHash, NetworkId, EmbeddedEllipticCurve,
     insecure_pair_from_name,
   },
   validator_sets::{

@@ -311,7 +311,8 @@ async fn validator_set_rotation() {
       // provide a batch to complete the handover and retire the previous set
       let mut block_hash = BlockHash([0; 32]);
       OsRng.fill_bytes(&mut block_hash.0);
-      let batch = Batch { network, id: 0, block: block_hash, instructions: vec![] };
+      let batch =
+        Batch { network, id: 0, external_network_block_hash: block_hash, instructions: vec![] };
       publish_tx(
         &serai,
         &SeraiInInstructions::execute_batch(SignedBatch {

View File

@@ -19,6 +19,8 @@ ignored = ["scale", "scale-info"]
 workspace = true
 [dependencies]
+bitvec = { version = "1", default-features = false, features = ["alloc"] }
 scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] }
 scale-info = { version = "2", default-features = false, features = ["derive"] }

View File

@@ -63,10 +63,10 @@ pub mod pallet {
     Batch {
       network: NetworkId,
       publishing_session: Session,
-      external_network_block_hash: [u8; 32],
       id: u32,
+      external_network_block_hash: BlockHash,
       in_instructions_hash: [u8; 32],
-      in_instruction_results: BitVec<u8, Lsb0>,
+      in_instruction_results: bitvec::vec::BitVec<u8, bitvec::order::Lsb0>,
     },
     Halt {
       network: NetworkId,

@@ -101,9 +101,10 @@
     // Use a dedicated transaction layer when executing this InInstruction
     // This lets it individually error without causing any storage modifications
     #[frame_support::transactional]
-    fn execute(instruction: InInstructionWithBalance) -> Result<(), DispatchError> {
-      match instruction.instruction {
+    fn execute(instruction: &InInstructionWithBalance) -> Result<(), DispatchError> {
+      match &instruction.instruction {
         InInstruction::Transfer(address) => {
+          let address = *address;
           Coins::<T>::mint(address.into(), instruction.balance)?;
         }
         InInstruction::Dex(call) => {

@@ -113,6 +114,7 @@
           match call {
             DexCall::SwapAndAddLiquidity(address) => {
               let origin = RawOrigin::Signed(IN_INSTRUCTION_EXECUTOR.into());
+              let address = *address;
               let coin = instruction.balance.coin;

               // mint the given coin on the account

@@ -207,7 +209,9 @@
               let coin_balance =
                 Coins::<T>::balance(IN_INSTRUCTION_EXECUTOR.into(), out_balance.coin);
               let instruction = OutInstructionWithBalance {
-                instruction: OutInstruction { address: out_address.as_external().unwrap() },
+                instruction: OutInstruction {
+                  address: out_address.clone().as_external().unwrap(),
+                },
                 balance: Balance { coin: out_balance.coin, amount: coin_balance },
               };
               Coins::<T>::burn_with_instruction(origin.into(), instruction)?;

@@ -216,12 +220,14 @@
           }
         }
         InInstruction::GenesisLiquidity(address) => {
+          let address = *address;
           Coins::<T>::mint(GENESIS_LIQUIDITY_ACCOUNT.into(), instruction.balance)?;
           GenesisLiq::<T>::add_coin_liquidity(address.into(), instruction.balance)?;
         }
         InInstruction::SwapToStakedSRI(address, network) => {
+          let address = *address;
           Coins::<T>::mint(POL_ACCOUNT.into(), instruction.balance)?;
-          Emissions::<T>::swap_to_staked_sri(address.into(), network, instruction.balance)?;
+          Emissions::<T>::swap_to_staked_sri(address.into(), *network, instruction.balance)?;
         }
       }
       Ok(())

@@ -259,7 +265,7 @@
   impl<T: Config> Pallet<T> {
     #[pallet::call_index(0)]
     #[pallet::weight((0, DispatchClass::Operational))] // TODO
-    pub fn execute_batch(origin: OriginFor<T>, batch: SignedBatch) -> DispatchResult {
+    pub fn execute_batch(origin: OriginFor<T>, _batch: SignedBatch) -> DispatchResult {
      ensure_none(origin)?;

      // The entire Batch execution is handled in pre_dispatch

@@ -309,7 +315,7 @@
        Err(InvalidTransaction::BadProof)?;
      }

-      let batch = batch.batch;
+      let batch = &batch.batch;

      if Halted::<T>::contains_key(network) {
        Err(InvalidTransaction::Custom(1))?;

@@ -343,8 +349,8 @@
      LastBatch::<T>::insert(batch.network, batch.id);

      let in_instructions_hash = blake2_256(&batch.instructions.encode());
-      let mut in_instruction_results = BitVec::new();
-      for (i, instruction) in batch.instructions.into_iter().enumerate() {
+      let mut in_instruction_results = bitvec::vec::BitVec::new();
+      for instruction in &batch.instructions {
        // Verify this coin is for this network
        if instruction.balance.coin.network() != batch.network {
          Err(InvalidTransaction::Custom(2))?;

@@ -363,7 +369,7 @@
      });

      ValidTransaction::with_tag_prefix("in-instructions")
-        .and_provides((batch.batch.network, batch.batch.id))
+        .and_provides((batch.network, batch.id))
        // Set a 10 block longevity, though this should be included in the next block
        .longevity(10)
        .propagate(true)

View File

@@ -19,7 +19,8 @@ use sp_application_crypto::sr25519::Signature;
 use sp_std::vec::Vec;
 use sp_runtime::RuntimeDebug;

-use serai_primitives::{Balance, NetworkId, SeraiAddress, ExternalAddress, system_address};
+#[rustfmt::skip]
+use serai_primitives::{BlockHash, NetworkId, Balance, SeraiAddress, ExternalAddress, system_address};

 mod shorthand;
 pub use shorthand::*;

@@ -106,7 +107,7 @@
 pub struct Batch {
   pub network: NetworkId,
   pub id: u32,
-  pub external_network_block_hash: [u8; 32],
+  pub external_network_block_hash: BlockHash,
   pub instructions: Vec<InInstructionWithBalance>,
 }

View File

@@ -20,71 +20,71 @@ workspace = true
name = "serai-node" name = "serai-node"
[dependencies] [dependencies]
#rand_core = "0.6" rand_core = "0.6"
#zeroize = "1" zeroize = "1"
#hex = "0.4" hex = "0.4"
#log = "0.4" log = "0.4"
#schnorrkel = "0.11" schnorrkel = "0.11"
#ciphersuite = { path = "../../crypto/ciphersuite" } ciphersuite = { path = "../../crypto/ciphersuite" }
#embedwards25519 = { path = "../../crypto/evrf/embedwards25519" } embedwards25519 = { path = "../../crypto/evrf/embedwards25519" }
#secq256k1 = { path = "../../crypto/evrf/secq256k1" } secq256k1 = { path = "../../crypto/evrf/secq256k1" }
#libp2p = "0.52" libp2p = "0.52"
#sp-core = { git = "https://github.com/serai-dex/substrate" } sp-core = { git = "https://github.com/serai-dex/substrate" }
#sp-keystore = { git = "https://github.com/serai-dex/substrate" } sp-keystore = { git = "https://github.com/serai-dex/substrate" }
#sp-timestamp = { git = "https://github.com/serai-dex/substrate" } sp-timestamp = { git = "https://github.com/serai-dex/substrate" }
#sp-io = { git = "https://github.com/serai-dex/substrate" } sp-io = { git = "https://github.com/serai-dex/substrate" }
#sp-blockchain = { git = "https://github.com/serai-dex/substrate" } sp-blockchain = { git = "https://github.com/serai-dex/substrate" }
#sp-api = { git = "https://github.com/serai-dex/substrate" } sp-api = { git = "https://github.com/serai-dex/substrate" }
#sp-block-builder = { git = "https://github.com/serai-dex/substrate" } sp-block-builder = { git = "https://github.com/serai-dex/substrate" }
#sp-consensus-babe = { git = "https://github.com/serai-dex/substrate" } sp-consensus-babe = { git = "https://github.com/serai-dex/substrate" }
#frame-benchmarking = { git = "https://github.com/serai-dex/substrate" } frame-benchmarking = { git = "https://github.com/serai-dex/substrate" }
#serai-runtime = { path = "../runtime", features = ["std"] } serai-runtime = { path = "../runtime", features = ["std"] }
#clap = { version = "4", features = ["derive"] } clap = { version = "4", features = ["derive"] }
#futures-util = "0.3" futures-util = "0.3"
#tokio = { version = "1", features = ["sync", "rt-multi-thread"] } tokio = { version = "1", features = ["sync", "rt-multi-thread"] }
#jsonrpsee = { version = "0.16", features = ["server"] } jsonrpsee = { version = "0.16", features = ["server"] }
#sc-offchain = { git = "https://github.com/serai-dex/substrate" } sc-offchain = { git = "https://github.com/serai-dex/substrate" }
#sc-transaction-pool = { git = "https://github.com/serai-dex/substrate" } sc-transaction-pool = { git = "https://github.com/serai-dex/substrate" }
#sc-transaction-pool-api = { git = "https://github.com/serai-dex/substrate" } sc-transaction-pool-api = { git = "https://github.com/serai-dex/substrate" }
#sc-basic-authorship = { git = "https://github.com/serai-dex/substrate" } sc-basic-authorship = { git = "https://github.com/serai-dex/substrate" }
#sc-executor = { git = "https://github.com/serai-dex/substrate" } sc-executor = { git = "https://github.com/serai-dex/substrate" }
#sc-service = { git = "https://github.com/serai-dex/substrate" } sc-service = { git = "https://github.com/serai-dex/substrate" }
#sc-client-api = { git = "https://github.com/serai-dex/substrate" } sc-client-api = { git = "https://github.com/serai-dex/substrate" }
#sc-network-common = { git = "https://github.com/serai-dex/substrate" } sc-network-common = { git = "https://github.com/serai-dex/substrate" }
#sc-network = { git = "https://github.com/serai-dex/substrate" } sc-network = { git = "https://github.com/serai-dex/substrate" }
#sc-consensus = { git = "https://github.com/serai-dex/substrate" } sc-consensus = { git = "https://github.com/serai-dex/substrate" }
#sc-consensus-babe = { git = "https://github.com/serai-dex/substrate" } sc-consensus-babe = { git = "https://github.com/serai-dex/substrate" }
#sc-consensus-grandpa = { git = "https://github.com/serai-dex/substrate" } sc-consensus-grandpa = { git = "https://github.com/serai-dex/substrate" }
#sc-authority-discovery = { git = "https://github.com/serai-dex/substrate" } sc-authority-discovery = { git = "https://github.com/serai-dex/substrate" }
#sc-telemetry = { git = "https://github.com/serai-dex/substrate" } sc-telemetry = { git = "https://github.com/serai-dex/substrate" }
#sc-cli = { git = "https://github.com/serai-dex/substrate" } sc-cli = { git = "https://github.com/serai-dex/substrate" }
#sc-rpc-api = { git = "https://github.com/serai-dex/substrate" } sc-rpc-api = { git = "https://github.com/serai-dex/substrate" }
#substrate-frame-rpc-system = { git = "https://github.com/serai-dex/substrate" } substrate-frame-rpc-system = { git = "https://github.com/serai-dex/substrate" }
#pallet-transaction-payment-rpc = { git = "https://github.com/serai-dex/substrate" } pallet-transaction-payment-rpc = { git = "https://github.com/serai-dex/substrate" }
#serai-env = { path = "../../common/env" } serai-env = { path = "../../common/env" }
[build-dependencies] [build-dependencies]
#substrate-build-script-utils = { git = "https://github.com/serai-dex/substrate" } substrate-build-script-utils = { git = "https://github.com/serai-dex/substrate" }
[features] [features]
#default = [] default = []
#fast-epoch = ["serai-runtime/fast-epoch"] fast-epoch = ["serai-runtime/fast-epoch"]
#runtime-benchmarks = [ runtime-benchmarks = [
# "frame-benchmarking/runtime-benchmarks", "frame-benchmarking/runtime-benchmarks",
# "serai-runtime/runtime-benchmarks", "serai-runtime/runtime-benchmarks",
#] ]

View File

@@ -1,5 +1,12 @@
use substrate_wasm_builder::WasmBuilder; use substrate_wasm_builder::WasmBuilder;
fn main() { fn main() {
WasmBuilder::new().with_current_project().export_heap_base().import_memory().build() WasmBuilder::new()
.with_current_project()
// https://substrate.stackexchange.com/questions/12124
// TODO: Remove once we've moved to polkadot-sdk
.disable_runtime_version_section_check()
.export_heap_base()
.import_memory()
.build()
} }

View File

@@ -5,8 +5,6 @@ use scale::{Encode, Decode};
use serai_abi::Call; use serai_abi::Call;
use crate::{ use crate::{
Vec,
primitives::{PublicKey, SeraiAddress},
timestamp, coins, dex, genesis_liquidity, timestamp, coins, dex, genesis_liquidity,
validator_sets::{self, MembershipProof}, validator_sets::{self, MembershipProof},
in_instructions, signals, babe, grandpa, RuntimeCall, in_instructions, signals, babe, grandpa, RuntimeCall,

View File

@@ -1203,7 +1203,7 @@ pub mod pallet {
// There must have been a previous session if PendingSlashReport is populated // There must have been a previous session if PendingSlashReport is populated
let set = let set =
ValidatorSet { network, session: Session(Self::session(network).unwrap().0 - 1) }; ValidatorSet { network, session: Session(Self::session(network).unwrap().0 - 1) };
if !key.verify(&report_slashes_message(&set, slashes), signature) { if !key.verify(&slashes.report_slashes_message(), signature) {
Err(InvalidTransaction::BadProof)?; Err(InvalidTransaction::BadProof)?;
} }
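
Editor's note: the slash-report message is now built by a method on the report itself rather than a free function taking the set and slashes separately. A hedged sketch of that shape; the type, field layout, and domain-separation prefix below are placeholders, not the actual Serai definitions:

use scale::Encode;

// Hypothetical stand-in: the real report type and message format live in the
// Serai primitives and may differ.
#[derive(Encode)]
pub struct SlashReport(pub Vec<([u8; 32], u32)>);

impl SlashReport {
  // Keeping message construction on the type puts the signed payload format
  // in one place for both the signer and this verifier.
  pub fn report_slashes_message(&self) -> Vec<u8> {
    let mut message = b"SlashReport".to_vec();
    message.extend(self.encode());
    message
  }
}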

View File

@@ -90,7 +90,8 @@ fn basic_functionality() {
}, },
b"Hello, World!".to_vec(), b"Hello, World!".to_vec(),
) )
.await; .await
.unwrap();
// Queue this twice, which message-queue should de-duplicate // Queue this twice, which message-queue should de-duplicate
for _ in 0 .. 2 { for _ in 0 .. 2 {
@@ -103,7 +104,8 @@ fn basic_functionality() {
}, },
b"Hello, World, again!".to_vec(), b"Hello, World, again!".to_vec(),
) )
.await; .await
.unwrap();
} }
// Successfully get it // Successfully get it
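
Editor's note: these test hunks reflect the queue call becoming fallible, so the tests now unwrap its result instead of discarding it. A hedged illustration of the caller-side difference, using a dummy fallible async function in place of the real message-queue client:

// Stand-in for the now-fallible queue call; the real signature may differ.
async fn queue_stub(_msg: Vec<u8>) -> Result<(), String> {
  Ok(())
}

async fn caller() -> Result<(), String> {
  // Tests unwrap to fail loudly on error...
  queue_stub(b"Hello, World!".to_vec()).await.unwrap();
  // ...while non-test callers can propagate the error instead.
  queue_stub(b"Hello, World, again!".to_vec()).await?;
  Ok(())
}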
@@ -146,7 +148,8 @@ fn basic_functionality() {
}, },
b"Hello, World!".to_vec(), b"Hello, World!".to_vec(),
) )
.await; .await
.unwrap();
let monero = MessageQueue::new( let monero = MessageQueue::new(
Service::Processor(NetworkId::Monero), Service::Processor(NetworkId::Monero),