6 Commits

Author SHA1 Message Date
Luke Parker  4efaec0f7b  Correct invalid constant  2024-06-19 10:32:08 -04:00
Luke Parker  b87f0dcd97  Merge branch 'develop' into HEAD  2024-06-19 10:09:18 -04:00
Luke Parker  253cf3253d  Correct hash for 1.79.0-slim-bookworm docker image  2024-06-13 19:00:01 -04:00
Luke Parker  03445b3020  Update httparse, as 1.9.2 was yanked  2024-06-13 16:49:58 -04:00
Luke Parker  9af111b4aa  Rust 1.79, cargo update  2024-06-13 15:57:08 -04:00
Luke Parker  41ce5b1738  Use the serai_abi::Call in the actual Transaction type  2024-06-03 23:38:22 -04:00

  We previously required they had the same encoding; this ensures they do by
  making them one and the same. This does require a large, ugly From/TryInto
  block, which is deemed preferable for moving this more and more into syntax
  (from semantics).

  Further improvements (notably re: Extra) are possible, and this already lets
  us strip some members from the Call enum.
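A minimal sketch of the single-enum-plus-conversions pattern this commit describes (the type and variant names below are illustrative, not the actual serai_abi or runtime definitions): one canonical call enum is converted into the runtime's call type with From, and recovered from it with TryFrom, so both necessarily share one SCALE encoding.

pub enum AbiCall {
  Transfer { to: [u8; 32], amount: u64 },
}

pub enum BalancesCall {
  Transfer { to: [u8; 32], amount: u64 },
}

pub enum RuntimeCall {
  Balances(BalancesCall),
}

impl From<AbiCall> for RuntimeCall {
  fn from(call: AbiCall) -> Self {
    match call {
      AbiCall::Transfer { to, amount } => {
        RuntimeCall::Balances(BalancesCall::Transfer { to, amount })
      }
    }
  }
}

impl TryFrom<RuntimeCall> for AbiCall {
  type Error = ();
  // The reverse direction is fallible: runtime calls with no ABI counterpart
  // simply fail to convert.
  fn try_from(call: RuntimeCall) -> Result<Self, Self::Error> {
    match call {
      RuntimeCall::Balances(BalancesCall::Transfer { to, amount }) => {
        Ok(AbiCall::Transfer { to, amount })
      }
    }
  }
}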
62 changed files with 1090 additions and 622 deletions

Cargo.lock (generated, 573 changed lines): diff suppressed because it is too large.

@@ -6,7 +6,7 @@ license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
 authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
 authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
 edition = "2021"
 publish = false
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true
@@ -29,21 +29,21 @@ frost = { package = "modular-frost", path = "../../crypto/frost", default-featur
 alloy-core = { version = "0.7", default-features = false }
 alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] }
-alloy-consensus = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false, features = ["k256"] }
-alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
-alloy-rpc-types = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
-alloy-rpc-client = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
+alloy-consensus = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false, features = ["k256"] }
+alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
+alloy-rpc-types-eth = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
+alloy-rpc-client = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
 alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
-alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
-alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false, optional = true }
+alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
+alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false, optional = true }
 [dev-dependencies]
 frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] }
 tokio = { version = "1", features = ["macros"] }
-alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
+alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
 [features]
 tests = ["alloy-node-bindings", "frost/tests"]

@@ -21,8 +21,8 @@ tower = "0.4"
 serde_json = { version = "1", default-features = false }
 simple-request = { path = "../../../common/request", default-features = false }
-alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
-alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "64feb9bc51c8021ea08535694c44de84222f474e", default-features = false }
+alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
+alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "9edb7d184592322e97b587c60368e33ef1dfa323", default-features = false }
 [features]
 default = ["tls"]

@@ -5,7 +5,7 @@ use alloy_consensus::{Signed, TxLegacy};
 use alloy_sol_types::{SolCall, SolEvent};
-use alloy_rpc_types::{BlockNumberOrTag, Filter};
+use alloy_rpc_types_eth::{BlockNumberOrTag, Filter};
 use alloy_simple_request_transport::SimpleRequest;
 use alloy_provider::{Provider, RootProvider};

@@ -4,7 +4,7 @@ use alloy_core::primitives::{Address, B256, U256};
 use alloy_sol_types::{SolInterface, SolEvent};
-use alloy_rpc_types::Filter;
+use alloy_rpc_types_eth::Filter;
 use alloy_simple_request_transport::SimpleRequest;
 use alloy_provider::{Provider, RootProvider};

@@ -7,7 +7,7 @@ pub mod alloy {
   pub use alloy_consensus as consensus;
   pub use alloy_network as network;
-  pub use alloy_rpc_types as rpc_types;
+  pub use alloy_rpc_types_eth as rpc_types;
   pub use alloy_simple_request_transport as simple_request_transport;
   pub use alloy_rpc_client as rpc_client;
   pub use alloy_provider as provider;
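Because the renamed crate is re-exported under the old `rpc_types` name, downstream crates that go through this facade see no change. A small, hedged sketch of such a consumer (it assumes only the re-exports shown above and the usual alloy Filter builder):

use ethereum_serai::alloy::rpc_types::{BlockNumberOrTag, Filter};

// Filter and BlockNumberOrTag now come from alloy-rpc-types-eth, but the
// import path through the facade is unchanged.
fn recent_filter() -> Filter {
  Filter::new().from_block(BlockNumberOrTag::Latest)
}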

@@ -12,9 +12,9 @@ use alloy_consensus::TxLegacy;
 use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};
-use alloy_rpc_types::Filter;
+use alloy_rpc_types_eth::Filter;
 #[cfg(test)]
-use alloy_rpc_types::{BlockId, TransactionRequest, TransactionInput};
+use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
 use alloy_simple_request_transport::SimpleRequest;
 use alloy_provider::{Provider, RootProvider};

@@ -11,7 +11,7 @@ use alloy_core::{
 };
 use alloy_consensus::{SignableTransaction, TxLegacy};
-use alloy_rpc_types::TransactionReceipt;
+use alloy_rpc_types_eth::TransactionReceipt;
 use alloy_simple_request_transport::SimpleRequest;
 use alloy_provider::{Provider, RootProvider};

@@ -14,6 +14,7 @@ use frost::{
 use alloy_core::primitives::{Address, U256};
 use alloy_simple_request_transport::SimpleRequest;
+use alloy_rpc_types_eth::BlockTransactionsKind;
 use alloy_rpc_client::ClientBuilder;
 use alloy_provider::{Provider, RootProvider};
@@ -84,7 +85,7 @@ async fn setup_test() -> (
 async fn latest_block_hash(client: &RootProvider<SimpleRequest>) -> [u8; 32] {
   client
-    .get_block(client.get_block_number().await.unwrap().into(), false)
+    .get_block(client.get_block_number().await.unwrap().into(), BlockTransactionsKind::Hashes)
     .await
     .unwrap()
     .unwrap()

@@ -15,7 +15,7 @@ use alloy_core::primitives::Address;
 use alloy_sol_types::SolCall;
-use alloy_rpc_types::{TransactionInput, TransactionRequest};
+use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
 use alloy_simple_request_transport::SimpleRequest;
 use alloy_rpc_client::ClientBuilder;
 use alloy_provider::{Provider, RootProvider};

@@ -6,7 +6,7 @@ license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -133,7 +133,13 @@ mod impl_pst_for_serai {
     key_pair: KeyPair,
     signature: Signature,
   ) {
-    let tx = SeraiValidatorSets::set_keys(set.network, removed, key_pair, signature);
+    // TODO: BoundedVec as an arg to avoid this expect
+    let tx = SeraiValidatorSets::set_keys(
+      set.network,
+      removed.try_into().expect("removing more than allowed"),
+      key_pair,
+      signature,
+    );
     async fn check(serai: SeraiValidatorSets<'_>, set: ValidatorSet, (): ()) -> bool {
       if matches!(serai.keys(set).await, Ok(Some(_))) {
         log::info!("another coordinator set key pair for {:?}", set);
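A minimal sketch of the Vec-to-BoundedVec conversion performed above (the element type and bound constant are illustrative; the real bound, shown later in this diff, is MAX_KEY_SHARES_PER_SET / 3 over SeraiAddress):

use sp_core::{ConstU32, bounded::BoundedVec};

const MAX_REMOVED: u32 = 50;

// TryFrom<Vec<_>> only succeeds while the Vec fits within the bound, hence
// the expect mirroring the coordinator's "removing more than allowed".
fn bound_removed(removed: Vec<[u8; 32]>) -> BoundedVec<[u8; 32], ConstU32<MAX_REMOVED>> {
  removed.try_into().expect("removing more than allowed")
}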

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["dkg", "multisig", "threshold", "ff", "group"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -64,10 +64,7 @@ pub struct GeneratorPromotion<C1: Ciphersuite, C2: Ciphersuite> {
   _c2: PhantomData<C2>,
 }
-impl<C1: Ciphersuite, C2: Ciphersuite> GeneratorPromotion<C1, C2>
-where
-  C2: Ciphersuite<F = C1::F, G = C1::G>,
-{
+impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<C1, C2> {
   /// Begin promoting keys from one generator to another. Returns a proof this share was properly
   /// promoted.
   pub fn promote<R: RngCore + CryptoRng>(

@@ -6,7 +6,7 @@ license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -53,11 +53,11 @@ pub(crate) struct Aos<G0: PrimeGroup + Zeroize, G1: PrimeGroup + Zeroize, const
   s: [(G0::Scalar, G1::Scalar); RING_LEN],
 }
-impl<G0: PrimeGroup + Zeroize, G1: PrimeGroup + Zeroize, const RING_LEN: usize>
-  Aos<G0, G1, RING_LEN>
-where
-  G0::Scalar: PrimeFieldBits + Zeroize,
-  G1::Scalar: PrimeFieldBits + Zeroize,
+impl<
+  G0: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  const RING_LEN: usize,
+> Aos<G0, G1, RING_LEN>
 {
   #[allow(non_snake_case)]
   fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
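This hunk, and the similar ones across dleq, multiexp, frost, and the test crates below, rewrite `where G::Scalar: ...` clauses using the associated type bounds syntax stabilized in Rust 1.79, which is what the rust-version and toolchain bumps in this PR enable. A self-contained sketch of the two spellings (the trait names here are illustrative, not the real ciphersuite traits):

trait Curve {
  type Scalar;
}

trait Bits {
  fn bit_len(&self) -> usize;
}

// Before 1.79: the constraint on the associated type lives in a `where` clause.
fn scalar_bits_old<C: Curve>(scalar: &C::Scalar) -> usize
where
  C::Scalar: Bits,
{
  scalar.bit_len()
}

// With Rust 1.79 associated type bounds: the same constraint, written inline.
fn scalar_bits_new<C: Curve<Scalar: Bits>>(scalar: &C::Scalar) -> usize {
  scalar.bit_len()
}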

@@ -76,14 +76,11 @@ pub(crate) struct Bits<
 }
 impl<
-  G0: PrimeGroup + Zeroize,
-  G1: PrimeGroup + Zeroize,
+  G0: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
   const SIGNATURE: u8,
   const RING_LEN: usize,
 > Bits<G0, G1, SIGNATURE, RING_LEN>
-where
-  G0::Scalar: PrimeFieldBits + Zeroize,
-  G1::Scalar: PrimeFieldBits + Zeroize,
 {
   fn transcript<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
     transcript.domain_separate(b"bits");

@@ -112,15 +112,12 @@ pub enum DLEqError {
 // anyone who wants it
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct __DLEqProof<
-  G0: PrimeGroup + Zeroize,
-  G1: PrimeGroup + Zeroize,
+  G0: PrimeGroup<Scalar: PrimeFieldBits> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits> + Zeroize,
   const SIGNATURE: u8,
   const RING_LEN: usize,
   const REMAINDER_RING_LEN: usize,
-> where
-  G0::Scalar: PrimeFieldBits,
-  G1::Scalar: PrimeFieldBits,
-{
+> {
   bits: Vec<Bits<G0, G1, SIGNATURE, RING_LEN>>,
   remainder: Option<Bits<G0, G1, SIGNATURE, REMAINDER_RING_LEN>>,
   poks: (SchnorrPoK<G0>, SchnorrPoK<G1>),
@@ -200,15 +197,12 @@ dleq!(
 );
 impl<
-  G0: PrimeGroup + Zeroize,
-  G1: PrimeGroup + Zeroize,
+  G0: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
+  G1: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize,
   const SIGNATURE: u8,
   const RING_LEN: usize,
   const REMAINDER_RING_LEN: usize,
 > __DLEqProof<G0, G1, SIGNATURE, RING_LEN, REMAINDER_RING_LEN>
-where
-  G0::Scalar: PrimeFieldBits + Zeroize,
-  G1::Scalar: PrimeFieldBits + Zeroize,
 {
   pub(crate) fn transcript<T: Transcript>(
     transcript: &mut T,

@@ -28,10 +28,7 @@ pub(crate) struct SchnorrPoK<G: PrimeGroup + Zeroize> {
   s: G::Scalar,
 }
-impl<G: PrimeGroup + Zeroize> SchnorrPoK<G>
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+impl<G: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize> SchnorrPoK<G> {
   // Not HRAm due to the lack of m
   #[allow(non_snake_case)]
   fn hra<T: Transcript>(transcript: &mut T, generator: G, R: G, A: G) -> G::Scalar {

@@ -105,19 +105,13 @@ pub enum DLEqError {
 /// A proof that points have the same discrete logarithm across generators.
 #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
-pub struct DLEqProof<G: PrimeGroup>
-where
-  G::Scalar: Zeroize,
-{
+pub struct DLEqProof<G: PrimeGroup<Scalar: Zeroize>> {
   c: G::Scalar,
   s: G::Scalar,
 }
 #[allow(non_snake_case)]
-impl<G: PrimeGroup> DLEqProof<G>
-where
-  G::Scalar: Zeroize,
-{
+impl<G: PrimeGroup<Scalar: Zeroize>> DLEqProof<G> {
   fn transcript<T: Transcript>(transcript: &mut T, generator: G, nonce: G, point: G) {
     transcript.append_message(b"generator", generator.to_bytes());
     transcript.append_message(b"nonce", nonce.to_bytes());
@@ -213,20 +207,14 @@ where
 /// across some generators, yet with a smaller overall proof size.
 #[cfg(feature = "std")]
 #[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
-pub struct MultiDLEqProof<G: PrimeGroup>
-where
-  G::Scalar: Zeroize,
-{
+pub struct MultiDLEqProof<G: PrimeGroup<Scalar: Zeroize>> {
   c: G::Scalar,
   s: Vec<G::Scalar>,
 }
 #[cfg(feature = "std")]
 #[allow(non_snake_case)]
-impl<G: PrimeGroup> MultiDLEqProof<G>
-where
-  G::Scalar: Zeroize,
-{
+impl<G: PrimeGroup<Scalar: Zeroize>> MultiDLEqProof<G> {
   /// Prove for each scalar that the series of points created by multiplying it against its
   /// matching generators share a discrete logarithm.
   /// This function panics if `generators.len() != scalars.len()`.

@@ -14,10 +14,7 @@ use transcript::{Transcript, RecommendedTranscript};
 use crate::cross_group::schnorr::SchnorrPoK;
-fn test_schnorr<G: PrimeGroup + Zeroize>()
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+fn test_schnorr<G: PrimeGroup<Scalar: PrimeFieldBits + Zeroize> + Zeroize>() {
   let transcript = RecommendedTranscript::new(b"Schnorr Test");
   let mut batch = BatchVerifier::new(10);

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ff-group-te
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["ff", "group", "ecc"]
 edition = "2021"
-rust-version = "1.60"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -178,10 +178,7 @@ pub fn test_prime_group<R: RngCore, G: PrimeGroup>(rng: &mut R) {
 }
 /// Run all tests offered by this crate on the group.
-pub fn test_prime_group_bits<R: RngCore, G: PrimeGroup>(rng: &mut R)
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub fn test_prime_group_bits<R: RngCore, G: PrimeGroup<Scalar: PrimeFieldBits>>(rng: &mut R) {
   test_prime_field_bits::<R, G::Scalar>(rng);
   test_prime_group::<R, G>(rng);
 }

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["frost", "multisig", "threshold"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/multiexp"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["multiexp", "ff", "group"]
 edition = "2021"
-rust-version = "1.70"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -12,27 +12,21 @@ use crate::{multiexp, multiexp_vartime};
 // Flatten the contained statements to a single Vec.
 // Wrapped in Zeroizing in case any of the included statements contain private values.
 #[allow(clippy::type_complexity)]
-fn flat<Id: Copy + Zeroize, G: Group + Zeroize>(
+fn flat<Id: Copy + Zeroize, G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>(
   slice: &[(Id, Vec<(G::Scalar, G)>)],
-) -> Zeroizing<Vec<(G::Scalar, G)>>
-where
-  <G as Group>::Scalar: PrimeFieldBits + Zeroize,
-{
+) -> Zeroizing<Vec<(G::Scalar, G)>> {
   Zeroizing::new(slice.iter().flat_map(|pairs| pairs.1.iter()).copied().collect::<Vec<_>>())
 }
 /// A batch verifier intended to verify a series of statements are each equivalent to zero.
 #[allow(clippy::type_complexity)]
 #[derive(Clone, Zeroize)]
-pub struct BatchVerifier<Id: Copy + Zeroize, G: Group + Zeroize>(
+pub struct BatchVerifier<Id: Copy + Zeroize, G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>(
   Zeroizing<Vec<(Id, Vec<(G::Scalar, G)>)>>,
-)
-where
-  <G as Group>::Scalar: PrimeFieldBits + Zeroize;
+);
-impl<Id: Copy + Zeroize, G: Group + Zeroize> BatchVerifier<Id, G>
-where
-  <G as Group>::Scalar: PrimeFieldBits + Zeroize,
+impl<Id: Copy + Zeroize, G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>
+  BatchVerifier<Id, G>
 {
   /// Create a new batch verifier, expected to verify the following amount of statements.
   ///

@@ -49,10 +49,10 @@ fn u8_from_bool(bit_ref: &mut bool) -> u8 {
 // Convert scalars to `window`-sized bit groups, as needed to index a table
 // This algorithm works for `window <= 8`
-pub(crate) fn prep_bits<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> Vec<Vec<u8>>
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn prep_bits<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> Vec<Vec<u8>> {
   let w_usize = usize::from(window);
   let mut groupings = vec![];
@@ -175,10 +175,7 @@ fn algorithm(len: usize) -> Algorithm {
 /// Performs a multiexponentiation, automatically selecting the optimal algorithm based on the
 /// amount of pairs.
-pub fn multiexp<G: Group>(pairs: &[(G::Scalar, G)]) -> G
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+pub fn multiexp<G: Group<Scalar: PrimeFieldBits + Zeroize>>(pairs: &[(G::Scalar, G)]) -> G {
   match algorithm(pairs.len()) {
     Algorithm::Null => Group::identity(),
     Algorithm::Single => pairs[0].1 * pairs[0].0,
@@ -190,10 +187,7 @@ where
 /// Performs a multiexponentiation in variable time, automatically selecting the optimal algorithm
 /// based on the amount of pairs.
-pub fn multiexp_vartime<G: Group>(pairs: &[(G::Scalar, G)]) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub fn multiexp_vartime<G: Group<Scalar: PrimeFieldBits>>(pairs: &[(G::Scalar, G)]) -> G {
   match algorithm(pairs.len()) {
     Algorithm::Null => Group::identity(),
     Algorithm::Single => pairs[0].1 * pairs[0].0,

@@ -7,10 +7,10 @@ use crate::prep_bits;
 // Pippenger's algorithm for multiexponentiation, as published in the SIAM Journal on Computing
 // DOI: 10.1137/0209022
-pub(crate) fn pippenger<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn pippenger<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let mut bits = prep_bits(pairs, window);
   let mut res = G::identity();
@@ -37,10 +37,10 @@ where
   res
 }
-pub(crate) fn pippenger_vartime<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn pippenger_vartime<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let bits = prep_bits(pairs, window);
   let mut res = G::identity();

@@ -24,10 +24,10 @@ fn prep_tables<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> Vec<Vec<G>> {
 // Straus's algorithm for multiexponentiation, as published in The American Mathematical Monthly
 // DOI: 10.2307/2310929
-pub(crate) fn straus<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+pub(crate) fn straus<G: Group<Scalar: PrimeFieldBits + Zeroize>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let mut groupings = prep_bits(pairs, window);
   let tables = prep_tables(pairs, window);
@@ -48,10 +48,10 @@ where
   res
 }
-pub(crate) fn straus_vartime<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
-where
-  G::Scalar: PrimeFieldBits,
-{
+pub(crate) fn straus_vartime<G: Group<Scalar: PrimeFieldBits>>(
+  pairs: &[(G::Scalar, G)],
+  window: u8,
+) -> G {
   let groupings = prep_bits(pairs, window);
   let tables = prep_tables(pairs, window);

@@ -9,10 +9,7 @@ use group::Group;
 use crate::BatchVerifier;
-pub(crate) fn test_batch<G: Group + Zeroize>()
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+pub(crate) fn test_batch<G: Group<Scalar: PrimeFieldBits + Zeroize> + Zeroize>() {
   let valid = |batch: BatchVerifier<_, G>| {
     assert!(batch.verify());
     assert!(batch.verify_vartime());

@@ -18,10 +18,7 @@ mod batch;
 use batch::test_batch;
 #[allow(dead_code)]
-fn benchmark_internal<G: Group>(straus_bool: bool)
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+fn benchmark_internal<G: Group<Scalar: PrimeFieldBits + Zeroize>>(straus_bool: bool) {
   let runs: usize = 20;
   let mut start = 0;
@@ -86,10 +83,7 @@ where
   }
 }
-fn test_multiexp<G: Group>()
-where
-  G::Scalar: PrimeFieldBits + Zeroize,
-{
+fn test_multiexp<G: Group<Scalar: PrimeFieldBits + Zeroize>>() {
   let test = |pairs: &[_], sum| {
     // These should automatically determine the best algorithm
     assert_eq!(multiexp(pairs), sum);

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["schnorr", "ff", "group"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorrkel"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["frost", "multisig", "threshold", "schnorrkel"]
 edition = "2021"
-rust-version = "1.74"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/transcript"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["transcript"]
 edition = "2021"
-rust-version = "1.73"
+rust-version = "1.79"
 [package.metadata.docs.rs]
 all-features = true

@@ -1,10 +1,7 @@
 use crate::Transcript;
 /// Test the sanity of a transcript.
-pub fn test_transcript<T: Transcript>()
-where
-  T::Challenge: PartialEq,
-{
+pub fn test_transcript<T: Transcript<Challenge: PartialEq>>() {
   // Ensure distinct names cause distinct challenges
   {
     let mut t1 = T::new(b"1");

@@ -1,5 +1,5 @@
-# rust:1.77.0-slim-bookworm as of March 22nd, 2024 (GMT)
-FROM --platform=linux/amd64 rust@sha256:e785e4aa81f87bc1ee02fa2026ffbc491e0410bdaf6652cea74884373f452664 as deterministic
+# rust:1.79.0-slim-bookworm as of June 14th, 2024 (GMT)
+FROM --platform=linux/amd64 rust@sha256:fa189cd885739dd17fc6bb4e132687fce43f2bf42983c0ac39b60e4943201e9c as deterministic
 # Move to a Debian package snapshot
 RUN rm -rf /etc/apt/sources.list.d/debian.sources && \

@@ -146,7 +146,7 @@ fn build_serai_service(prelude: &str, release: bool, features: &str, package: &s
   format!(
     r#"
-FROM rust:1.77-slim-bookworm as builder
+FROM rust:1.79-slim-bookworm as builder
 COPY --from=mimalloc-debian libmimalloc.so /usr/lib
 RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload

@@ -13,7 +13,7 @@ use frost::ThresholdKeys;
 use ethereum_serai::{
   alloy::{
     primitives::U256,
-    rpc_types::{BlockNumberOrTag, Transaction},
+    rpc_types::{BlockTransactionsKind, BlockNumberOrTag, Transaction},
     simple_request_transport::SimpleRequest,
     rpc_client::ClientBuilder,
     provider::{Provider, RootProvider},
@@ -432,7 +432,7 @@ impl<D: Db> Network for Ethereum<D> {
   async fn get_latest_block_number(&self) -> Result<usize, NetworkError> {
     let actual_number = self
       .provider
-      .get_block(BlockNumberOrTag::Finalized.into(), false)
+      .get_block(BlockNumberOrTag::Finalized.into(), BlockTransactionsKind::Hashes)
       .await
       .map_err(|_| NetworkError::ConnectionError)?
       .ok_or(NetworkError::ConnectionError)?
@@ -460,7 +460,7 @@ impl<D: Db> Network for Ethereum<D> {
     } else {
       self
         .provider
-        .get_block(u64::try_from(start - 1).unwrap().into(), false)
+        .get_block(u64::try_from(start - 1).unwrap().into(), BlockTransactionsKind::Hashes)
         .await
         .ok()
         .flatten()
@@ -473,7 +473,7 @@ impl<D: Db> Network for Ethereum<D> {
     let end_header = self
       .provider
-      .get_block(u64::try_from(start + 31).unwrap().into(), false)
+      .get_block(u64::try_from(start + 31).unwrap().into(), BlockTransactionsKind::Hashes)
       .await
       .ok()
      .flatten()
@@ -807,7 +807,7 @@ impl<D: Db> Network for Ethereum<D> {
   async fn get_block_number(&self, id: &<Self::Block as Block<Self>>::Id) -> usize {
     self
       .provider
-      .get_block(B256::from(*id).into(), false)
+      .get_block(B256::from(*id).into(), BlockTransactionsKind::Hashes)
      .await
      .unwrap()
      .unwrap()
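A minimal sketch of the updated call shape, mirroring the hunks above; it assumes the alloy revision pinned by this PR, where Provider::get_block takes a BlockTransactionsKind instead of the old bool flag:

use alloy_rpc_types_eth::{Block, BlockNumberOrTag, BlockTransactionsKind};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};

async fn finalized_block(provider: &RootProvider<SimpleRequest>) -> Option<Block> {
  provider
    // BlockTransactionsKind::Hashes requests transaction hashes only,
    // matching the behavior of the old `false` argument.
    .get_block(BlockNumberOrTag::Finalized.into(), BlockTransactionsKind::Hashes)
    .await
    .ok()
    .flatten()
}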

@@ -1,5 +1,5 @@
 [toolchain]
-channel = "1.77"
+channel = "1.79"
 targets = ["wasm32-unknown-unknown"]
 profile = "minimal"
 components = ["rust-src", "rustfmt", "clippy"]

@@ -16,27 +16,48 @@ rustdoc-args = ["--cfg", "docsrs"]
 workspace = true
 [dependencies]
-scale = { package = "parity-scale-codec", version = "3", features = ["derive"] }
-scale-info = { version = "2", features = ["derive"] }
-borsh = { version = "1", features = ["derive", "de_strict_order"], optional = true }
-serde = { version = "1", features = ["derive", "alloc"], optional = true }
-sp-core = { git = "https://github.com/serai-dex/substrate" }
-sp-runtime = { git = "https://github.com/serai-dex/substrate" }
-sp-consensus-babe = { git = "https://github.com/serai-dex/substrate" }
-sp-consensus-grandpa = { git = "https://github.com/serai-dex/substrate" }
+scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] }
+scale-info = { version = "2", default-features = false, features = ["derive"] }
+borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true }
+serde = { version = "1", default-features = false, features = ["derive", "alloc"], optional = true }
+sp-core = { git = "https://github.com/serai-dex/substrate", default-features = false }
+sp-runtime = { git = "https://github.com/serai-dex/substrate", default-features = false }
+sp-consensus-babe = { git = "https://github.com/serai-dex/substrate", default-features = false }
+sp-consensus-grandpa = { git = "https://github.com/serai-dex/substrate", default-features = false }
-serai-primitives = { path = "../primitives", version = "0.1" }
-serai-coins-primitives = { path = "../coins/primitives", version = "0.1" }
-serai-validator-sets-primitives = { path = "../validator-sets/primitives", version = "0.1" }
-serai-in-instructions-primitives = { path = "../in-instructions/primitives", version = "0.1" }
-serai-signals-primitives = { path = "../signals/primitives", version = "0.1" }
-frame-support = { git = "https://github.com/serai-dex/substrate" }
+frame-support = { git = "https://github.com/serai-dex/substrate", default-features = false }
+serai-primitives = { path = "../primitives", version = "0.1", default-features = false }
+serai-coins-primitives = { path = "../coins/primitives", version = "0.1", default-features = false }
+serai-validator-sets-primitives = { path = "../validator-sets/primitives", version = "0.1", default-features = false }
+serai-in-instructions-primitives = { path = "../in-instructions/primitives", version = "0.1", default-features = false }
+serai-signals-primitives = { path = "../signals/primitives", version = "0.1", default-features = false }
 [features]
+std = [
+  "scale/std",
+  "scale-info/std",
+  "borsh?/std",
+  "serde?/std",
+  "sp-core/std",
+  "sp-runtime/std",
+  "sp-consensus-babe/std",
+  "sp-consensus-grandpa/std",
+  "frame-support/std",
+  "serai-primitives/std",
+  "serai-coins-primitives/std",
+  "serai-validator-sets-primitives/std",
+  "serai-in-instructions-primitives/std",
+  "serai-signals-primitives/std",
+]
 borsh = [
   "dep:borsh",
   "serai-primitives/borsh",
@@ -53,3 +74,4 @@ serde = [
   "serai-in-instructions-primitives/serde",
   "serai-signals-primitives/serde",
 ]
+default = ["std"]

@@ -4,7 +4,7 @@ use serai_primitives::{Header, SeraiAddress};
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 pub struct ReportEquivocation {
-  pub equivocation_proof: Box<EquivocationProof<Header>>,
+  pub equivocation_proof: alloc::boxed::Box<EquivocationProof<Header>>,
   pub key_owner_proof: SeraiAddress,
 }
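The fully qualified alloc paths follow from the crate now building as no_std (see the lib.rs hunk further down): with `extern crate alloc` and without the std prelude, Box and Vec must be named through alloc. A small self-contained sketch of that setup (the struct is illustrative):

#![cfg_attr(not(feature = "std"), no_std)]

extern crate alloc;

// Heap-allocated types are reached via the alloc crate rather than the prelude.
pub struct Report {
  pub proof: alloc::boxed::Box<[u8]>,
  pub signers: alloc::vec::Vec<[u8; 32]>,
}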

@@ -5,7 +5,8 @@ use primitives::OutInstructionWithBalance;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Call {
   transfer { to: SeraiAddress, balance: Balance },
   burn { balance: Balance },
@@ -14,7 +15,17 @@ pub enum Call {
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
+pub enum LiquidityTokensCall {
+  transfer { to: SeraiAddress, balance: Balance },
+  burn { balance: Balance },
+}
+#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
+#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Event {
   Mint { to: SeraiAddress, balance: Balance },
   Burn { from: SeraiAddress, balance: Balance },
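The derive split applied throughout these ABI types follows one pattern, sketched below: Serialize is derived whenever the serde feature is enabled, while Deserialize is additionally gated on std (presumably so the no_std runtime build never has to support serde deserialization). The enum here is illustrative, not one of the real ABI types.

#[allow(non_camel_case_types)]
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
pub enum ExampleCall {
  transfer { to: [u8; 32], amount: u64 },
  burn { amount: u64 },
}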

@@ -6,7 +6,8 @@ type PoolId = Coin;
 type MaxSwapPathLength = sp_core::ConstU32<3>;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Call {
   add_liquidity {
     coin: Coin,
@@ -38,7+39,8 @@ pub enum Call {
 }
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Event {
   PoolCreated {
     pool_id: PoolId,

@@ -4,7 +4,7 @@ use serai_primitives::{BlockNumber, SeraiAddress};
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 pub struct ReportEquivocation {
-  pub equivocation_proof: Box<EquivocationProof<[u8; 32], BlockNumber>>,
+  pub equivocation_proof: alloc::boxed::Box<EquivocationProof<[u8; 32], BlockNumber>>,
   pub key_owner_proof: SeraiAddress,
 }
@@ -15,10 +15,10 @@ pub enum Call {
 }
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Event {
-  NewAuthorities { authority_set: Vec<(SeraiAddress, u64)> },
+  NewAuthorities { authority_set: alloc::vec::Vec<(SeraiAddress, u64)> },
   // TODO: Remove these
   Paused,
   Resumed,

@@ -5,14 +5,16 @@ use primitives::SignedBatch;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Call {
   execute_batch { batch: SignedBatch },
 }
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Event {
   Batch { network: NetworkId, id: u32, block: BlockHash, instructions_hash: [u8; 32] },
   InstructionFailure { network: NetworkId, id: u32, index: u32 },

@@ -1,5 +1,12 @@
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![cfg_attr(not(feature = "std"), no_std)]
 #![allow(non_camel_case_types)]
+extern crate alloc;
+pub use serai_primitives as primitives;
 pub mod system;
 pub mod timestamp;
@@ -14,15 +21,13 @@ pub mod signals;
 pub mod babe;
 pub mod grandpa;
-pub use serai_primitives as primitives;
+pub mod tx;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 pub enum Call {
-  System,
   Timestamp(timestamp::Call),
-  TransactionPayment,
   Coins(coins::Call),
-  LiquidityTokens(coins::Call),
+  LiquidityTokens(coins::LiquidityTokensCall),
   Dex(dex::Call),
   ValidatorSets(validator_sets::Call),
   InInstructions(in_instructions::Call),
@@ -53,16 +58,20 @@ pub enum Event {
 }
 #[derive(Clone, Copy, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub struct Extra {
   pub era: sp_runtime::generic::Era,
-  pub nonce: scale::Compact<u32>,
-  pub tip: scale::Compact<u64>,
+  #[codec(compact)]
+  pub nonce: u32,
+  #[codec(compact)]
+  pub tip: u64,
 }
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub struct SignedPayloadExtra {
   pub spec_version: u32,
   pub tx_version: u32,
@@ -70,4 +79,4 @@ pub struct SignedPayloadExtra {
   pub mortality_checkpoint: [u8; 32],
 }
-pub type Transaction = primitives::Transaction<Call, Extra>;
+pub type Transaction = tx::Transaction<Call, Extra>;

@@ -7,7 +7,8 @@ use primitives::SignalId;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Call {
   register_retirement_signal { in_favor_of: [u8; 32] },
   revoke_retirement_signal { retirement_signal_id: [u8; 32] },
@@ -18,7 +19,8 @@ pub enum Call {
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Event {
   RetirementSignalRegistered {
     signal_id: [u8; 32],

@@ -3,7 +3,6 @@ use frame_support::dispatch::{DispatchInfo, DispatchError};
 use serai_primitives::SeraiAddress;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 pub enum Event {
   ExtrinsicSuccess { dispatch_info: DispatchInfo },
   ExtrinsicFailed { dispatch_error: DispatchError, dispatch_info: DispatchInfo },

@@ -1,5 +1,9 @@
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Call {
-  set { now: scale::Compact<u64> },
+  set {
+    #[codec(compact)]
+    now: u64,
+  },
 }
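The #[codec(compact)] attribute keeps the field's natural integer type while still SCALE-encoding it compactly, so the wire format matches the old scale::Compact wrapper. A hedged sketch of that equivalence (struct names are illustrative; `scale` is parity-scale-codec renamed as in this workspace):

use scale::{Encode, Decode};

#[derive(Encode, Decode)]
struct WithWrapper {
  now: scale::Compact<u64>,
}

#[derive(Encode, Decode)]
struct WithAttribute {
  #[codec(compact)]
  now: u64,
}

fn main() {
  // Both forms produce the compact encoding of the same value.
  let a = WithWrapper { now: scale::Compact(1_718_000_000_000) }.encode();
  let b = WithAttribute { now: 1_718_000_000_000 }.encode();
  assert_eq!(a, b);
}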

substrate/abi/src/tx.rs (new file, 183 lines)
@@ -0,0 +1,183 @@
use scale::Encode;
use sp_core::sr25519::{Public, Signature};
use sp_runtime::traits::Verify;
use serai_primitives::SeraiAddress;
use frame_support::dispatch::GetDispatchInfo;
pub trait TransactionMember:
Clone + PartialEq + Eq + core::fmt::Debug + scale::Encode + scale::Decode + scale_info::TypeInfo
{
}
impl<
T: Clone
+ PartialEq
+ Eq
+ core::fmt::Debug
+ scale::Encode
+ scale::Decode
+ scale_info::TypeInfo,
> TransactionMember for T
{
}
type TransactionEncodeAs<'a, Extra> =
(&'a crate::Call, &'a Option<(SeraiAddress, Signature, Extra)>);
type TransactionDecodeAs<Extra> = (crate::Call, Option<(SeraiAddress, Signature, Extra)>);
// We use our own Transaction struct, over UncheckedExtrinsic, for more control, a bit more
// simplicity, and in order to be immune to https://github.com/paritytech/polkadot-sdk/issues/2947
#[allow(private_bounds)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Transaction<
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember,
> {
call: crate::Call,
mapped_call: Call,
signature: Option<(SeraiAddress, Signature, Extra)>,
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
Transaction<Call, Extra>
{
pub fn new(call: crate::Call, signature: Option<(SeraiAddress, Signature, Extra)>) -> Self {
Self { call: call.clone(), mapped_call: call.into(), signature }
}
pub fn call(&self) -> &crate::Call {
&self.call
}
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
scale::Encode for Transaction<Call, Extra>
{
fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
let tx: TransactionEncodeAs<Extra> = (&self.call, &self.signature);
tx.using_encoded(f)
}
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
scale::Decode for Transaction<Call, Extra>
{
fn decode<I: scale::Input>(input: &mut I) -> Result<Self, scale::Error> {
let (call, signature) = TransactionDecodeAs::decode(input)?;
let mapped_call = Call::from(call.clone());
Ok(Self { call, mapped_call, signature })
}
}
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
scale_info::TypeInfo for Transaction<Call, Extra>
{
type Identity = TransactionDecodeAs<Extra>;
// Define the type info as the info of the type equivalent to what we encode as
fn type_info() -> scale_info::Type {
TransactionDecodeAs::<Extra>::type_info()
}
}
#[cfg(feature = "serde")]
mod _serde {
use scale::Encode;
use serde::{ser::*, de::*};
use super::*;
impl<Call: 'static + TransactionMember + From<crate::Call>, Extra: 'static + TransactionMember>
Serialize for Transaction<Call, Extra>
{
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let encoded = self.encode();
serializer.serialize_bytes(&encoded)
}
}
#[cfg(feature = "std")]
impl<
'a,
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember,
> Deserialize<'a> for Transaction<Call, Extra>
{
fn deserialize<D: Deserializer<'a>>(de: D) -> Result<Self, D::Error> {
let bytes = sp_core::bytes::deserialize(de)?;
<Self as scale::Decode>::decode(&mut &bytes[..])
.map_err(|e| serde::de::Error::custom(format!("invalid transaction: {e}")))
}
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call> + TryInto<crate::Call>,
Extra: 'static + TransactionMember,
> sp_runtime::traits::Extrinsic for Transaction<Call, Extra>
{
type Call = Call;
type SignaturePayload = (SeraiAddress, Signature, Extra);
fn is_signed(&self) -> Option<bool> {
Some(self.signature.is_some())
}
fn new(call: Call, signature: Option<Self::SignaturePayload>) -> Option<Self> {
Some(Self { call: call.clone().try_into().ok()?, mapped_call: call, signature })
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call> + TryInto<crate::Call>,
Extra: 'static + TransactionMember,
> frame_support::traits::ExtrinsicCall for Transaction<Call, Extra>
{
fn call(&self) -> &Call {
&self.mapped_call
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember + sp_runtime::traits::SignedExtension,
> sp_runtime::traits::ExtrinsicMetadata for Transaction<Call, Extra>
{
type SignedExtensions = Extra;
const VERSION: u8 = 0;
}
impl<
Call: 'static + TransactionMember + From<crate::Call> + GetDispatchInfo,
Extra: 'static + TransactionMember,
> GetDispatchInfo for Transaction<Call, Extra>
{
fn get_dispatch_info(&self) -> frame_support::dispatch::DispatchInfo {
self.mapped_call.get_dispatch_info()
}
}
impl<
Call: 'static + TransactionMember + From<crate::Call>,
Extra: 'static + TransactionMember + sp_runtime::traits::SignedExtension,
> sp_runtime::traits::BlindCheckable for Transaction<Call, Extra>
{
type Checked = sp_runtime::generic::CheckedExtrinsic<Public, Call, Extra>;
fn check(
self,
) -> Result<Self::Checked, sp_runtime::transaction_validity::TransactionValidityError> {
Ok(match self.signature {
Some((signer, signature, extra)) => {
if !signature.verify(
(&self.call, &extra, extra.additional_signed()?).encode().as_slice(),
&signer.into(),
) {
Err(sp_runtime::transaction_validity::InvalidTransaction::BadProof)?
}
sp_runtime::generic::CheckedExtrinsic {
signed: Some((signer.into(), extra)),
function: self.mapped_call,
}
}
None => sp_runtime::generic::CheckedExtrinsic { signed: None, function: self.mapped_call },
})
}
}
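A short usage sketch from the client's perspective, mirroring the substrate/client changes later in this diff: the exported serai_abi::Transaction alias is tx::Transaction<Call, Extra>, and it is now constructed through Transaction::new rather than a struct literal.

use serai_abi::{Call, Transaction};

// An unsigned transaction simply wraps the call with no signature; signed
// construction passes Some((address, signature, extra)) instead.
fn unsigned(call: Call) -> Transaction {
  Transaction::new(call, None)
}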

@@ -1,4 +1,4 @@
-use sp_core::{ConstU32, bounded_vec::BoundedVec};
+use sp_core::{ConstU32, bounded::BoundedVec};
 pub use serai_validator_sets_primitives as primitives;
@@ -6,11 +6,12 @@ use serai_primitives::*;
 use serai_validator_sets_primitives::*;
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Call {
   set_keys {
     network: NetworkId,
-    removed_participants: Vec<SeraiAddress>,
+    removed_participants: BoundedVec<SeraiAddress, ConstU32<{ MAX_KEY_SHARES_PER_SET / 3 }>>,
     key_pair: KeyPair,
     signature: Signature,
   },
@@ -35,7 +36,8 @@ pub enum Call {
 #[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
 #[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize, borsh::BorshDeserialize))]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[cfg_attr(all(feature = "std", feature = "serde"), derive(serde::Deserialize))]
 pub enum Event {
   NewSet {
     set: ValidatorSet,

View File

@@ -3,7 +3,7 @@ use thiserror::Error;
 use async_lock::RwLock;
 use simple_request::{hyper, Request, Client};
-use scale::{Compact, Decode, Encode};
+use scale::{Decode, Encode};
 use serde::{Serialize, Deserialize, de::DeserializeOwned};
 pub use sp_core::{
@@ -43,8 +43,8 @@ impl Block {
   /// Returns the time of this block, set by its producer, in milliseconds since the epoch.
   pub fn time(&self) -> Result<u64, SeraiError> {
     for transaction in &self.transactions {
-      if let Call::Timestamp(timestamp::Call::set { now }) = &transaction.call {
-        return Ok(u64::from(*now));
+      if let Call::Timestamp(timestamp::Call::set { now }) = transaction.call() {
+        return Ok(*now);
       }
     }
     Err(SeraiError::InvalidNode("no time was present in block".to_string()))
@@ -162,15 +162,14 @@ impl Serai {
   }
   fn unsigned(call: Call) -> Transaction {
-    Transaction { call, signature: None }
+    Transaction::new(call, None)
   }
   pub fn sign(&self, signer: &Pair, call: Call, nonce: u32, tip: u64) -> Transaction {
     const SPEC_VERSION: u32 = 1;
     const TX_VERSION: u32 = 1;
-    let extra =
-      Extra { era: sp_runtime::generic::Era::Immortal, nonce: Compact(nonce), tip: Compact(tip) };
+    let extra = Extra { era: sp_runtime::generic::Era::Immortal, nonce, tip };
     let signature_payload = (
       &call,
       &extra,
@@ -184,7 +183,7 @@ impl Serai {
       .encode();
     let signature = signer.sign(&signature_payload);
-    Transaction { call, signature: Some((signer.public().into(), signature, extra)) }
+    Transaction::new(call, Some((signer.public().into(), signature, extra)))
   }
   pub async fn publish(&self, tx: &Transaction) -> Result<(), SeraiError> {
@@ -367,7 +366,10 @@ impl<'a> TemporalSerai<'a> {
     let Some(res) = res else { return Ok(None) };
     let res = Serai::hex_decode(res)?;
     Ok(Some(R::decode(&mut res.as_slice()).map_err(|_| {
-      SeraiError::InvalidRuntime("different type present at storage location".to_string())
+      SeraiError::InvalidRuntime(format!(
+        "different type present at storage location, raw value: {}",
+        hex::encode(res)
+      ))
     })?))
   }

View File

@@ -180,7 +180,10 @@ impl<'a> SeraiValidatorSets<'a> {
   pub fn set_keys(
     network: NetworkId,
-    removed_participants: Vec<SeraiAddress>,
+    removed_participants: sp_runtime::BoundedVec<
+      SeraiAddress,
+      sp_core::ConstU32<{ primitives::MAX_KEY_SHARES_PER_SET / 3 }>,
+    >,
     key_pair: KeyPair,
     signature: Signature,
   ) -> Transaction {

View File

@@ -64,7 +64,12 @@ pub async fn set_keys(
   // Set the key pair
   let block = publish_tx(
     serai,
-    &SeraiValidatorSets::set_keys(set.network, vec![], key_pair.clone(), Signature(sig.to_bytes())),
+    &SeraiValidatorSets::set_keys(
+      set.network,
+      vec![].try_into().unwrap(),
+      key_pair.clone(),
+      Signature(sig.to_bytes()),
+    ),
   )
   .await;
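
For readers unfamiliar with the `vec![].try_into().unwrap()` idiom above: `BoundedVec` enforces its length bound at conversion time, so building one from a `Vec` is fallible. A minimal sketch, assuming the `sp_core`/`sp_runtime` crates used elsewhere in this diff; the bound of 3 is arbitrary and purely illustrative.

use sp_core::ConstU32;
use sp_runtime::BoundedVec;

fn main() {
  // Within the bound: the conversion succeeds.
  let ok: BoundedVec<u8, ConstU32<3>> = vec![1, 2, 3].try_into().unwrap();
  assert_eq!(ok.len(), 3);

  // Over the bound: the conversion fails instead of silently truncating.
  let too_long: Result<BoundedVec<u8, ConstU32<3>>, _> = vec![1, 2, 3, 4].try_into();
  assert!(too_long.is_err());
}
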

View File

@@ -37,9 +37,6 @@ pub use balance::*;
 mod account;
 pub use account::*;
-mod tx;
-pub use tx::*;
 pub type BlockNumber = u64;
 pub type Header = sp_runtime::generic::Header<BlockNumber, sp_runtime::traits::BlakeTwo256>;

View File

@@ -1,124 +0,0 @@
use scale::Encode;
use sp_core::sr25519::{Public, Signature};
use sp_runtime::traits::Verify;
use crate::SeraiAddress;
trait TransactionMember:
Clone + PartialEq + Eq + core::fmt::Debug + scale::Encode + scale::Decode + scale_info::TypeInfo
{
}
impl<
T: Clone
+ PartialEq
+ Eq
+ core::fmt::Debug
+ scale::Encode
+ scale::Decode
+ scale_info::TypeInfo,
> TransactionMember for T
{
}
// We use our own Transaction struct, over UncheckedExtrinsic, for more control, a bit more
// simplicity, and in order to be immune to https://github.com/paritytech/polkadot-sdk/issues/2947
#[allow(private_bounds)]
#[derive(Clone, PartialEq, Eq, Debug, scale::Encode, scale::Decode, scale_info::TypeInfo)]
pub struct Transaction<Call: TransactionMember, Extra: TransactionMember> {
pub call: Call,
pub signature: Option<(SeraiAddress, Signature, Extra)>,
}
#[cfg(feature = "serde")]
mod _serde {
use scale::Encode;
use serde::{ser::*, de::*};
use super::*;
impl<Call: TransactionMember, Extra: TransactionMember> Serialize for Transaction<Call, Extra> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let encoded = self.encode();
serializer.serialize_bytes(&encoded)
}
}
#[cfg(feature = "std")]
impl<'a, Call: TransactionMember, Extra: TransactionMember> Deserialize<'a>
for Transaction<Call, Extra>
{
fn deserialize<D: Deserializer<'a>>(de: D) -> Result<Self, D::Error> {
let bytes = sp_core::bytes::deserialize(de)?;
scale::Decode::decode(&mut &bytes[..])
.map_err(|e| serde::de::Error::custom(format!("invalid transaction: {e}")))
}
}
}
impl<Call: TransactionMember, Extra: TransactionMember> sp_runtime::traits::Extrinsic
for Transaction<Call, Extra>
{
type Call = Call;
type SignaturePayload = (SeraiAddress, Signature, Extra);
fn is_signed(&self) -> Option<bool> {
Some(self.signature.is_some())
}
fn new(call: Call, signature: Option<Self::SignaturePayload>) -> Option<Self> {
Some(Self { call, signature })
}
}
impl<Call: TransactionMember, Extra: TransactionMember> frame_support::traits::ExtrinsicCall
for Transaction<Call, Extra>
{
fn call(&self) -> &Call {
&self.call
}
}
impl<Call: TransactionMember, Extra: TransactionMember> sp_runtime::traits::ExtrinsicMetadata
for Transaction<Call, Extra>
where
Extra: sp_runtime::traits::SignedExtension,
{
type SignedExtensions = Extra;
const VERSION: u8 = 0;
}
impl<Call: TransactionMember, Extra: TransactionMember> frame_support::dispatch::GetDispatchInfo
for Transaction<Call, Extra>
where
Call: frame_support::dispatch::GetDispatchInfo,
{
fn get_dispatch_info(&self) -> frame_support::dispatch::DispatchInfo {
self.call.get_dispatch_info()
}
}
impl<Call: TransactionMember, Extra: TransactionMember> sp_runtime::traits::BlindCheckable
for Transaction<Call, Extra>
where
Extra: sp_runtime::traits::SignedExtension,
{
type Checked = sp_runtime::generic::CheckedExtrinsic<Public, Call, Extra>;
fn check(
self,
) -> Result<Self::Checked, sp_runtime::transaction_validity::TransactionValidityError> {
Ok(match self.signature {
Some((signer, signature, extra)) => {
if !signature.verify(
(&self.call, &extra, extra.additional_signed()?).encode().as_slice(),
&signer.into(),
) {
Err(sp_runtime::transaction_validity::InvalidTransaction::BadProof)?
}
sp_runtime::generic::CheckedExtrinsic {
signed: Some((signer.into(), extra)),
function: self.call,
}
}
None => sp_runtime::generic::CheckedExtrinsic { signed: None, function: self.call },
})
}
}

View File

@@ -49,6 +49,7 @@ frame-executive = { git = "https://github.com/serai-dex/substrate", default-feat
 frame-benchmarking = { git = "https://github.com/serai-dex/substrate", default-features = false, optional = true }
 serai-primitives = { path = "../primitives", default-features = false }
+serai-abi = { path = "../abi", default-features = false, features = ["serde"] }
 pallet-timestamp = { git = "https://github.com/serai-dex/substrate", default-features = false }
 pallet-authorship = { git = "https://github.com/serai-dex/substrate", default-features = false }
@@ -102,6 +103,8 @@ std = [
   "frame-executive/std",
   "serai-primitives/std",
+  "serai-abi/std",
+  "serai-abi/serde",
   "pallet-timestamp/std",
   "pallet-authorship/std",

View File

@@ -0,0 +1,363 @@
use core::marker::PhantomData;
use scale::{Encode, Decode};
use serai_abi::Call;
use crate::{
Vec,
primitives::{PublicKey, SeraiAddress},
timestamp, coins, dex,
validator_sets::{self, MembershipProof},
in_instructions, signals, babe, grandpa, RuntimeCall,
};
impl From<Call> for RuntimeCall {
fn from(call: Call) -> RuntimeCall {
match call {
Call::Timestamp(serai_abi::timestamp::Call::set { now }) => {
RuntimeCall::Timestamp(timestamp::Call::set { now })
}
Call::Coins(coins) => match coins {
serai_abi::coins::Call::transfer { to, balance } => {
RuntimeCall::Coins(coins::Call::transfer { to: to.into(), balance })
}
serai_abi::coins::Call::burn { balance } => {
RuntimeCall::Coins(coins::Call::burn { balance })
}
serai_abi::coins::Call::burn_with_instruction { instruction } => {
RuntimeCall::Coins(coins::Call::burn_with_instruction { instruction })
}
},
Call::LiquidityTokens(lt) => match lt {
serai_abi::coins::LiquidityTokensCall::transfer { to, balance } => {
RuntimeCall::LiquidityTokens(coins::Call::transfer { to: to.into(), balance })
}
serai_abi::coins::LiquidityTokensCall::burn { balance } => {
RuntimeCall::LiquidityTokens(coins::Call::burn { balance })
}
},
Call::Dex(dex) => match dex {
serai_abi::dex::Call::add_liquidity {
coin,
coin_desired,
sri_desired,
coin_min,
sri_min,
mint_to,
} => RuntimeCall::Dex(dex::Call::add_liquidity {
coin,
coin_desired,
sri_desired,
coin_min,
sri_min,
mint_to: mint_to.into(),
}),
serai_abi::dex::Call::remove_liquidity {
coin,
lp_token_burn,
coin_min_receive,
sri_min_receive,
withdraw_to,
} => RuntimeCall::Dex(dex::Call::remove_liquidity {
coin,
lp_token_burn,
coin_min_receive,
sri_min_receive,
withdraw_to: withdraw_to.into(),
}),
serai_abi::dex::Call::swap_exact_tokens_for_tokens {
path,
amount_in,
amount_out_min,
send_to,
} => RuntimeCall::Dex(dex::Call::swap_exact_tokens_for_tokens {
path,
amount_in,
amount_out_min,
send_to: send_to.into(),
}),
serai_abi::dex::Call::swap_tokens_for_exact_tokens {
path,
amount_out,
amount_in_max,
send_to,
} => RuntimeCall::Dex(dex::Call::swap_tokens_for_exact_tokens {
path,
amount_out,
amount_in_max,
send_to: send_to.into(),
}),
},
Call::ValidatorSets(vs) => match vs {
serai_abi::validator_sets::Call::set_keys {
network,
removed_participants,
key_pair,
signature,
} => RuntimeCall::ValidatorSets(validator_sets::Call::set_keys {
network,
removed_participants: <_>::try_from(
removed_participants.into_iter().map(PublicKey::from).collect::<Vec<_>>(),
)
.unwrap(),
key_pair,
signature,
}),
serai_abi::validator_sets::Call::report_slashes { network, slashes, signature } => {
RuntimeCall::ValidatorSets(validator_sets::Call::report_slashes {
network,
slashes: <_>::try_from(
slashes
.into_iter()
.map(|(addr, slash)| (PublicKey::from(addr), slash))
.collect::<Vec<_>>(),
)
.unwrap(),
signature,
})
}
serai_abi::validator_sets::Call::allocate { network, amount } => {
RuntimeCall::ValidatorSets(validator_sets::Call::allocate { network, amount })
}
serai_abi::validator_sets::Call::deallocate { network, amount } => {
RuntimeCall::ValidatorSets(validator_sets::Call::deallocate { network, amount })
}
serai_abi::validator_sets::Call::claim_deallocation { network, session } => {
RuntimeCall::ValidatorSets(validator_sets::Call::claim_deallocation { network, session })
}
},
Call::InInstructions(ii) => match ii {
serai_abi::in_instructions::Call::execute_batch { batch } => {
RuntimeCall::InInstructions(in_instructions::Call::execute_batch { batch })
}
},
Call::Signals(signals) => match signals {
serai_abi::signals::Call::register_retirement_signal { in_favor_of } => {
RuntimeCall::Signals(signals::Call::register_retirement_signal { in_favor_of })
}
serai_abi::signals::Call::revoke_retirement_signal { retirement_signal_id } => {
RuntimeCall::Signals(signals::Call::revoke_retirement_signal { retirement_signal_id })
}
serai_abi::signals::Call::favor { signal_id, for_network } => {
RuntimeCall::Signals(signals::Call::favor { signal_id, for_network })
}
serai_abi::signals::Call::revoke_favor { signal_id, for_network } => {
RuntimeCall::Signals(signals::Call::revoke_favor { signal_id, for_network })
}
serai_abi::signals::Call::stand_against { signal_id, for_network } => {
RuntimeCall::Signals(signals::Call::stand_against { signal_id, for_network })
}
},
Call::Babe(babe) => match babe {
serai_abi::babe::Call::report_equivocation(report) => {
RuntimeCall::Babe(babe::Call::report_equivocation {
// TODO: Find a better way to go from Proof<[u8; 32]> to Proof<H256>
equivocation_proof: <_>::decode(&mut report.equivocation_proof.encode().as_slice())
.unwrap(),
key_owner_proof: MembershipProof(report.key_owner_proof.into(), PhantomData),
})
}
serai_abi::babe::Call::report_equivocation_unsigned(report) => {
RuntimeCall::Babe(babe::Call::report_equivocation_unsigned {
// TODO: Find a better way to go from Proof<[u8; 32]> to Proof<H256>
equivocation_proof: <_>::decode(&mut report.equivocation_proof.encode().as_slice())
.unwrap(),
key_owner_proof: MembershipProof(report.key_owner_proof.into(), PhantomData),
})
}
},
Call::Grandpa(grandpa) => match grandpa {
serai_abi::grandpa::Call::report_equivocation(report) => {
RuntimeCall::Grandpa(grandpa::Call::report_equivocation {
// TODO: Find a better way to go from Proof<[u8; 32]> to Proof<H256>
equivocation_proof: <_>::decode(&mut report.equivocation_proof.encode().as_slice())
.unwrap(),
key_owner_proof: MembershipProof(report.key_owner_proof.into(), PhantomData),
})
}
serai_abi::grandpa::Call::report_equivocation_unsigned(report) => {
RuntimeCall::Grandpa(grandpa::Call::report_equivocation_unsigned {
// TODO: Find a better way to go from Proof<[u8; 32]> to Proof<H256>
equivocation_proof: <_>::decode(&mut report.equivocation_proof.encode().as_slice())
.unwrap(),
key_owner_proof: MembershipProof(report.key_owner_proof.into(), PhantomData),
})
}
},
}
}
}
impl TryInto<Call> for RuntimeCall {
type Error = ();
fn try_into(self) -> Result<Call, ()> {
Ok(match self {
RuntimeCall::Timestamp(timestamp::Call::set { now }) => {
Call::Timestamp(serai_abi::timestamp::Call::set { now })
}
RuntimeCall::Coins(call) => Call::Coins(match call {
coins::Call::transfer { to, balance } => {
serai_abi::coins::Call::transfer { to: to.into(), balance }
}
coins::Call::burn { balance } => serai_abi::coins::Call::burn { balance },
coins::Call::burn_with_instruction { instruction } => {
serai_abi::coins::Call::burn_with_instruction { instruction }
}
_ => Err(())?,
}),
RuntimeCall::LiquidityTokens(call) => Call::LiquidityTokens(match call {
coins::Call::transfer { to, balance } => {
serai_abi::coins::LiquidityTokensCall::transfer { to: to.into(), balance }
}
coins::Call::burn { balance } => serai_abi::coins::LiquidityTokensCall::burn { balance },
_ => Err(())?,
}),
RuntimeCall::Dex(call) => Call::Dex(match call {
dex::Call::add_liquidity {
coin,
coin_desired,
sri_desired,
coin_min,
sri_min,
mint_to,
} => serai_abi::dex::Call::add_liquidity {
coin,
coin_desired,
sri_desired,
coin_min,
sri_min,
mint_to: mint_to.into(),
},
dex::Call::remove_liquidity {
coin,
lp_token_burn,
coin_min_receive,
sri_min_receive,
withdraw_to,
} => serai_abi::dex::Call::remove_liquidity {
coin,
lp_token_burn,
coin_min_receive,
sri_min_receive,
withdraw_to: withdraw_to.into(),
},
dex::Call::swap_exact_tokens_for_tokens { path, amount_in, amount_out_min, send_to } => {
serai_abi::dex::Call::swap_exact_tokens_for_tokens {
path,
amount_in,
amount_out_min,
send_to: send_to.into(),
}
}
dex::Call::swap_tokens_for_exact_tokens { path, amount_out, amount_in_max, send_to } => {
serai_abi::dex::Call::swap_tokens_for_exact_tokens {
path,
amount_out,
amount_in_max,
send_to: send_to.into(),
}
}
_ => Err(())?,
}),
RuntimeCall::ValidatorSets(call) => Call::ValidatorSets(match call {
validator_sets::Call::set_keys { network, removed_participants, key_pair, signature } => {
serai_abi::validator_sets::Call::set_keys {
network,
removed_participants: <_>::try_from(
removed_participants.into_iter().map(SeraiAddress::from).collect::<Vec<_>>(),
)
.unwrap(),
key_pair,
signature,
}
}
validator_sets::Call::report_slashes { network, slashes, signature } => {
serai_abi::validator_sets::Call::report_slashes {
network,
slashes: <_>::try_from(
slashes
.into_iter()
.map(|(addr, slash)| (SeraiAddress::from(addr), slash))
.collect::<Vec<_>>(),
)
.unwrap(),
signature,
}
}
validator_sets::Call::allocate { network, amount } => {
serai_abi::validator_sets::Call::allocate { network, amount }
}
validator_sets::Call::deallocate { network, amount } => {
serai_abi::validator_sets::Call::deallocate { network, amount }
}
validator_sets::Call::claim_deallocation { network, session } => {
serai_abi::validator_sets::Call::claim_deallocation { network, session }
}
_ => Err(())?,
}),
RuntimeCall::InInstructions(call) => Call::InInstructions(match call {
in_instructions::Call::execute_batch { batch } => {
serai_abi::in_instructions::Call::execute_batch { batch }
}
_ => Err(())?,
}),
RuntimeCall::Signals(call) => Call::Signals(match call {
signals::Call::register_retirement_signal { in_favor_of } => {
serai_abi::signals::Call::register_retirement_signal { in_favor_of }
}
signals::Call::revoke_retirement_signal { retirement_signal_id } => {
serai_abi::signals::Call::revoke_retirement_signal { retirement_signal_id }
}
signals::Call::favor { signal_id, for_network } => {
serai_abi::signals::Call::favor { signal_id, for_network }
}
signals::Call::revoke_favor { signal_id, for_network } => {
serai_abi::signals::Call::revoke_favor { signal_id, for_network }
}
signals::Call::stand_against { signal_id, for_network } => {
serai_abi::signals::Call::stand_against { signal_id, for_network }
}
_ => Err(())?,
}),
RuntimeCall::Babe(call) => Call::Babe(match call {
babe::Call::report_equivocation { equivocation_proof, key_owner_proof } => {
serai_abi::babe::Call::report_equivocation(serai_abi::babe::ReportEquivocation {
// TODO: Find a better way to go from Proof<H256> to Proof<[u8; 32]>
equivocation_proof: <_>::decode(&mut equivocation_proof.encode().as_slice()).unwrap(),
key_owner_proof: key_owner_proof.0.into(),
})
}
babe::Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } => {
serai_abi::babe::Call::report_equivocation_unsigned(serai_abi::babe::ReportEquivocation {
// TODO: Find a better way to go from Proof<H256> to Proof<[u8; 32]>
equivocation_proof: <_>::decode(&mut equivocation_proof.encode().as_slice()).unwrap(),
key_owner_proof: key_owner_proof.0.into(),
})
}
_ => Err(())?,
}),
RuntimeCall::Grandpa(call) => Call::Grandpa(match call {
grandpa::Call::report_equivocation { equivocation_proof, key_owner_proof } => {
serai_abi::grandpa::Call::report_equivocation(serai_abi::grandpa::ReportEquivocation {
// TODO: Find a better way to go from Proof<H256> to Proof<[u8; 32]>
equivocation_proof: <_>::decode(&mut equivocation_proof.encode().as_slice()).unwrap(),
key_owner_proof: key_owner_proof.0.into(),
})
}
grandpa::Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } => {
serai_abi::grandpa::Call::report_equivocation_unsigned(
serai_abi::grandpa::ReportEquivocation {
// TODO: Find a better way to go from Proof<H256> to Proof<[u8; 32]>
equivocation_proof: <_>::decode(&mut equivocation_proof.encode().as_slice()).unwrap(),
key_owner_proof: key_owner_proof.0.into(),
},
)
}
_ => Err(())?,
}),
_ => Err(())?,
})
}
}
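
A side note, outside the diff itself: the `From`/`TryInto` pair above makes the ABI an injection into the runtime's call set, and the runtime change further below reduces its CallFilter to "is this call representable in the ABI?". A self-contained sketch of that round-trip and filter pattern; every identifier here is an illustrative toy, not a real Serai type.

// Toy ABI and runtime call enums; purely illustrative.
#[derive(Clone, Debug, PartialEq)]
enum AbiCall {
  Burn { amount: u64 },
}

#[derive(Clone, Debug, PartialEq)]
enum RuntimeCall {
  Burn { amount: u64 },
  // Present in the runtime, deliberately absent from the ABI.
  NoteStalled,
}

impl From<AbiCall> for RuntimeCall {
  fn from(call: AbiCall) -> Self {
    match call {
      AbiCall::Burn { amount } => RuntimeCall::Burn { amount },
    }
  }
}

impl TryFrom<RuntimeCall> for AbiCall {
  type Error = ();
  fn try_from(call: RuntimeCall) -> Result<Self, ()> {
    match call {
      RuntimeCall::Burn { amount } => Ok(AbiCall::Burn { amount }),
      RuntimeCall::NoteStalled => Err(()),
    }
  }
}

// The filter pattern used by the runtime below: a call is allowed iff it
// converts into the ABI.
fn contains(call: &RuntimeCall) -> bool {
  AbiCall::try_from(call.clone()).is_ok()
}

fn main() {
  // Round-trip: ABI -> runtime -> ABI is the identity.
  let abi = AbiCall::Burn { amount: 10 };
  assert_eq!(AbiCall::try_from(RuntimeCall::from(abi.clone())), Ok(abi));
  // Calls without an ABI representation are filtered out.
  assert!(contains(&RuntimeCall::Burn { amount: 10 }));
  assert!(!contains(&RuntimeCall::NoteStalled));
}
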

View File

@@ -64,6 +64,8 @@ use sp_authority_discovery::AuthorityId as AuthorityDiscoveryId;
 use babe::AuthorityId as BabeId;
 use grandpa::AuthorityId as GrandpaId;
+mod abi;
 /// Nonce of a transaction in the chain, for a given account.
 pub type Nonce = u32;
@@ -81,7 +83,7 @@ pub type SignedExtra = (
   transaction_payment::ChargeTransactionPayment<Runtime>,
 );
-pub type Transaction = serai_primitives::Transaction<RuntimeCall, SignedExtra>;
+pub type Transaction = serai_abi::tx::Transaction<RuntimeCall, SignedExtra>;
 pub type Block = generic::Block<Header, Transaction>;
 pub type BlockId = generic::BlockId<Block>;
@@ -161,35 +163,9 @@ parameter_types! {
 pub struct CallFilter;
 impl Contains<RuntimeCall> for CallFilter {
   fn contains(call: &RuntimeCall) -> bool {
-    match call {
-      RuntimeCall::Timestamp(call) => match call {
-        timestamp::Call::set { .. } => true,
-        timestamp::Call::__Ignore(_, _) => false,
-      },
-      // All of these pallets are our own, and all of their written calls are intended to be called
-      RuntimeCall::Coins(call) => !matches!(call, coins::Call::__Ignore(_, _)),
-      RuntimeCall::LiquidityTokens(call) => match call {
-        coins::Call::transfer { .. } | coins::Call::burn { .. } => true,
-        coins::Call::burn_with_instruction { .. } | coins::Call::__Ignore(_, _) => false,
-      },
-      RuntimeCall::Dex(call) => !matches!(call, dex::Call::__Ignore(_, _)),
-      RuntimeCall::ValidatorSets(call) => !matches!(call, validator_sets::Call::__Ignore(_, _)),
-      RuntimeCall::InInstructions(call) => !matches!(call, in_instructions::Call::__Ignore(_, _)),
-      RuntimeCall::Signals(call) => !matches!(call, signals::Call::__Ignore(_, _)),
-      RuntimeCall::Babe(call) => match call {
-        babe::Call::report_equivocation { .. } |
-        babe::Call::report_equivocation_unsigned { .. } => true,
-        babe::Call::plan_config_change { .. } | babe::Call::__Ignore(_, _) => false,
-      },
-      RuntimeCall::Grandpa(call) => match call {
-        grandpa::Call::report_equivocation { .. } |
-        grandpa::Call::report_equivocation_unsigned { .. } => true,
-        grandpa::Call::note_stalled { .. } | grandpa::Call::__Ignore(_, _) => false,
-      },
-    }
+    // If the call is defined in our ABI, it's allowed
+    let call: Result<serai_abi::Call, ()> = call.clone().try_into();
+    call.is_ok()
   }
 }

View File

@@ -878,7 +878,7 @@ pub mod pallet {
     pub fn set_keys(
       origin: OriginFor<T>,
       network: NetworkId,
-      removed_participants: Vec<Public>,
+      removed_participants: BoundedVec<Public, ConstU32<{ MAX_KEY_SHARES_PER_SET / 3 }>>,
       key_pair: KeyPair,
       signature: Signature,
     ) -> DispatchResult {

View File

@@ -365,7 +365,7 @@ impl Coordinator {
       NetworkId::Ethereum => {
         use ethereum_serai::alloy::{
           simple_request_transport::SimpleRequest,
-          rpc_types::BlockNumberOrTag,
+          rpc_types::{BlockTransactionsKind, BlockNumberOrTag},
           rpc_client::ClientBuilder,
           provider::{Provider, RootProvider},
           network::Ethereum,
@@ -375,7 +375,7 @@ impl Coordinator {
           ClientBuilder::default().transport(SimpleRequest::new(rpc_url.clone()), true),
         );
         let start = provider
-          .get_block(BlockNumberOrTag::Latest.into(), false)
+          .get_block(BlockNumberOrTag::Latest.into(), BlockTransactionsKind::Hashes)
           .await
           .unwrap()
           .unwrap()
@@ -386,7 +386,7 @@ impl Coordinator {
         provider.raw_request::<_, ()>("anvil_mine".into(), [96]).await.unwrap();
         let end_of_epoch = start + 31;
         let hash = provider
-          .get_block(BlockNumberOrTag::Number(end_of_epoch).into(), false)
+          .get_block(BlockNumberOrTag::Number(end_of_epoch).into(), BlockTransactionsKind::Hashes)
           .await
           .unwrap()
           .unwrap()
@@ -468,7 +468,7 @@ impl Coordinator {
       NetworkId::Ethereum => {
         use ethereum_serai::alloy::{
           simple_request_transport::SimpleRequest,
-          rpc_types::BlockNumberOrTag,
+          rpc_types::{BlockTransactionsKind, BlockNumberOrTag},
           rpc_client::ClientBuilder,
           provider::{Provider, RootProvider},
           network::Ethereum,
@@ -480,7 +480,7 @@ impl Coordinator {
         );
         let expected_number = provider
-          .get_block(BlockNumberOrTag::Latest.into(), false)
+          .get_block(BlockNumberOrTag::Latest.into(), BlockTransactionsKind::Hashes)
           .await
           .unwrap()
           .unwrap()
@@ -503,7 +503,7 @@ impl Coordinator {
         .unwrap());
         let new_number = provider
-          .get_block(BlockNumberOrTag::Latest.into(), false)
+          .get_block(BlockNumberOrTag::Latest.into(), BlockTransactionsKind::Hashes)
           .await
           .unwrap()
           .unwrap()