6 Commits

Author SHA1 Message Date
Luke Parker
ada94e8c5d Get all processors to compile again
Requires splitting `serai-cosign` into `serai-cosign` and `serai-cosign-types`
so the processors don't require `serai-client/serai` (not correct yet).
2025-09-02 02:17:10 -04:00
Luke Parker
75240ed327 Update serai-message-queue to the new serai-primitives 2025-09-02 02:17:10 -04:00
Luke Parker
6177cf5c07 Have serai-runtime compile again 2025-09-02 02:17:10 -04:00
Luke Parker
0d38dc96b6 Use serai-primitives, not serai-client, when possible in coordinator/*
Also updates `serai-coordinator-tributary` to prefer `borsh` to SCALE.
2025-09-02 02:17:10 -04:00
Luke Parker
e8094523ff Use borsh instead of SCALE within tendermint-machine, tributary-sdk
Not only does this follow our general practice, the latest SCALE has a
possibly-lossy truncation in its current implementation for `enum`s I'd like to
avoid without simply silencing.
2025-09-02 02:17:09 -04:00
Luke Parker
53a64bc7e2 Update serai-abi, and dependencies, to patch-polkadot-sdk 2025-09-02 02:17:09 -04:00
131 changed files with 2208 additions and 1121 deletions

View File

@@ -61,6 +61,7 @@ jobs:
-p serai-monero-processor \ -p serai-monero-processor \
-p tendermint-machine \ -p tendermint-machine \
-p tributary-sdk \ -p tributary-sdk \
-p serai-cosign-types \
-p serai-cosign \ -p serai-cosign \
-p serai-coordinator-substrate \ -p serai-coordinator-substrate \
-p serai-coordinator-tributary \ -p serai-coordinator-tributary \

1799
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -82,6 +82,7 @@ members = [
"coordinator/tributary-sdk/tendermint", "coordinator/tributary-sdk/tendermint",
"coordinator/tributary-sdk", "coordinator/tributary-sdk",
"coordinator/cosign/types",
"coordinator/cosign", "coordinator/cosign",
"coordinator/substrate", "coordinator/substrate",
"coordinator/tributary", "coordinator/tributary",

View File

@@ -31,3 +31,5 @@ tokio = { version = "1", default-features = false }
serai-db = { path = "../../common/db", version = "0.1.1" } serai-db = { path = "../../common/db", version = "0.1.1" }
serai-task = { path = "../../common/task", version = "0.1" } serai-task = { path = "../../common/task", version = "0.1" }
serai-cosign-types = { path = "./types" }

View File

@@ -19,6 +19,8 @@ use serai_client::{
use serai_db::*; use serai_db::*;
use serai_task::*; use serai_task::*;
use serai_cosign_types::*;
/// The cosigns which are intended to be performed. /// The cosigns which are intended to be performed.
mod intend; mod intend;
/// The evaluator of the cosigns. /// The evaluator of the cosigns.

View File

@@ -0,0 +1,25 @@
[package]
name = "serai-cosign-types"
version = "0.1.0"
description = "Types used when cosigning for the Serai network"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coordinator/cosign/types"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
publish = false
rust-version = "1.85"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
schnorrkel = { version = "0.11", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-primitives = { path = "../../../substrate/primitives", default-features = false, features = ["std"] }

View File

@@ -0,0 +1,15 @@
AGPL-3.0-only license
Copyright (c) 2023-2025 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@@ -0,0 +1,72 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![deny(missing_docs)]
//! Types used when cosigning Serai. For more info, please see `serai-cosign`.
use borsh::{BorshSerialize, BorshDeserialize};
use serai_primitives::{crypto::Public, network_id::ExternalNetworkId};
/// The schnorrkel context to use when signing a cosign.
pub const COSIGN_CONTEXT: &[u8] = b"/serai/coordinator/cosign";
/// An intended cosign.
#[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct CosignIntent {
/// The global session this cosign is being performed under.
pub global_session: [u8; 32],
/// The number of the block to cosign.
pub block_number: u64,
/// The hash of the block to cosign.
pub block_hash: [u8; 32],
/// If this cosign must be handled before further cosigns are.
pub notable: bool,
}
/// A cosign.
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct Cosign {
/// The global session this cosign is being performed under.
pub global_session: [u8; 32],
/// The number of the block to cosign.
pub block_number: u64,
/// The hash of the block to cosign.
pub block_hash: [u8; 32],
/// The actual cosigner.
pub cosigner: ExternalNetworkId,
}
impl CosignIntent {
/// Convert this into a `Cosign`.
pub fn into_cosign(self, cosigner: ExternalNetworkId) -> Cosign {
let CosignIntent { global_session, block_number, block_hash, notable: _ } = self;
Cosign { global_session, block_number, block_hash, cosigner }
}
}
impl Cosign {
/// The message to sign when signing this cosign.
///
/// This must be signed with schnorrkel, the context set to `COSIGN_CONTEXT`.
pub fn signature_message(&self) -> Vec<u8> {
// We use a schnorrkel context to domain-separate this
borsh::to_vec(self).unwrap()
}
}
/// A signed cosign.
#[derive(Clone, Debug, BorshSerialize, BorshDeserialize)]
pub struct SignedCosign {
/// The cosign.
pub cosign: Cosign,
/// The signature for the cosign.
pub signature: [u8; 64],
}
impl SignedCosign {
/// Verify a cosign's signature.
pub fn verify_signature(&self, signer: Public) -> bool {
let Ok(signer) = schnorrkel::PublicKey::from_bytes(&signer.0) else { return false };
let Ok(signature) = schnorrkel::Signature::from_bytes(&self.signature) else { return false };
signer.verify_simple(COSIGN_CONTEXT, &self.cosign.signature_message(), &signature).is_ok()
}
}

View File

@@ -22,7 +22,7 @@ borsh = { version = "1", default-features = false, features = ["std", "derive",
serai-db = { path = "../../common/db", version = "0.1" } serai-db = { path = "../../common/db", version = "0.1" }
serai-client = { path = "../../substrate/client", default-features = false, features = ["serai"] } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
serai-cosign = { path = "../cosign" } serai-cosign = { path = "../cosign" }
tributary-sdk = { path = "../tributary-sdk" } tributary-sdk = { path = "../tributary-sdk" }

View File

@@ -1,7 +1,7 @@
use core::future::Future; use core::future::Future;
use std::time::{Duration, SystemTime}; use std::time::{Duration, SystemTime};
use serai_client::validator_sets::primitives::{MAX_KEY_SHARES_PER_SET, ExternalValidatorSet}; use serai_primitives::{MAX_KEY_SHARES_PER_SET, ExternalValidatorSet};
use futures_lite::FutureExt; use futures_lite::FutureExt;

View File

@@ -7,7 +7,7 @@ use std::collections::HashMap;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_client::{primitives::ExternalNetworkId, validator_sets::primitives::ExternalValidatorSet}; use serai_primitives::{network_id::ExternalNetworkId, validator_sets::ExternalValidatorSet};
use serai_db::Db; use serai_db::Db;
use tributary_sdk::{ReadWrite, TransactionTrait, Tributary, TributaryReader}; use tributary_sdk::{ReadWrite, TransactionTrait, Tributary, TributaryReader};

View File

@@ -36,7 +36,7 @@ log = { version = "0.4", default-features = false, features = ["std"] }
serai-db = { path = "../../common/db", version = "0.1" } serai-db = { path = "../../common/db", version = "0.1" }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
futures-util = { version = "0.3", default-features = false, features = ["std", "sink", "channel"] } futures-util = { version = "0.3", default-features = false, features = ["std", "sink", "channel"] }
futures-channel = { version = "0.3", default-features = false, features = ["std", "sink"] } futures-channel = { version = "0.3", default-features = false, features = ["std", "sink"] }
tendermint = { package = "tendermint-machine", path = "./tendermint", version = "0.2" } tendermint = { package = "tendermint-machine", path = "./tendermint", version = "0.2" }

View File

@@ -5,7 +5,7 @@ use ciphersuite::{group::GroupEncoding, Ciphersuite};
use serai_db::{Get, DbTxn, Db}; use serai_db::{Get, DbTxn, Db};
use scale::Decode; use borsh::BorshDeserialize;
use tendermint::ext::{Network, Commit}; use tendermint::ext::{Network, Commit};
@@ -62,7 +62,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
D::key( D::key(
b"tributary_blockchain", b"tributary_blockchain",
b"next_nonce", b"next_nonce",
[genesis.as_ref(), signer.to_bytes().as_ref(), order].concat(), [genesis.as_slice(), signer.to_bytes().as_slice(), order].concat(),
) )
} }
@@ -106,7 +106,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
pub(crate) fn block_from_db(db: &D, genesis: [u8; 32], block: &[u8; 32]) -> Option<Block<T>> { pub(crate) fn block_from_db(db: &D, genesis: [u8; 32], block: &[u8; 32]) -> Option<Block<T>> {
db.get(Self::block_key(&genesis, block)) db.get(Self::block_key(&genesis, block))
.map(|bytes| Block::<T>::read::<&[u8]>(&mut bytes.as_ref()).unwrap()) .map(|bytes| Block::<T>::read::<&[u8]>(&mut bytes.as_slice()).unwrap())
} }
pub(crate) fn commit_from_db(db: &D, genesis: [u8; 32], block: &[u8; 32]) -> Option<Vec<u8>> { pub(crate) fn commit_from_db(db: &D, genesis: [u8; 32], block: &[u8; 32]) -> Option<Vec<u8>> {
@@ -166,7 +166,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
// we must have a commit per valid hash // we must have a commit per valid hash
let commit = Self::commit_from_db(db, genesis, &hash).unwrap(); let commit = Self::commit_from_db(db, genesis, &hash).unwrap();
// commit has to be valid if it is coming from our db // commit has to be valid if it is coming from our db
Some(Commit::<N::SignatureScheme>::decode(&mut commit.as_ref()).unwrap()) Some(Commit::<N::SignatureScheme>::deserialize_reader(&mut commit.as_slice()).unwrap())
}; };
let unsigned_in_chain = let unsigned_in_chain =
|hash: [u8; 32]| db.get(Self::unsigned_included_key(&self.genesis, &hash)).is_some(); |hash: [u8; 32]| db.get(Self::unsigned_included_key(&self.genesis, &hash)).is_some();
@@ -241,7 +241,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
let commit = |block: u64| -> Option<Commit<N::SignatureScheme>> { let commit = |block: u64| -> Option<Commit<N::SignatureScheme>> {
let commit = self.commit_by_block_number(block)?; let commit = self.commit_by_block_number(block)?;
// commit has to be valid if it is coming from our db // commit has to be valid if it is coming from our db
Some(Commit::<N::SignatureScheme>::decode(&mut commit.as_ref()).unwrap()) Some(Commit::<N::SignatureScheme>::deserialize_reader(&mut commit.as_slice()).unwrap())
}; };
let mut txn_db = db.clone(); let mut txn_db = db.clone();

View File

@@ -3,10 +3,11 @@ use std::{sync::Arc, io};
use zeroize::Zeroizing; use zeroize::Zeroizing;
use borsh::BorshDeserialize;
use ciphersuite::Ciphersuite; use ciphersuite::Ciphersuite;
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use scale::Decode;
use futures_channel::mpsc::UnboundedReceiver; use futures_channel::mpsc::UnboundedReceiver;
use futures_util::{StreamExt, SinkExt}; use futures_util::{StreamExt, SinkExt};
use ::tendermint::{ use ::tendermint::{
@@ -177,7 +178,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Tributary<D, T, P> {
let block_number = BlockNumber(blockchain.block_number()); let block_number = BlockNumber(blockchain.block_number());
let start_time = if let Some(commit) = blockchain.commit(&blockchain.tip()) { let start_time = if let Some(commit) = blockchain.commit(&blockchain.tip()) {
Commit::<Validators>::decode(&mut commit.as_ref()).unwrap().end_time Commit::<Validators>::deserialize_reader(&mut commit.as_slice()).unwrap().end_time
} else { } else {
start_time start_time
}; };
@@ -276,8 +277,8 @@ impl<D: Db, T: TransactionTrait, P: P2p> Tributary<D, T, P> {
} }
let block = TendermintBlock(block.serialize()); let block = TendermintBlock(block.serialize());
let mut commit_ref = commit.as_ref(); let mut commit_ref = commit.as_slice();
let Ok(commit) = Commit::<Arc<Validators>>::decode(&mut commit_ref) else { let Ok(commit) = Commit::<Arc<Validators>>::deserialize_reader(&mut commit_ref) else {
log::error!("sent an invalidly serialized commit"); log::error!("sent an invalidly serialized commit");
return false; return false;
}; };
@@ -327,7 +328,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Tributary<D, T, P> {
Some(&TENDERMINT_MESSAGE) => { Some(&TENDERMINT_MESSAGE) => {
let Ok(msg) = let Ok(msg) =
SignedMessageFor::<TendermintNetwork<D, T, P>>::decode::<&[u8]>(&mut &msg[1 ..]) SignedMessageFor::<TendermintNetwork<D, T, P>>::deserialize_reader(&mut &msg[1 ..])
else { else {
log::error!("received invalid tendermint message"); log::error!("received invalid tendermint message");
return false; return false;
@@ -367,15 +368,17 @@ impl<D: Db, T: TransactionTrait> TributaryReader<D, T> {
Blockchain::<D, T>::commit_from_db(&self.0, self.1, hash) Blockchain::<D, T>::commit_from_db(&self.0, self.1, hash)
} }
pub fn parsed_commit(&self, hash: &[u8; 32]) -> Option<Commit<Validators>> { pub fn parsed_commit(&self, hash: &[u8; 32]) -> Option<Commit<Validators>> {
self.commit(hash).map(|commit| Commit::<Validators>::decode(&mut commit.as_ref()).unwrap()) self
.commit(hash)
.map(|commit| Commit::<Validators>::deserialize_reader(&mut commit.as_slice()).unwrap())
} }
pub fn block_after(&self, hash: &[u8; 32]) -> Option<[u8; 32]> { pub fn block_after(&self, hash: &[u8; 32]) -> Option<[u8; 32]> {
Blockchain::<D, T>::block_after(&self.0, self.1, hash) Blockchain::<D, T>::block_after(&self.0, self.1, hash)
} }
pub fn time_of_block(&self, hash: &[u8; 32]) -> Option<u64> { pub fn time_of_block(&self, hash: &[u8; 32]) -> Option<u64> {
self self.commit(hash).map(|commit| {
.commit(hash) Commit::<Validators>::deserialize_reader(&mut commit.as_slice()).unwrap().end_time
.map(|commit| Commit::<Validators>::decode(&mut commit.as_ref()).unwrap().end_time) })
} }
pub fn locally_provided_txs_in_block(&self, hash: &[u8; 32], order: &str) -> bool { pub fn locally_provided_txs_in_block(&self, hash: &[u8; 32], order: &str) -> bool {

View File

@@ -24,7 +24,7 @@ use schnorr::{
use serai_db::Db; use serai_db::Db;
use scale::{Encode, Decode}; use borsh::{BorshSerialize, BorshDeserialize};
use tendermint::{ use tendermint::{
SignedMessageFor, SignedMessageFor,
ext::{ ext::{
@@ -249,7 +249,7 @@ impl Weights for Validators {
} }
} }
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] #[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct TendermintBlock(pub Vec<u8>); pub struct TendermintBlock(pub Vec<u8>);
impl BlockTrait for TendermintBlock { impl BlockTrait for TendermintBlock {
type Id = [u8; 32]; type Id = [u8; 32];
@@ -301,7 +301,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Network for TendermintNetwork<D, T, P>
fn broadcast(&mut self, msg: SignedMessageFor<Self>) -> impl Send + Future<Output = ()> { fn broadcast(&mut self, msg: SignedMessageFor<Self>) -> impl Send + Future<Output = ()> {
async move { async move {
let mut to_broadcast = vec![TENDERMINT_MESSAGE]; let mut to_broadcast = vec![TENDERMINT_MESSAGE];
to_broadcast.extend(msg.encode()); msg.serialize(&mut to_broadcast).unwrap();
self.p2p.broadcast(self.genesis, to_broadcast).await self.p2p.broadcast(self.genesis, to_broadcast).await
} }
} }
@@ -391,7 +391,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Network for TendermintNetwork<D, T, P>
return invalid_block(); return invalid_block();
}; };
let encoded_commit = commit.encode(); let encoded_commit = borsh::to_vec(&commit).unwrap();
loop { loop {
let block_res = self.blockchain.write().await.add_block::<Self>( let block_res = self.blockchain.write().await.add_block::<Self>(
&block, &block,

View File

@@ -1,6 +1,6 @@
use std::io; use std::io;
use scale::{Encode, Decode, IoReader}; use borsh::BorshDeserialize;
use blake2::{Digest, Blake2s256}; use blake2::{Digest, Blake2s256};
@@ -27,14 +27,14 @@ pub enum TendermintTx {
impl ReadWrite for TendermintTx { impl ReadWrite for TendermintTx {
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> { fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
Evidence::decode(&mut IoReader(reader)) Evidence::deserialize_reader(reader)
.map(TendermintTx::SlashEvidence) .map(TendermintTx::SlashEvidence)
.map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "invalid evidence format")) .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "invalid evidence format"))
} }
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> { fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self { match self {
TendermintTx::SlashEvidence(ev) => writer.write_all(&ev.encode()), TendermintTx::SlashEvidence(ev) => writer.write_all(&borsh::to_vec(&ev).unwrap()),
} }
} }
} }

View File

@@ -13,8 +13,6 @@ use ciphersuite::{
}; };
use schnorr::SchnorrSignature; use schnorr::SchnorrSignature;
use scale::Encode;
use ::tendermint::{ use ::tendermint::{
ext::{Network, Signer as SignerTrait, SignatureScheme, BlockNumber, RoundNumber}, ext::{Network, Signer as SignerTrait, SignatureScheme, BlockNumber, RoundNumber},
SignedMessageFor, DataFor, Message, SignedMessage, Data, Evidence, SignedMessageFor, DataFor, Message, SignedMessage, Data, Evidence,
@@ -204,7 +202,7 @@ pub async fn signed_from_data<N: Network>(
round: RoundNumber(round_number), round: RoundNumber(round_number),
data, data,
}; };
let sig = signer.sign(&msg.encode()).await; let sig = signer.sign(&borsh::to_vec(&msg).unwrap()).await;
SignedMessage { msg, sig } SignedMessage { msg, sig }
} }
@@ -217,5 +215,5 @@ pub async fn random_evidence_tx<N: Network>(
let data = Data::Proposal(Some(RoundNumber(0)), b); let data = Data::Proposal(Some(RoundNumber(0)), b);
let signer_id = signer.validator_id().await.unwrap(); let signer_id = signer.validator_id().await.unwrap();
let signed = signed_from_data::<N>(signer, signer_id, 0, 0, data).await; let signed = signed_from_data::<N>(signer, signer_id, 0, 0, data).await;
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(signed.encode())) TendermintTx::SlashEvidence(Evidence::InvalidValidRound(borsh::to_vec(&signed).unwrap()))
} }

View File

@@ -6,8 +6,6 @@ use rand::{RngCore, rngs::OsRng};
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use ciphersuite::{Ciphersuite, group::ff::Field}; use ciphersuite::{Ciphersuite, group::ff::Field};
use scale::Encode;
use tendermint::{ use tendermint::{
time::CanonicalInstant, time::CanonicalInstant,
round::RoundData, round::RoundData,
@@ -52,7 +50,10 @@ async fn invalid_valid_round() {
async move { async move {
let data = Data::Proposal(valid_round, TendermintBlock(vec![])); let data = Data::Proposal(valid_round, TendermintBlock(vec![]));
let signed = signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, data).await; let signed = signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, data).await;
(signed.clone(), TendermintTx::SlashEvidence(Evidence::InvalidValidRound(signed.encode()))) (
signed.clone(),
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(borsh::to_vec(&signed).unwrap())),
)
} }
}; };
@@ -70,7 +71,8 @@ async fn invalid_valid_round() {
let mut random_sig = [0u8; 64]; let mut random_sig = [0u8; 64];
OsRng.fill_bytes(&mut random_sig); OsRng.fill_bytes(&mut random_sig);
signed.sig = random_sig; signed.sig = random_sig;
let tx = TendermintTx::SlashEvidence(Evidence::InvalidValidRound(signed.encode())); let tx =
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(borsh::to_vec(&signed).unwrap()));
// should fail // should fail
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err()); assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
@@ -90,7 +92,10 @@ async fn invalid_precommit_signature() {
let signed = let signed =
signed_from_data::<N>(signer.clone().into(), signer_id, 1, 0, Data::Precommit(precommit)) signed_from_data::<N>(signer.clone().into(), signer_id, 1, 0, Data::Precommit(precommit))
.await; .await;
(signed.clone(), TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(signed.encode()))) (
signed.clone(),
TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(borsh::to_vec(&signed).unwrap())),
)
} }
}; };
@@ -120,7 +125,8 @@ async fn invalid_precommit_signature() {
let mut random_sig = [0u8; 64]; let mut random_sig = [0u8; 64];
OsRng.fill_bytes(&mut random_sig); OsRng.fill_bytes(&mut random_sig);
signed.sig = random_sig; signed.sig = random_sig;
let tx = TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(signed.encode())); let tx =
TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(borsh::to_vec(&signed).unwrap()));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err()); assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
} }
} }
@@ -138,24 +144,32 @@ async fn evidence_with_prevote() {
// it should fail for all reasons. // it should fail for all reasons.
let mut txs = vec![]; let mut txs = vec![];
txs.push(TendermintTx::SlashEvidence(Evidence::InvalidPrecommit( txs.push(TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id)) borsh::to_vec(
.await &&signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.encode(), .await,
)
.unwrap(),
))); )));
txs.push(TendermintTx::SlashEvidence(Evidence::InvalidValidRound( txs.push(TendermintTx::SlashEvidence(Evidence::InvalidValidRound(
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id)) borsh::to_vec(
.await &signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.encode(), .await,
)
.unwrap(),
))); )));
// Since these require a second message, provide this one again // Since these require a second message, provide this one again
// ConflictingMessages can be fired for actually conflicting Prevotes however // ConflictingMessages can be fired for actually conflicting Prevotes however
txs.push(TendermintTx::SlashEvidence(Evidence::ConflictingMessages( txs.push(TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id)) borsh::to_vec(
.await &signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.encode(), .await,
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id)) )
.await .unwrap(),
.encode(), borsh::to_vec(
&signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await,
)
.unwrap(),
))); )));
txs txs
} }
@@ -189,16 +203,16 @@ async fn conflicting_msgs_evidence_tx() {
// non-conflicting data should fail // non-conflicting data should fail
let signed_1 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![0x11]))).await; let signed_1 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![0x11]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
)); ));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err()); assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
// conflicting data should pass // conflicting data should pass
let signed_2 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![0x22]))).await; let signed_2 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![0x22]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap(); verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap();
@@ -206,16 +220,16 @@ async fn conflicting_msgs_evidence_tx() {
// (except for Precommit) // (except for Precommit)
let signed_2 = signed_for_b_r(0, 1, Data::Proposal(None, TendermintBlock(vec![0x22]))).await; let signed_2 = signed_for_b_r(0, 1, Data::Proposal(None, TendermintBlock(vec![0x22]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err(); verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();
// Proposals for different block numbers should also fail as evidence // Proposals for different block numbers should also fail as evidence
let signed_2 = signed_for_b_r(1, 0, Data::Proposal(None, TendermintBlock(vec![0x22]))).await; let signed_2 = signed_for_b_r(1, 0, Data::Proposal(None, TendermintBlock(vec![0x22]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err(); verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();
} }
@@ -225,16 +239,16 @@ async fn conflicting_msgs_evidence_tx() {
// non-conflicting data should fail // non-conflicting data should fail
let signed_1 = signed_for_b_r(0, 0, Data::Prevote(Some([0x11; 32]))).await; let signed_1 = signed_for_b_r(0, 0, Data::Prevote(Some([0x11; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
)); ));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err()); assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
// conflicting data should pass // conflicting data should pass
let signed_2 = signed_for_b_r(0, 0, Data::Prevote(Some([0x22; 32]))).await; let signed_2 = signed_for_b_r(0, 0, Data::Prevote(Some([0x22; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap(); verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap();
@@ -242,16 +256,16 @@ async fn conflicting_msgs_evidence_tx() {
// (except for Precommit) // (except for Precommit)
let signed_2 = signed_for_b_r(0, 1, Data::Prevote(Some([0x22; 32]))).await; let signed_2 = signed_for_b_r(0, 1, Data::Prevote(Some([0x22; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err(); verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();
// Proposals for different block numbers should also fail as evidence // Proposals for different block numbers should also fail as evidence
let signed_2 = signed_for_b_r(1, 0, Data::Prevote(Some([0x22; 32]))).await; let signed_2 = signed_for_b_r(1, 0, Data::Prevote(Some([0x22; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err(); verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();
} }
@@ -273,8 +287,8 @@ async fn conflicting_msgs_evidence_tx() {
.await; .await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
// update schema so that we don't fail due to invalid signature // update schema so that we don't fail due to invalid signature
@@ -293,8 +307,8 @@ async fn conflicting_msgs_evidence_tx() {
let signed_1 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![]))).await; let signed_1 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![]))).await;
let signed_2 = signed_for_b_r(0, 0, Data::Prevote(None)).await; let signed_2 = signed_for_b_r(0, 0, Data::Prevote(None)).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages( let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(), borsh::to_vec(&signed_1).unwrap(),
signed_2.encode(), borsh::to_vec(&signed_2).unwrap(),
)); ));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err()); assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
} }

View File

@@ -21,7 +21,7 @@ thiserror = { version = "2", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] }
log = { version = "0.4", default-features = false, features = ["std"] } log = { version = "0.4", default-features = false, features = ["std"] }
parity-scale-codec = { version = "3", default-features = false, features = ["std", "derive"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
futures-util = { version = "0.3", default-features = false, features = ["std", "async-await-macro", "sink", "channel"] } futures-util = { version = "0.3", default-features = false, features = ["std", "async-await-macro", "sink", "channel"] }
futures-channel = { version = "0.3", default-features = false, features = ["std", "sink"] } futures-channel = { version = "0.3", default-features = false, features = ["std", "sink"] }

View File

@@ -3,33 +3,41 @@ use std::{sync::Arc, collections::HashSet};
use thiserror::Error; use thiserror::Error;
use parity_scale_codec::{Encode, Decode}; use borsh::{BorshSerialize, BorshDeserialize};
use crate::{SignedMessageFor, SlashEvent, commit_msg}; use crate::{SignedMessageFor, SlashEvent, commit_msg};
/// An alias for a series of traits required for a type to be usable as a validator ID, /// An alias for a series of traits required for a type to be usable as a validator ID,
/// automatically implemented for all types satisfying those traits. /// automatically implemented for all types satisfying those traits.
pub trait ValidatorId: pub trait ValidatorId:
Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + Encode + Decode Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + BorshSerialize + BorshDeserialize
{ {
} }
impl<V: Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + Encode + Decode> ValidatorId #[rustfmt::skip]
for V impl<
V: Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + BorshSerialize + BorshDeserialize,
> ValidatorId for V
{ {
} }
/// An alias for a series of traits required for a type to be usable as a signature, /// An alias for a series of traits required for a type to be usable as a signature,
/// automatically implemented for all types satisfying those traits. /// automatically implemented for all types satisfying those traits.
pub trait Signature: Send + Sync + Clone + PartialEq + Eq + Debug + Encode + Decode {} pub trait Signature:
impl<S: Send + Sync + Clone + PartialEq + Eq + Debug + Encode + Decode> Signature for S {} Send + Sync + Clone + PartialEq + Eq + Debug + BorshSerialize + BorshDeserialize
{
}
impl<S: Send + Sync + Clone + PartialEq + Eq + Debug + BorshSerialize + BorshDeserialize> Signature
for S
{
}
// Type aliases which are distinct according to the type system // Type aliases which are distinct according to the type system
/// A struct containing a Block Number, wrapped to have a distinct type. /// A struct containing a Block Number, wrapped to have a distinct type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
pub struct BlockNumber(pub u64); pub struct BlockNumber(pub u64);
/// A struct containing a round number, wrapped to have a distinct type. /// A struct containing a round number, wrapped to have a distinct type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
pub struct RoundNumber(pub u32); pub struct RoundNumber(pub u32);
/// A signer for a validator. /// A signer for a validator.
@@ -127,7 +135,7 @@ impl<S: SignatureScheme> SignatureScheme for Arc<S> {
/// A commit for a specific block. /// A commit for a specific block.
/// ///
/// The list of validators have weight exceeding the threshold for a valid commit. /// The list of validators have weight exceeding the threshold for a valid commit.
#[derive(PartialEq, Debug, Encode, Decode)] #[derive(PartialEq, Debug, BorshSerialize, BorshDeserialize)]
pub struct Commit<S: SignatureScheme> { pub struct Commit<S: SignatureScheme> {
/// End time of the round which created this commit, used as the start time of the next block. /// End time of the round which created this commit, used as the start time of the next block.
pub end_time: u64, pub end_time: u64,
@@ -185,7 +193,7 @@ impl<W: Weights> Weights for Arc<W> {
} }
/// Simplified error enum representing a block's validity. /// Simplified error enum representing a block's validity.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error, Encode, Decode)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Error, BorshSerialize, BorshDeserialize)]
pub enum BlockError { pub enum BlockError {
/// Malformed block which is wholly invalid. /// Malformed block which is wholly invalid.
#[error("invalid block")] #[error("invalid block")]
@@ -197,9 +205,20 @@ pub enum BlockError {
} }
/// Trait representing a Block. /// Trait representing a Block.
pub trait Block: Send + Sync + Clone + PartialEq + Eq + Debug + Encode + Decode { pub trait Block:
Send + Sync + Clone + PartialEq + Eq + Debug + BorshSerialize + BorshDeserialize
{
// Type used to identify blocks. Presumably a cryptographic hash of the block. // Type used to identify blocks. Presumably a cryptographic hash of the block.
type Id: Send + Sync + Copy + Clone + PartialEq + Eq + AsRef<[u8]> + Debug + Encode + Decode; type Id: Send
+ Sync
+ Copy
+ Clone
+ PartialEq
+ Eq
+ AsRef<[u8]>
+ Debug
+ BorshSerialize
+ BorshDeserialize;
/// Return the deterministic, unique ID for this block. /// Return the deterministic, unique ID for this block.
fn id(&self) -> Self::Id; fn id(&self) -> Self::Id;

View File

@@ -6,7 +6,7 @@ use std::{
collections::{VecDeque, HashMap}, collections::{VecDeque, HashMap},
}; };
use parity_scale_codec::{Encode, Decode, IoReader}; use borsh::{BorshSerialize, BorshDeserialize};
use futures_channel::mpsc; use futures_channel::mpsc;
use futures_util::{ use futures_util::{
@@ -41,14 +41,14 @@ pub fn commit_msg(end_time: u64, id: &[u8]) -> Vec<u8> {
[&end_time.to_le_bytes(), id].concat() [&end_time.to_le_bytes(), id].concat()
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
pub enum Step { pub enum Step {
Propose, Propose,
Prevote, Prevote,
Precommit, Precommit,
} }
#[derive(Clone, Eq, Debug, Encode, Decode)] #[derive(Clone, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub enum Data<B: Block, S: Signature> { pub enum Data<B: Block, S: Signature> {
Proposal(Option<RoundNumber>, B), Proposal(Option<RoundNumber>, B),
Prevote(Option<B::Id>), Prevote(Option<B::Id>),
@@ -90,7 +90,7 @@ impl<B: Block, S: Signature> Data<B, S> {
} }
} }
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] #[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct Message<V: ValidatorId, B: Block, S: Signature> { pub struct Message<V: ValidatorId, B: Block, S: Signature> {
pub sender: V, pub sender: V,
pub block: BlockNumber, pub block: BlockNumber,
@@ -100,7 +100,7 @@ pub struct Message<V: ValidatorId, B: Block, S: Signature> {
} }
/// A signed Tendermint consensus message to be broadcast to the other validators. /// A signed Tendermint consensus message to be broadcast to the other validators.
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] #[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct SignedMessage<V: ValidatorId, B: Block, S: Signature> { pub struct SignedMessage<V: ValidatorId, B: Block, S: Signature> {
pub msg: Message<V, B, S>, pub msg: Message<V, B, S>,
pub sig: S, pub sig: S,
@@ -117,18 +117,18 @@ impl<V: ValidatorId, B: Block, S: Signature> SignedMessage<V, B, S> {
&self, &self,
signer: &Scheme, signer: &Scheme,
) -> bool { ) -> bool {
signer.verify(self.msg.sender, &self.msg.encode(), &self.sig) signer.verify(self.msg.sender, &borsh::to_vec(&self.msg).unwrap(), &self.sig)
} }
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, Decode)] #[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub enum SlashReason { pub enum SlashReason {
FailToPropose, FailToPropose,
InvalidBlock, InvalidBlock,
InvalidProposer, InvalidProposer,
} }
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] #[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub enum Evidence { pub enum Evidence {
ConflictingMessages(Vec<u8>, Vec<u8>), ConflictingMessages(Vec<u8>, Vec<u8>),
InvalidPrecommit(Vec<u8>), InvalidPrecommit(Vec<u8>),
@@ -159,7 +159,7 @@ pub type SignedMessageFor<N> = SignedMessage<
>; >;
pub fn decode_signed_message<N: Network>(mut data: &[u8]) -> Option<SignedMessageFor<N>> { pub fn decode_signed_message<N: Network>(mut data: &[u8]) -> Option<SignedMessageFor<N>> {
SignedMessageFor::<N>::decode(&mut data).ok() SignedMessageFor::<N>::deserialize_reader(&mut data).ok()
} }
fn decode_and_verify_signed_message<N: Network>( fn decode_and_verify_signed_message<N: Network>(
@@ -339,7 +339,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
target: "tendermint", target: "tendermint",
"proposer for block {}, round {round:?} was {} (me: {res})", "proposer for block {}, round {round:?} was {} (me: {res})",
self.block.number.0, self.block.number.0,
hex::encode(proposer.encode()), hex::encode(borsh::to_vec(&proposer).unwrap()),
); );
res res
} }
@@ -420,7 +420,11 @@ impl<N: Network + 'static> TendermintMachine<N> {
// TODO: If the new slash event has evidence, emit to prevent a low-importance slash from // TODO: If the new slash event has evidence, emit to prevent a low-importance slash from
// cancelling emission of high-importance slashes // cancelling emission of high-importance slashes
if !self.block.slashes.contains(&validator) { if !self.block.slashes.contains(&validator) {
log::info!(target: "tendermint", "Slashing validator {}", hex::encode(validator.encode())); log::info!(
target: "tendermint",
"Slashing validator {}",
hex::encode(borsh::to_vec(&validator).unwrap()),
);
self.block.slashes.insert(validator); self.block.slashes.insert(validator);
self.network.slash(validator, slash_event).await; self.network.slash(validator, slash_event).await;
} }
@@ -670,7 +674,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
self self
.slash( .slash(
msg.sender, msg.sender,
SlashEvent::WithEvidence(Evidence::InvalidPrecommit(signed.encode())), SlashEvent::WithEvidence(Evidence::InvalidPrecommit(borsh::to_vec(&signed).unwrap())),
) )
.await; .await;
Err(TendermintError::Malicious)?; Err(TendermintError::Malicious)?;
@@ -741,7 +745,10 @@ impl<N: Network + 'static> TendermintMachine<N> {
self.broadcast(Data::Prevote(None)); self.broadcast(Data::Prevote(None));
} }
self self
.slash(msg.sender, SlashEvent::WithEvidence(Evidence::InvalidValidRound(msg.encode()))) .slash(
msg.sender,
SlashEvent::WithEvidence(Evidence::InvalidValidRound(borsh::to_vec(&msg).unwrap())),
)
.await; .await;
Err(TendermintError::Malicious)?; Err(TendermintError::Malicious)?;
} }
@@ -1032,7 +1039,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
while !messages.is_empty() { while !messages.is_empty() {
self.network.broadcast( self.network.broadcast(
SignedMessageFor::<N>::decode(&mut IoReader(&mut messages)) SignedMessageFor::<N>::deserialize_reader(&mut messages)
.expect("saved invalid message to DB") .expect("saved invalid message to DB")
).await; ).await;
} }
@@ -1057,7 +1064,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
} { } {
if our_message { if our_message {
assert!(sig.is_none()); assert!(sig.is_none());
sig = Some(self.signer.sign(&msg.encode()).await); sig = Some(self.signer.sign(&borsh::to_vec(&msg).unwrap()).await);
} }
let sig = sig.unwrap(); let sig = sig.unwrap();
@@ -1077,7 +1084,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
let message_tape_key = message_tape_key(self.genesis); let message_tape_key = message_tape_key(self.genesis);
let mut txn = self.db.txn(); let mut txn = self.db.txn();
let mut message_tape = txn.get(&message_tape_key).unwrap_or(vec![]); let mut message_tape = txn.get(&message_tape_key).unwrap_or(vec![]);
message_tape.extend(signed_msg.encode()); signed_msg.serialize(&mut message_tape).unwrap();
txn.put(&message_tape_key, message_tape); txn.put(&message_tape_key, message_tape);
txn.commit(); txn.commit();
} }

View File

@@ -1,7 +1,5 @@
use std::{sync::Arc, collections::HashMap}; use std::{sync::Arc, collections::HashMap};
use parity_scale_codec::Encode;
use crate::{ext::*, RoundNumber, Step, DataFor, SignedMessageFor, Evidence}; use crate::{ext::*, RoundNumber, Step, DataFor, SignedMessageFor, Evidence};
type RoundLog<N> = HashMap<<N as Network>::ValidatorId, HashMap<Step, SignedMessageFor<N>>>; type RoundLog<N> = HashMap<<N as Network>::ValidatorId, HashMap<Step, SignedMessageFor<N>>>;
@@ -39,7 +37,10 @@ impl<N: Network> MessageLog<N> {
target: "tendermint", target: "tendermint",
"Validator sent multiple messages for the same block + round + step" "Validator sent multiple messages for the same block + round + step"
); );
Err(Evidence::ConflictingMessages(existing.encode(), signed.encode()))?; Err(Evidence::ConflictingMessages(
borsh::to_vec(&existing).unwrap(),
borsh::to_vec(&signed).unwrap(),
))?;
} }
return Ok(false); return Ok(false);
} }

View File

@@ -4,7 +4,7 @@ use std::{
time::{UNIX_EPOCH, SystemTime, Duration}, time::{UNIX_EPOCH, SystemTime, Duration},
}; };
use parity_scale_codec::{Encode, Decode}; use borsh::{BorshSerialize, BorshDeserialize};
use futures_util::sink::SinkExt; use futures_util::sink::SinkExt;
use tokio::{sync::RwLock, time::sleep}; use tokio::{sync::RwLock, time::sleep};
@@ -89,7 +89,7 @@ impl Weights for TestWeights {
} }
} }
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] #[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
struct TestBlock { struct TestBlock {
id: TestBlockId, id: TestBlockId,
valid: Result<(), BlockError>, valid: Result<(), BlockError>,

View File

@@ -21,7 +21,6 @@ workspace = true
zeroize = { version = "^1.5", default-features = false, features = ["std"] } zeroize = { version = "^1.5", default-features = false, features = ["std"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] } rand_core = { version = "0.6", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
blake2 = { version = "0.11.0-rc.0", default-features = false, features = ["alloc"] } blake2 = { version = "0.11.0-rc.0", default-features = false, features = ["alloc"] }
@@ -30,7 +29,7 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = fals
dkg = { path = "../../crypto/dkg", default-features = false, features = ["std"] } dkg = { path = "../../crypto/dkg", default-features = false, features = ["std"] }
schnorr = { package = "schnorr-signatures", path = "../../crypto/schnorr", default-features = false, features = ["std"] } schnorr = { package = "schnorr-signatures", path = "../../crypto/schnorr", default-features = false, features = ["std"] }
serai-client = { path = "../../substrate/client", default-features = false, features = ["serai"] } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
serai-db = { path = "../../common/db" } serai-db = { path = "../../common/db" }
serai-task = { path = "../../common/task", version = "0.1" } serai-task = { path = "../../common/task", version = "0.1" }

View File

@@ -1,9 +1,8 @@
use std::collections::HashMap; use std::collections::HashMap;
use scale::Encode;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_client::{primitives::SeraiAddress, validator_sets::primitives::ExternalValidatorSet}; use serai_primitives::{address::SeraiAddress, validator_sets::primitives::ExternalValidatorSet};
use messages::sign::{VariantSignId, SignId}; use messages::sign::{VariantSignId, SignId};
@@ -14,7 +13,7 @@ use serai_cosign::CosignIntent;
use crate::transaction::SigningProtocolRound; use crate::transaction::SigningProtocolRound;
/// A topic within the database which the group participates in /// A topic within the database which the group participates in
#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, BorshSerialize, BorshDeserialize)] #[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub enum Topic { pub enum Topic {
/// Vote to remove a participant /// Vote to remove a participant
RemoveParticipant { RemoveParticipant {
@@ -123,7 +122,7 @@ impl Topic {
Topic::DkgConfirmation { attempt, round: _ } => Some({ Topic::DkgConfirmation { attempt, round: _ } => Some({
let id = { let id = {
let mut id = [0; 32]; let mut id = [0; 32];
let encoded_set = set.encode(); let encoded_set = borsh::to_vec(set).unwrap();
id[.. encoded_set.len()].copy_from_slice(&encoded_set); id[.. encoded_set.len()].copy_from_slice(&encoded_set);
VariantSignId::Batch(id) VariantSignId::Batch(id)
}; };

View File

@@ -8,9 +8,9 @@ use std::collections::HashMap;
use ciphersuite::group::GroupEncoding; use ciphersuite::group::GroupEncoding;
use dkg::Participant; use dkg::Participant;
use serai_client::{ use serai_primitives::{
primitives::SeraiAddress, address::SeraiAddress,
validator_sets::primitives::{ExternalValidatorSet, Slash}, validator_sets::{ExternalValidatorSet, Slash},
}; };
use serai_db::*; use serai_db::*;

View File

@@ -12,10 +12,9 @@ use ciphersuite::{
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use schnorr::SchnorrSignature; use schnorr::SchnorrSignature;
use scale::Encode;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_client::{primitives::SeraiAddress, validator_sets::primitives::MAX_KEY_SHARES_PER_SET}; use serai_primitives::{addess::SeraiAddress, validator_sets::MAX_KEY_SHARES_PER_SET};
use messages::sign::VariantSignId; use messages::sign::VariantSignId;
@@ -29,7 +28,7 @@ use tributary_sdk::{
use crate::db::Topic; use crate::db::Topic;
/// The round this data is for, within a signing protocol. /// The round this data is for, within a signing protocol.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, BorshSerialize, BorshDeserialize)] #[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub enum SigningProtocolRound { pub enum SigningProtocolRound {
/// A preprocess. /// A preprocess.
Preprocess, Preprocess,
@@ -242,19 +241,20 @@ impl TransactionTrait for Transaction {
fn kind(&self) -> TransactionKind { fn kind(&self) -> TransactionKind {
match self { match self {
Transaction::RemoveParticipant { participant, signed } => TransactionKind::Signed( Transaction::RemoveParticipant { participant, signed } => TransactionKind::Signed(
(b"RemoveParticipant", participant).encode(), borsh::to_vec(&(b"RemoveParticipant".as_slice(), participant)).unwrap(),
signed.to_tributary_signed(0), signed.to_tributary_signed(0),
), ),
Transaction::DkgParticipation { signed, .. } => { Transaction::DkgParticipation { signed, .. } => TransactionKind::Signed(
TransactionKind::Signed(b"DkgParticipation".encode(), signed.to_tributary_signed(0)) borsh::to_vec(b"DkgParticipation".as_slice()).unwrap(),
} signed.to_tributary_signed(0),
),
Transaction::DkgConfirmationPreprocess { attempt, signed, .. } => TransactionKind::Signed( Transaction::DkgConfirmationPreprocess { attempt, signed, .. } => TransactionKind::Signed(
(b"DkgConfirmation", attempt).encode(), borsh::to_vec(b"DkgConfirmation".as_slice(), attempt).unwrap(),
signed.to_tributary_signed(0), signed.to_tributary_signed(0),
), ),
Transaction::DkgConfirmationShare { attempt, signed, .. } => TransactionKind::Signed( Transaction::DkgConfirmationShare { attempt, signed, .. } => TransactionKind::Signed(
(b"DkgConfirmation", attempt).encode(), borsh::to_vec(b"DkgConfirmation".as_slice(), attempt).unwrap(),
signed.to_tributary_signed(1), signed.to_tributary_signed(1),
), ),
@@ -264,13 +264,14 @@ impl TransactionTrait for Transaction {
Transaction::Batch { .. } => TransactionKind::Provided("Batch"), Transaction::Batch { .. } => TransactionKind::Provided("Batch"),
Transaction::Sign { id, attempt, round, signed, .. } => TransactionKind::Signed( Transaction::Sign { id, attempt, round, signed, .. } => TransactionKind::Signed(
(b"Sign", id, attempt).encode(), borsh::to_vec(b"Sign".as_slice(), id, attempt).unwrap(),
signed.to_tributary_signed(round.nonce()), signed.to_tributary_signed(round.nonce()),
), ),
Transaction::SlashReport { signed, .. } => { Transaction::SlashReport { signed, .. } => TransactionKind::Signed(
TransactionKind::Signed(b"SlashReport".encode(), signed.to_tributary_signed(0)) borsh::to_vec(b"SlashReport".as_slice()).unwrap(),
} signed.to_tributary_signed(0),
),
} }
} }

View File

@@ -133,5 +133,5 @@ allow-git = [
"https://github.com/kayabaNerve/hybrid-array", "https://github.com/kayabaNerve/hybrid-array",
"https://github.com/kayabaNerve/elliptic-curves", "https://github.com/kayabaNerve/elliptic-curves",
"https://github.com/monero-oxide/monero-oxide", "https://github.com/monero-oxide/monero-oxide",
"https://github.com/serai-dex/polkadot-sdk", "https://github.com/serai-dex/patch-polkadot-sdk",
] ]

View File

@@ -7,7 +7,7 @@ use dalek_ff_group::Ristretto;
pub(crate) use ciphersuite::{group::GroupEncoding, Ciphersuite}; pub(crate) use ciphersuite::{group::GroupEncoding, Ciphersuite};
pub(crate) use schnorr_signatures::SchnorrSignature; pub(crate) use schnorr_signatures::SchnorrSignature;
pub(crate) use serai_primitives::ExternalNetworkId; pub(crate) use serai_primitives::network_id::ExternalNetworkId;
pub(crate) use tokio::{ pub(crate) use tokio::{
io::{AsyncReadExt, AsyncWriteExt}, io::{AsyncReadExt, AsyncWriteExt},
@@ -198,7 +198,7 @@ async fn main() {
KEYS.write().unwrap().insert(service, key); KEYS.write().unwrap().insert(service, key);
let mut queues = QUEUES.write().unwrap(); let mut queues = QUEUES.write().unwrap();
if service == Service::Coordinator { if service == Service::Coordinator {
for network in serai_primitives::EXTERNAL_NETWORKS { for network in ExternalNetworkId::all() {
queues.insert( queues.insert(
(service, Service::Processor(network)), (service, Service::Processor(network)),
RwLock::new(Queue(db.clone(), service, Service::Processor(network))), RwLock::new(Queue(db.clone(), service, Service::Processor(network))),
@@ -213,12 +213,13 @@ async fn main() {
}; };
// Make queues for each ExternalNetworkId // Make queues for each ExternalNetworkId
for network in serai_primitives::EXTERNAL_NETWORKS { for network in ExternalNetworkId::all() {
// Use a match so we error if the list of NetworkIds changes // Use a match so we error if the list of NetworkIds changes
let Some(key) = read_key(match network { let Some(key) = read_key(match network {
ExternalNetworkId::Bitcoin => "BITCOIN_KEY", ExternalNetworkId::Bitcoin => "BITCOIN_KEY",
ExternalNetworkId::Ethereum => "ETHEREUM_KEY", ExternalNetworkId::Ethereum => "ETHEREUM_KEY",
ExternalNetworkId::Monero => "MONERO_KEY", ExternalNetworkId::Monero => "MONERO_KEY",
_ => panic!("unrecognized network"),
}) else { }) else {
continue; continue;
}; };

View File

@@ -4,7 +4,7 @@ use ciphersuite::{group::GroupEncoding, Ciphersuite};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_primitives::ExternalNetworkId; use serai_primitives::network_id::ExternalNetworkId;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
pub enum Service { pub enum Service {

View File

@@ -20,14 +20,13 @@ workspace = true
zeroize = { version = "1", default-features = false, features = ["std"] } zeroize = { version = "1", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] } ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] }
dkg = { package = "dkg-evrf", path = "../../crypto/dkg/evrf", default-features = false, features = ["std", "ristretto"] } dkg = { package = "dkg-evrf", path = "../../crypto/dkg/evrf", default-features = false, features = ["std", "ristretto"] }
serai-client = { path = "../../substrate/client", default-features = false } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
serai-cosign = { path = "../../coordinator/cosign" } serai-cosign = { package = "serai-cosign-types", path = "../../coordinator/cosign/types" }
log = { version = "0.4", default-features = false, features = ["std"] } log = { version = "0.4", default-features = false, features = ["std"] }
env_logger = { version = "0.10", default-features = false, features = ["humantime"] } env_logger = { version = "0.10", default-features = false, features = ["humantime"] }

View File

@@ -3,10 +3,10 @@ use std::sync::{LazyLock, Arc, Mutex};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use serai_client::{ use serai_primitives::{
primitives::Signature, crypto::Signature,
validator_sets::primitives::{Session, SlashReport}, validator_sets::{Session, SlashReport},
in_instructions::primitives::SignedBatch, instructions::SignedBatch,
}; };
use serai_cosign::SignedCosign; use serai_cosign::SignedCosign;

View File

@@ -8,7 +8,7 @@ use ciphersuite::{
}; };
use dkg::{Curves, Ristretto}; use dkg::{Curves, Ristretto};
use serai_client::validator_sets::primitives::Session; use serai_primitives::validator_sets::Session;
use serai_env as env; use serai_env as env;
use serai_db::{Get, DbTxn, Db as DbTrait, create_db, db_channel}; use serai_db::{Get, DbTxn, Db as DbTrait, create_db, db_channel};

View File

@@ -20,7 +20,6 @@ workspace = true
rand_core = { version = "0.6", default-features = false } rand_core = { version = "0.6", default-features = false }
hex = { version = "0.4", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] } ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] }

View File

@@ -10,12 +10,15 @@ use bitcoin_serai::{
wallet::ReceivedOutput as WalletOutput, wallet::ReceivedOutput as WalletOutput,
}; };
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::Get; use serai_db::Get;
use serai_client::{ use serai_client::{
primitives::{ExternalCoin, Amount, ExternalBalance, ExternalAddress}, primitives::{
coin::ExternalCoin,
balance::{Amount, ExternalBalance},
address::ExternalAddress,
},
networks::bitcoin::Address, networks::bitcoin::Address,
}; };
@@ -26,7 +29,7 @@ use crate::{
scan::{offsets_for_key, presumed_origin, extract_serai_data}, scan::{offsets_for_key, presumed_origin, extract_serai_data},
}; };
#[derive(Clone, PartialEq, Eq, Hash, Debug, Encode, Decode, BorshSerialize, BorshDeserialize)] #[derive(Clone, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
pub(crate) struct OutputId([u8; 36]); pub(crate) struct OutputId([u8; 36]);
impl Default for OutputId { impl Default for OutputId {
fn default() -> Self { fn default() -> Self {
@@ -139,7 +142,7 @@ impl ReceivedOutput<<Secp256k1 as Ciphersuite>::G, Address> for Output {
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> { fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
self.kind.write(writer)?; self.kind.write(writer)?;
let presumed_origin: Option<ExternalAddress> = self.presumed_origin.clone().map(Into::into); let presumed_origin: Option<ExternalAddress> = self.presumed_origin.clone().map(Into::into);
writer.write_all(&presumed_origin.encode())?; presumed_origin.serialize(writer)?;
self.output.write(writer)?; self.output.write(writer)?;
writer.write_all(&u16::try_from(self.data.len()).unwrap().to_le_bytes())?; writer.write_all(&u16::try_from(self.data.len()).unwrap().to_le_bytes())?;
writer.write_all(&self.data) writer.write_all(&self.data)
@@ -149,7 +152,7 @@ impl ReceivedOutput<<Secp256k1 as Ciphersuite>::G, Address> for Output {
Ok(Output { Ok(Output {
kind: OutputType::read(reader)?, kind: OutputType::read(reader)?,
presumed_origin: { presumed_origin: {
Option::<ExternalAddress>::decode(&mut IoReader(&mut reader)) Option::<ExternalAddress>::deserialize_reader(&mut reader)
.map_err(|e| io::Error::other(format!("couldn't decode ExternalAddress: {e:?}")))? .map_err(|e| io::Error::other(format!("couldn't decode ExternalAddress: {e:?}")))?
.map(|address| { .map(|address| {
Address::try_from(address) Address::try_from(address)

View File

@@ -2,7 +2,7 @@ use core::future::Future;
use bitcoin_serai::rpc::{RpcError, Rpc as BRpc}; use bitcoin_serai::rpc::{RpcError, Rpc as BRpc};
use serai_client::primitives::{ExternalNetworkId, ExternalCoin, Amount}; use serai_client::primitives::{network_id::ExternalNetworkId, coin::ExternalCoin, balance::Amount};
use serai_db::Db; use serai_db::Db;
use scanner::ScannerFeed; use scanner::ScannerFeed;

View File

@@ -9,7 +9,7 @@ use bitcoin_serai::{
}; };
use serai_client::{ use serai_client::{
primitives::{ExternalCoin, Amount}, primitives::{coin::ExternalCoin, balance::Amount},
networks::bitcoin::Address, networks::bitcoin::Address,
}; };

View File

@@ -21,7 +21,6 @@ rand_core = { version = "0.6", default-features = false }
const-hex = { version = "1", default-features = false, features = ["std"] } const-hex = { version = "1", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] } ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] }

View File

@@ -41,7 +41,6 @@ ethereum-primitives = { package = "serai-processor-ethereum-primitives", path =
ethereum-deployer = { package = "serai-processor-ethereum-deployer", path = "../deployer", default-features = false } ethereum-deployer = { package = "serai-processor-ethereum-deployer", path = "../deployer", default-features = false }
erc20 = { package = "serai-processor-ethereum-erc20", path = "../erc20", default-features = false } erc20 = { package = "serai-processor-ethereum-erc20", path = "../erc20", default-features = false }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
serai-client = { path = "../../../substrate/client", default-features = false, features = ["ethereum"] } serai-client = { path = "../../../substrate/client", default-features = false, features = ["ethereum"] }
futures-util = { version = "0.3", default-features = false, features = ["std"] } futures-util = { version = "0.3", default-features = false, features = ["std"] }

View File

@@ -19,7 +19,7 @@ interface IRouterWithoutCollisions {
/// @param from The address which called `inInstruction` and caused this event to be emitted /// @param from The address which called `inInstruction` and caused this event to be emitted
/// @param coin The coin transferred in /// @param coin The coin transferred in
/// @param amount The amount of the coin transferred in /// @param amount The amount of the coin transferred in
/// @param instruction The Shorthand-encoded InInstruction for Serai to decode and handle /// @param instruction The encoded `RefundableInInstruction` for Serai to decode and handle
event InInstruction( event InInstruction(
address indexed from, address indexed coin, uint256 amount, bytes instruction address indexed from, address indexed coin, uint256 amount, bytes instruction
); );
@@ -81,8 +81,8 @@ interface IRouterWithoutCollisions {
/// @param coin The coin to transfer in (address(0) if Ether) /// @param coin The coin to transfer in (address(0) if Ether)
/// @param amount The amount to transfer in (msg.value if Ether) /// @param amount The amount to transfer in (msg.value if Ether)
/** /**
* @param instruction The Shorthand-encoded InInstruction for Serai to associate with this * @param instruction The encoded `RefundableInInstruction` for Serai to associate with this
* transfer in * transfer in
*/ */
// Re-entrancy doesn't bork this function // Re-entrancy doesn't bork this function
// slither-disable-next-line reentrancy-events // slither-disable-next-line reentrancy-events

View File

@@ -293,7 +293,7 @@ contract Router is IRouterWithoutCollisions {
/// @param coin The coin to transfer in (address(0) if Ether) /// @param coin The coin to transfer in (address(0) if Ether)
/// @param amount The amount to transfer in (msg.value if Ether) /// @param amount The amount to transfer in (msg.value if Ether)
/** /**
* @param instruction The Shorthand-encoded InInstruction for Serai to associate with this * @param instruction The encoded `RefundableInInstruction` for Serai to associate with this
* transfer in * transfer in
*/ */
// This function doesn't require nonReentrant as re-entrancy isn't an issue with this function // This function doesn't require nonReentrant as re-entrancy isn't an issue with this function

View File

@@ -21,9 +21,8 @@ use alloy_rpc_types_eth::{BlockId, Log, Filter, TransactionInput, TransactionReq
use alloy_transport::{TransportErrorKind, RpcError}; use alloy_transport::{TransportErrorKind, RpcError};
use alloy_provider::{Provider, RootProvider}; use alloy_provider::{Provider, RootProvider};
use scale::Encode;
use serai_client::{ use serai_client::{
in_instructions::primitives::Shorthand, networks::ethereum::Address as SeraiAddress, primitives::instructions::RefundableInInstruction, networks::ethereum::Address as SeraiAddress,
}; };
use ethereum_primitives::LogIndex; use ethereum_primitives::LogIndex;
@@ -351,20 +350,29 @@ impl Router {
} }
} }
/// Construct a transaction to send coins with an InInstruction to Serai. /// Construct a transaction to send coins with an `InInstruction` to Serai.
/// ///
/// If coin is an ERC20, this will not create a transaction calling the Router but will create a /// If coin is an ERC20, this will not create a transaction calling the Router but will create a
/// top-level transfer of the ERC20 to the Router. This avoids needing to call `approve` before /// top-level transfer of the ERC20 to the Router. This avoids needing to call `approve` before
/// publishing the transaction calling the Router. /// publishing the transaction calling the Router.
/// ///
/// The gas limit and gas price are not set and are left to the caller. /// The gas limit and gas price are not set and are left to the caller.
pub fn in_instruction(&self, coin: Coin, amount: U256, in_instruction: &Shorthand) -> TxLegacy { pub fn in_instruction(
&self,
coin: Coin,
amount: U256,
in_instruction: &RefundableInInstruction,
) -> TxLegacy {
match coin { match coin {
Coin::Ether => TxLegacy { Coin::Ether => TxLegacy {
to: self.address.into(), to: self.address.into(),
input: abi::inInstructionCall::new((coin.into(), amount, in_instruction.encode().into())) input: abi::inInstructionCall::new((
.abi_encode() coin.into(),
.into(), amount,
borsh::to_vec(&in_instruction).unwrap().into(),
))
.abi_encode()
.into(),
value: amount, value: amount,
..Default::default() ..Default::default()
}, },
@@ -373,7 +381,7 @@ impl Router {
input: erc20::transferWithInInstructionCall::new(( input: erc20::transferWithInInstructionCall::new((
self.address, self.address,
amount, amount,
in_instruction.encode().into(), borsh::to_vec(&in_instruction).unwrap().into(),
)) ))
.abi_encode() .abi_encode()
.into(), .into(),

View File

@@ -5,12 +5,9 @@ use alloy_sol_types::SolCall;
use alloy_consensus::{TxLegacy, Signed}; use alloy_consensus::{TxLegacy, Signed};
use scale::Encode;
use serai_client::{ use serai_client::{
primitives::SeraiAddress, primitives::SeraiAddress,
in_instructions::primitives::{ primitives::instructions::{InInstruction as SeraiInInstruction, RefundableInInstruction},
InInstruction as SeraiInInstruction, RefundableInInstruction, Shorthand,
},
}; };
use ethereum_primitives::LogIndex; use ethereum_primitives::LogIndex;
@@ -18,14 +15,14 @@ use ethereum_primitives::LogIndex;
use crate::{InInstruction, tests::*}; use crate::{InInstruction, tests::*};
impl Test { impl Test {
pub(crate) fn in_instruction() -> Shorthand { pub(crate) fn in_instruction() -> RefundableInInstruction {
Shorthand::Raw(RefundableInInstruction { RefundableInInstruction {
origin: None, origin: None,
instruction: SeraiInInstruction::Transfer(SeraiAddress([0xff; 32])), instruction: SeraiInInstruction::Transfer(SeraiAddress([0xff; 32])),
}) }
} }
pub(crate) fn eth_in_instruction_tx(&self) -> (Coin, U256, Shorthand, TxLegacy) { pub(crate) fn eth_in_instruction_tx(&self) -> (Coin, U256, RefundableInInstruction, TxLegacy) {
let coin = Coin::Ether; let coin = Coin::Ether;
let amount = U256::from(1); let amount = U256::from(1);
let shorthand = Self::in_instruction(); let shorthand = Self::in_instruction();
@@ -42,7 +39,7 @@ impl Test {
tx: Signed<TxLegacy>, tx: Signed<TxLegacy>,
coin: Coin, coin: Coin,
amount: U256, amount: U256,
shorthand: &Shorthand, shorthand: &RefundableInInstruction,
) { ) {
let receipt = ethereum_test_primitives::publish_tx(&self.provider, tx.clone()).await; let receipt = ethereum_test_primitives::publish_tx(&self.provider, tx.clone()).await;
assert!(receipt.status()); assert!(receipt.status());
@@ -81,7 +78,7 @@ impl Test {
from: tx.recover_signer().unwrap(), from: tx.recover_signer().unwrap(),
coin, coin,
amount, amount,
data: shorthand.encode(), data: borsh::to_vec(&shorthand).unwrap(),
} }
); );
} }
@@ -140,9 +137,13 @@ async fn test_erc20_router_in_instruction() {
gas_limit: 1_000_000, gas_limit: 1_000_000,
to: test.router.address().into(), to: test.router.address().into(),
value: U256::ZERO, value: U256::ZERO,
input: crate::abi::inInstructionCall::new((coin.into(), amount, shorthand.encode().into())) input: crate::abi::inInstructionCall::new((
.abi_encode() coin.into(),
.into(), amount,
borsh::to_vec(shorthand).unwrap().into(),
))
.abi_encode()
.into(),
}; };
// If no `approve` was granted, this should fail // If no `approve` was granted, this should fail

View File

@@ -14,7 +14,7 @@ use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_client::ClientBuilder; use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider}; use alloy_provider::{Provider, RootProvider};
use serai_client::validator_sets::primitives::Session; use serai_client::primitives::validator_sets::Session;
use serai_env as env; use serai_env as env;
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};

View File

@@ -1,6 +1,6 @@
use alloy_core::primitives::{FixedBytes, Address}; use alloy_core::primitives::{FixedBytes, Address};
use serai_client::primitives::Amount; use serai_client::primitives::balance::Amount;
pub(crate) mod output; pub(crate) mod output;
pub(crate) mod transaction; pub(crate) mod transaction;

View File

@@ -5,11 +5,14 @@ use ciphersuite_kp256::Secp256k1;
use alloy_core::primitives::U256; use alloy_core::primitives::U256;
use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_client::{ use serai_client::{
primitives::{ExternalNetworkId, ExternalCoin, Amount, ExternalBalance}, primitives::{
network_id::ExternalNetworkId,
coin::ExternalCoin,
balance::{Amount, ExternalBalance},
},
networks::ethereum::Address, networks::ethereum::Address,
}; };
@@ -39,9 +42,7 @@ fn amount_to_serai_amount(coin: ExternalCoin, amount: U256) -> Amount {
Amount(u64::try_from(amount / divisor).unwrap()) Amount(u64::try_from(amount / divisor).unwrap())
} }
#[derive( #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode, BorshSerialize, BorshDeserialize,
)]
pub(crate) struct OutputId(pub(crate) [u8; 40]); pub(crate) struct OutputId(pub(crate) [u8; 40]);
impl Default for OutputId { impl Default for OutputId {
fn default() -> Self { fn default() -> Self {

View File

@@ -6,7 +6,7 @@ use alloy_rpc_types_eth::{Header, BlockNumberOrTag};
use alloy_transport::{RpcError, TransportErrorKind}; use alloy_transport::{RpcError, TransportErrorKind};
use alloy_provider::{Provider, RootProvider}; use alloy_provider::{Provider, RootProvider};
use serai_client::primitives::{ExternalNetworkId, ExternalCoin, Amount}; use serai_client::primitives::{network_id::ExternalNetworkId, coin::ExternalCoin, balance::Amount};
use tokio::task::JoinSet; use tokio::task::JoinSet;

View File

@@ -3,7 +3,7 @@ use std::collections::HashMap;
use alloy_core::primitives::U256; use alloy_core::primitives::U256;
use serai_client::{ use serai_client::{
primitives::{ExternalNetworkId, ExternalCoin, ExternalBalance}, primitives::{network_id::ExternalNetworkId, coin::ExternalCoin, balance::ExternalBalance},
networks::ethereum::Address, networks::ethereum::Address,
}; };

View File

@@ -15,7 +15,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-machete] [package.metadata.cargo-machete]
ignored = ["borsh", "scale"] ignored = ["borsh"]
[lints] [lints]
workspace = true workspace = true
@@ -29,7 +29,6 @@ serai-primitives = { path = "../../substrate/primitives", default-features = fal
log = { version = "0.4", default-features = false, features = ["std"] } log = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-db = { path = "../../common/db" } serai-db = { path = "../../common/db" }

View File

@@ -7,7 +7,7 @@ use frost::{
sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine},
}; };
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use serai_db::{Get, DbTxn, Db, create_db}; use serai_db::{Get, DbTxn, Db, create_db};
use messages::sign::{VariantSignId, SignId, ProcessorMessage}; use messages::sign::{VariantSignId, SignId, ProcessorMessage};

View File

@@ -6,7 +6,7 @@ use std::collections::HashMap;
use frost::{Participant, sign::PreprocessMachine}; use frost::{Participant, sign::PreprocessMachine};
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};
use messages::sign::{VariantSignId, ProcessorMessage, CoordinatorMessage}; use messages::sign::{VariantSignId, ProcessorMessage, CoordinatorMessage};

View File

@@ -14,9 +14,6 @@ rust-version = "1.89"
all-features = true all-features = true
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-machete]
ignored = ["scale"]
[lints] [lints]
workspace = true workspace = true
@@ -38,7 +35,6 @@ dkg = { package = "dkg-evrf", path = "../../crypto/dkg/evrf", default-features =
serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
# Encoders # Encoders
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
# Application # Application

View File

@@ -6,7 +6,7 @@ use zeroize::Zeroizing;
use ciphersuite::{group::GroupEncoding, Ciphersuite}; use ciphersuite::{group::GroupEncoding, Ciphersuite};
use dkg::*; use dkg::*;
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn}; use serai_db::{Get, DbTxn};
@@ -36,7 +36,7 @@ pub(crate) struct Participations {
} }
mod _db { mod _db {
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};

View File

@@ -6,7 +6,7 @@ use std::{
use dkg::*; use dkg::*;
use serai_validator_sets_primitives::MAX_KEY_SHARES_PER_SET; use serai_primitives::constants::MAX_KEY_SHARES_PER_SET;
/// A cache of the generators used by the eVRF DKG. /// A cache of the generators used by the eVRF DKG.
/// ///

View File

@@ -17,7 +17,7 @@ use ciphersuite::{
}; };
use dkg::*; use dkg::*;
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use messages::key_gen::*; use messages::key_gen::*;
use serai_db::{Get, DbTxn}; use serai_db::{Get, DbTxn};

View File

@@ -20,11 +20,10 @@ workspace = true
[dependencies] [dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
dkg = { path = "../../crypto/dkg", default-features = false, features = ["std", "borsh"] } dkg = { path = "../../crypto/dkg", default-features = false, features = ["std", "borsh"] }
serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
serai-cosign = { path = "../../coordinator/cosign", default-features = false } serai-cosign = { package = "serai-cosign-types", path = "../../coordinator/cosign/types" }

View File

@@ -1,15 +1,16 @@
use core::fmt; use core::fmt;
use std::collections::HashMap; use std::collections::HashMap;
use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use dkg::Participant; use dkg::Participant;
use serai_primitives::BlockHash; use serai_primitives::{
use validator_sets_primitives::{Session, KeyPair, SlashReport}; BlockHash,
use coins_primitives::OutInstructionWithBalance; crypto::KeyPair,
use in_instructions_primitives::SignedBatch; validator_sets::{Session, SlashReport},
instructions::{SignedBatch, OutInstructionWithBalance},
};
use serai_cosign::{Cosign, SignedCosign}; use serai_cosign::{Cosign, SignedCosign};
@@ -87,7 +88,7 @@ pub mod key_gen {
pub mod sign { pub mod sign {
use super::*; use super::*;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Encode, Decode, BorshSerialize, BorshDeserialize)] #[derive(Clone, Copy, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize)]
pub enum VariantSignId { pub enum VariantSignId {
Cosign(u64), Cosign(u64),
Batch([u8; 32]), Batch([u8; 32]),
@@ -111,9 +112,7 @@ pub mod sign {
} }
} }
#[derive( #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode, BorshSerialize, BorshDeserialize,
)]
pub struct SignId { pub struct SignId {
pub session: Session, pub session: Session,
pub id: VariantSignId, pub id: VariantSignId,
@@ -295,7 +294,7 @@ impl CoordinatorMessage {
let (sub, id) = match msg { let (sub, id) = match msg {
// Unique since we only have one attempt per session // Unique since we only have one attempt per session
key_gen::CoordinatorMessage::GenerateKey { session, .. } => { key_gen::CoordinatorMessage::GenerateKey { session, .. } => {
(0, borsh::to_vec(session).unwrap()) (0, borsh::to_vec(&session).unwrap())
} }
// Unique since one participation per participant per session // Unique since one participation per participant per session
key_gen::CoordinatorMessage::Participation { session, participant, .. } => { key_gen::CoordinatorMessage::Participation { session, participant, .. } => {
@@ -316,17 +315,19 @@ impl CoordinatorMessage {
}; };
let mut res = vec![COORDINATOR_UID, TYPE_SIGN_UID, sub]; let mut res = vec![COORDINATOR_UID, TYPE_SIGN_UID, sub];
res.extend(id.encode()); res.extend(borsh::to_vec(&id).unwrap());
res res
} }
CoordinatorMessage::Coordinator(msg) => { CoordinatorMessage::Coordinator(msg) => {
let (sub, id) = match msg { let (sub, id) = match msg {
// We only cosign a block once, and Reattempt is a separate message // We only cosign a block once, and Reattempt is a separate message
coordinator::CoordinatorMessage::CosignSubstrateBlock { cosign, .. } => { coordinator::CoordinatorMessage::CosignSubstrateBlock { cosign, .. } => {
(0, cosign.block_number.encode()) (0, borsh::to_vec(&cosign.block_number).unwrap())
} }
// We only sign one slash report, and Reattempt is a separate message // We only sign one slash report, and Reattempt is a separate message
coordinator::CoordinatorMessage::SignSlashReport { session, .. } => (1, session.encode()), coordinator::CoordinatorMessage::SignSlashReport { session, .. } => {
(1, borsh::to_vec(&session).unwrap())
}
}; };
let mut res = vec![COORDINATOR_UID, TYPE_COORDINATOR_UID, sub]; let mut res = vec![COORDINATOR_UID, TYPE_COORDINATOR_UID, sub];
@@ -335,10 +336,14 @@ impl CoordinatorMessage {
} }
CoordinatorMessage::Substrate(msg) => { CoordinatorMessage::Substrate(msg) => {
let (sub, id) = match msg { let (sub, id) = match msg {
substrate::CoordinatorMessage::SetKeys { session, .. } => (0, session.encode()), substrate::CoordinatorMessage::SetKeys { session, .. } => {
substrate::CoordinatorMessage::SlashesReported { session } => (1, session.encode()), (0, borsh::to_vec(&session).unwrap())
}
substrate::CoordinatorMessage::SlashesReported { session } => {
(1, borsh::to_vec(&session).unwrap())
}
substrate::CoordinatorMessage::Block { serai_block_number, .. } => { substrate::CoordinatorMessage::Block { serai_block_number, .. } => {
(2, serai_block_number.encode()) (2, borsh::to_vec(&serai_block_number).unwrap())
} }
}; };
@@ -363,10 +368,10 @@ impl ProcessorMessage {
let (sub, id) = match msg { let (sub, id) = match msg {
// Unique since we only have one participation per session (due to no re-attempts) // Unique since we only have one participation per session (due to no re-attempts)
key_gen::ProcessorMessage::Participation { session, .. } => { key_gen::ProcessorMessage::Participation { session, .. } => {
(0, borsh::to_vec(session).unwrap()) (0, borsh::to_vec(&session).unwrap())
} }
key_gen::ProcessorMessage::GeneratedKeyPair { session, .. } => { key_gen::ProcessorMessage::GeneratedKeyPair { session, .. } => {
(1, borsh::to_vec(session).unwrap()) (1, borsh::to_vec(&session).unwrap())
} }
// Unique since we only blame a participant once (as this is fatal) // Unique since we only blame a participant once (as this is fatal)
key_gen::ProcessorMessage::Blame { session, participant } => { key_gen::ProcessorMessage::Blame { session, participant } => {
@@ -382,11 +387,11 @@ impl ProcessorMessage {
let (sub, id) = match msg { let (sub, id) = match msg {
// Unique since we'll only fatally slash a a participant once // Unique since we'll only fatally slash a a participant once
sign::ProcessorMessage::InvalidParticipant { session, participant } => { sign::ProcessorMessage::InvalidParticipant { session, participant } => {
(0, (session, u16::from(*participant)).encode()) (0, borsh::to_vec(&(session, u16::from(*participant))).unwrap())
} }
// Unique since SignId // Unique since SignId
sign::ProcessorMessage::Preprocesses { id, .. } => (1, id.encode()), sign::ProcessorMessage::Preprocesses { id, .. } => (1, borsh::to_vec(&id).unwrap()),
sign::ProcessorMessage::Shares { id, .. } => (2, id.encode()), sign::ProcessorMessage::Shares { id, .. } => (2, borsh::to_vec(&id).unwrap()),
}; };
let mut res = vec![PROCESSOR_UID, TYPE_SIGN_UID, sub]; let mut res = vec![PROCESSOR_UID, TYPE_SIGN_UID, sub];
@@ -396,10 +401,14 @@ impl ProcessorMessage {
ProcessorMessage::Coordinator(msg) => { ProcessorMessage::Coordinator(msg) => {
let (sub, id) = match msg { let (sub, id) = match msg {
coordinator::ProcessorMessage::CosignedBlock { cosign } => { coordinator::ProcessorMessage::CosignedBlock { cosign } => {
(0, cosign.cosign.block_hash.encode()) (0, borsh::to_vec(&cosign.cosign.block_hash).unwrap())
}
coordinator::ProcessorMessage::SignedBatch { batch, .. } => {
(1, borsh::to_vec(&batch.batch.id()).unwrap())
}
coordinator::ProcessorMessage::SignedSlashReport { session, .. } => {
(2, borsh::to_vec(&session).unwrap())
} }
coordinator::ProcessorMessage::SignedBatch { batch, .. } => (1, batch.batch.id.encode()),
coordinator::ProcessorMessage::SignedSlashReport { session, .. } => (2, session.encode()),
}; };
let mut res = vec![PROCESSOR_UID, TYPE_COORDINATOR_UID, sub]; let mut res = vec![PROCESSOR_UID, TYPE_COORDINATOR_UID, sub];
@@ -408,7 +417,9 @@ impl ProcessorMessage {
} }
ProcessorMessage::Substrate(msg) => { ProcessorMessage::Substrate(msg) => {
let (sub, id) = match msg { let (sub, id) = match msg {
substrate::ProcessorMessage::SubstrateBlockAck { block, .. } => (0, block.encode()), substrate::ProcessorMessage::SubstrateBlockAck { block, .. } => {
(0, borsh::to_vec(&block).unwrap())
}
}; };
let mut res = vec![PROCESSOR_UID, TYPE_SUBSTRATE_UID, sub]; let mut res = vec![PROCESSOR_UID, TYPE_SUBSTRATE_UID, sub];

View File

@@ -21,7 +21,6 @@ rand_core = { version = "0.6", default-features = false }
rand_chacha = { version = "0.3", default-features = false, features = ["std"] } rand_chacha = { version = "0.3", default-features = false, features = ["std"] }
zeroize = { version = "1", default-features = false, features = ["std"] } zeroize = { version = "1", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] } ciphersuite = { path = "../../crypto/ciphersuite", default-features = false, features = ["std"] }

View File

@@ -5,11 +5,13 @@ use dalek_ff_group::Ed25519;
use monero_wallet::WalletOutput; use monero_wallet::WalletOutput;
use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_client::{ use serai_client::{
primitives::{ExternalCoin, Amount, ExternalBalance}, primitives::{
coin::ExternalCoin,
balance::{Amount, ExternalBalance},
},
networks::monero::Address, networks::monero::Address,
}; };
@@ -17,10 +19,7 @@ use primitives::{OutputType, ReceivedOutput};
use crate::{EXTERNAL_SUBADDRESS, BRANCH_SUBADDRESS, CHANGE_SUBADDRESS, FORWARDED_SUBADDRESS}; use crate::{EXTERNAL_SUBADDRESS, BRANCH_SUBADDRESS, CHANGE_SUBADDRESS, FORWARDED_SUBADDRESS};
#[rustfmt::skip] #[derive(Clone, Copy, PartialEq, Eq, Default, Hash, Debug, BorshSerialize, BorshDeserialize)]
#[derive(
Clone, Copy, PartialEq, Eq, Default, Hash, Debug, Encode, Decode, BorshSerialize, BorshDeserialize,
)]
pub(crate) struct OutputId(pub(crate) [u8; 32]); pub(crate) struct OutputId(pub(crate) [u8; 32]);
impl AsRef<[u8]> for OutputId { impl AsRef<[u8]> for OutputId {
fn as_ref(&self) -> &[u8] { fn as_ref(&self) -> &[u8] {

View File

@@ -3,7 +3,7 @@ use core::future::Future;
use monero_wallet::rpc::{RpcError, Rpc as RpcTrait}; use monero_wallet::rpc::{RpcError, Rpc as RpcTrait};
use monero_simple_request_rpc::SimpleRequestRpc; use monero_simple_request_rpc::SimpleRequestRpc;
use serai_client::primitives::{ExternalNetworkId, ExternalCoin, Amount}; use serai_client::primitives::{network_id::ExternalNetworkId, coin::ExternalCoin, balance::Amount};
use scanner::ScannerFeed; use scanner::ScannerFeed;
use signers::TransactionPublisher; use signers::TransactionPublisher;

View File

@@ -10,7 +10,7 @@ use dalek_ff_group::Ed25519;
use monero_wallet::rpc::{FeeRate, RpcError}; use monero_wallet::rpc::{FeeRate, RpcError};
use serai_client::{ use serai_client::{
primitives::{ExternalCoin, Amount}, primitives::{coin::ExternalCoin, balance::Amount},
networks::monero::Address, networks::monero::Address,
}; };

View File

@@ -22,7 +22,6 @@ group = { version = "0.13", default-features = false }
serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
log = { version = "0.4", default-features = false, features = ["std"] } log = { version = "0.4", default-features = false, features = ["std"] }

View File

@@ -6,7 +6,6 @@ use core::{hash::Hash, fmt::Debug};
use group::GroupEncoding; use group::GroupEncoding;
use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
/// A module for task-related structs and functionality. /// A module for task-related structs and functionality.
@@ -40,8 +39,6 @@ pub trait Id:
+ AsRef<[u8]> + AsRef<[u8]>
+ AsMut<[u8]> + AsMut<[u8]>
+ Debug + Debug
+ Encode
+ Decode
+ BorshSerialize + BorshSerialize
+ BorshDeserialize + BorshDeserialize
{ {
@@ -57,22 +54,15 @@ impl<
+ AsRef<[u8]> + AsRef<[u8]>
+ AsMut<[u8]> + AsMut<[u8]>
+ Debug + Debug
+ Encode
+ Decode
+ BorshSerialize + BorshSerialize
+ BorshDeserialize, + BorshDeserialize,
> Id for I > Id for I
{ {
} }
/// A wrapper for a group element which implements the scale/borsh traits. /// A wrapper for a group element which implements the `borsh` traits.
#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct EncodableG<G: GroupEncoding>(pub G); pub struct EncodableG<G: GroupEncoding>(pub G);
impl<G: GroupEncoding> Encode for EncodableG<G> {
fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
f(self.0.to_bytes().as_ref())
}
}
impl<G: GroupEncoding> BorshSerialize for EncodableG<G> { impl<G: GroupEncoding> BorshSerialize for EncodableG<G> {
fn serialize<W: borsh::io::Write>(&self, writer: &mut W) -> borsh::io::Result<()> { fn serialize<W: borsh::io::Write>(&self, writer: &mut W) -> borsh::io::Result<()> {
writer.write_all(self.0.to_bytes().as_ref()) writer.write_all(self.0.to_bytes().as_ref())

View File

@@ -5,7 +5,7 @@ use group::GroupEncoding;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_primitives::{ExternalAddress, ExternalBalance}; use serai_primitives::{address::ExternalAddress, balance::ExternalBalance};
use crate::Id; use crate::Id;

View File

@@ -1,10 +1,11 @@
use std::io; use std::io;
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_primitives::ExternalBalance; use serai_primitives::{
use serai_coins_primitives::OutInstructionWithBalance; balance::ExternalBalance,
instructions::{OutInstruction, OutInstructionWithBalance},
};
use crate::Address; use crate::Address;
@@ -18,9 +19,11 @@ pub struct Payment<A: Address> {
impl<A: Address> TryFrom<OutInstructionWithBalance> for Payment<A> { impl<A: Address> TryFrom<OutInstructionWithBalance> for Payment<A> {
type Error = (); type Error = ();
fn try_from(out_instruction_with_balance: OutInstructionWithBalance) -> Result<Self, ()> { fn try_from(out_instruction_with_balance: OutInstructionWithBalance) -> Result<Self, ()> {
Ok(Payment { Ok(match out_instruction_with_balance.instruction {
address: out_instruction_with_balance.instruction.address.try_into().map_err(|_| ())?, OutInstruction::Transfer(address) => Payment {
balance: out_instruction_with_balance.balance, address: address.try_into().map_err(|_| ())?,
balance: out_instruction_with_balance.balance,
},
}) })
} }
} }
@@ -43,14 +46,12 @@ impl<A: Address> Payment<A> {
/// Read a Payment. /// Read a Payment.
pub fn read(reader: &mut impl io::Read) -> io::Result<Self> { pub fn read(reader: &mut impl io::Read) -> io::Result<Self> {
let address = A::deserialize_reader(reader)?; let address = A::deserialize_reader(reader)?;
let reader = &mut IoReader(reader); let balance = ExternalBalance::deserialize_reader(reader).map_err(io::Error::other)?;
let balance = ExternalBalance::decode(reader).map_err(io::Error::other)?;
Ok(Self { address, balance }) Ok(Self { address, balance })
} }
/// Write the Payment. /// Write the Payment.
pub fn write(&self, writer: &mut impl io::Write) -> io::Result<()> { pub fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
self.address.serialize(writer)?; self.address.serialize(writer)?;
self.balance.encode_to(writer); self.balance.serialize(writer)
Ok(())
} }
} }

View File

@@ -20,7 +20,6 @@ workspace = true
[dependencies] [dependencies]
# Encoders # Encoders
hex = { version = "0.4", default-features = false, features = ["std"] } hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
# Cryptography # Cryptography

View File

@@ -3,12 +3,10 @@ use std::io::{Read, Write};
use group::GroupEncoding; use group::GroupEncoding;
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};
use serai_primitives::ExternalBalance; use serai_primitives::{balance::ExternalBalance, validator_sets::Session};
use serai_validator_sets_primitives::Session;
use primitives::EncodableG; use primitives::EncodableG;
use crate::{ScannerFeed, KeyFor, AddressFor}; use crate::{ScannerFeed, KeyFor, AddressFor};
@@ -94,7 +92,7 @@ impl<S: ScannerFeed> BatchDb<S> {
if let Some(ReturnInformation { address, balance }) = return_information { if let Some(ReturnInformation { address, balance }) = return_information {
buf.write_all(&[1]).unwrap(); buf.write_all(&[1]).unwrap();
address.serialize(&mut buf).unwrap(); address.serialize(&mut buf).unwrap();
balance.encode_to(&mut buf); balance.serialize(&mut buf).unwrap();
} else { } else {
buf.write_all(&[0]).unwrap(); buf.write_all(&[0]).unwrap();
} }
@@ -116,7 +114,7 @@ impl<S: ScannerFeed> BatchDb<S> {
res.push((opt[0] == 1).then(|| { res.push((opt[0] == 1).then(|| {
let address = AddressFor::<S>::deserialize_reader(&mut buf).unwrap(); let address = AddressFor::<S>::deserialize_reader(&mut buf).unwrap();
let balance = ExternalBalance::decode(&mut IoReader(&mut buf)).unwrap(); let balance = ExternalBalance::deserialize_reader(&mut buf).unwrap();
ReturnInformation { address, balance } ReturnInformation { address, balance }
})); }));
} }

View File

@@ -2,11 +2,9 @@ use core::{marker::PhantomData, future::Future};
use blake2::{digest::typenum::U32, Digest, Blake2b}; use blake2::{digest::typenum::U32, Digest, Blake2b};
use scale::Encode;
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};
use serai_primitives::BlockHash; use serai_primitives::{BlockHash, instructions::Batch};
use serai_in_instructions_primitives::{MAX_BATCH_SIZE, Batch};
use primitives::{ use primitives::{
EncodableG, EncodableG,
@@ -111,12 +109,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
let mut batch_id = BatchDb::<S>::acquire_batch_id(&mut txn); let mut batch_id = BatchDb::<S>::acquire_batch_id(&mut txn);
// start with empty batch // start with empty batch
let mut batches = vec![Batch { let mut batches = vec![Batch::new(network, batch_id, external_network_block_hash)];
network,
id: batch_id,
external_network_block_hash,
instructions: vec![],
}];
// We also track the return information for the InInstructions within a Batch in case // We also track the return information for the InInstructions within a Batch in case
// they error // they error
let mut return_information = vec![vec![]]; let mut return_information = vec![vec![]];
@@ -125,23 +118,19 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
let balance = in_instruction.balance; let balance = in_instruction.balance;
let batch = batches.last_mut().unwrap(); let batch = batches.last_mut().unwrap();
batch.instructions.push(in_instruction);
// check if batch is over-size // check if batch is over-size
if batch.encode().len() > MAX_BATCH_SIZE { if batch.push_instruction(in_instruction.clone()).is_err() {
// pop the last instruction so it's back in size
let in_instruction = batch.instructions.pop().unwrap();
// bump the id for the new batch // bump the id for the new batch
batch_id = BatchDb::<S>::acquire_batch_id(&mut txn); batch_id = BatchDb::<S>::acquire_batch_id(&mut txn);
// make a new batch with this instruction included // make a new batch with this instruction included
batches.push(Batch { let mut batch = Batch::new(network, batch_id, external_network_block_hash);
network, batch
id: batch_id, .push_instruction(in_instruction)
external_network_block_hash, .expect("single InInstruction exceeded Batch size limit");
instructions: vec![in_instruction], batches.push(batch);
});
// Since we're allocating a new batch, allocate a new set of return addresses for it // Since we're allocating a new batch, allocate a new set of return addresses for it
return_information.push(vec![]); return_information.push(vec![]);
} }
@@ -157,16 +146,16 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
// Now that we've finalized the Batches, save the information for each to the database // Now that we've finalized the Batches, save the information for each to the database
assert_eq!(batches.len(), return_information.len()); assert_eq!(batches.len(), return_information.len());
for (batch, return_information) in batches.iter().zip(&return_information) { for (batch, return_information) in batches.iter().zip(&return_information) {
assert_eq!(batch.instructions.len(), return_information.len()); assert_eq!(batch.instructions().len(), return_information.len());
BatchDb::<S>::save_batch_info( BatchDb::<S>::save_batch_info(
&mut txn, &mut txn,
batch.id, batch.id(),
block_number, block_number,
session_to_sign_batch, session_to_sign_batch,
external_key_for_session_to_sign_batch, external_key_for_session_to_sign_batch,
Blake2b::<U32>::digest(batch.instructions.encode()).into(), Blake2b::<U32>::digest(borsh::to_vec(&batch.instructions()).unwrap()).into(),
); );
BatchDb::<S>::save_return_information(&mut txn, batch.id, return_information); BatchDb::<S>::save_return_information(&mut txn, batch.id(), return_information);
} }
for batch in batches { for batch in batches {

View File

@@ -3,13 +3,13 @@ use std::io::{self, Read, Write};
use group::GroupEncoding; use group::GroupEncoding;
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db, db_channel}; use serai_db::{Get, DbTxn, create_db, db_channel};
use serai_coins_primitives::OutInstructionWithBalance; use serai_primitives::{
use serai_validator_sets_primitives::Session; validator_sets::Session,
use serai_in_instructions_primitives::{InInstructionWithBalance, Batch}; instructions::{InInstructionWithBalance, Batch, OutInstructionWithBalance},
};
use primitives::{EncodableG, ReceivedOutput}; use primitives::{EncodableG, ReceivedOutput};
@@ -56,7 +56,7 @@ impl<S: ScannerFeed> OutputWithInInstruction<S> {
(opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(reader)).transpose()? (opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(reader)).transpose()?
}; };
let in_instruction = let in_instruction =
InInstructionWithBalance::decode(&mut IoReader(reader)).map_err(io::Error::other)?; InInstructionWithBalance::deserialize_reader(reader).map_err(io::Error::other)?;
Ok(Self { output, return_address, in_instruction }) Ok(Self { output, return_address, in_instruction })
} }
pub(crate) fn write(&self, writer: &mut impl io::Write) -> io::Result<()> { pub(crate) fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
@@ -67,7 +67,7 @@ impl<S: ScannerFeed> OutputWithInInstruction<S> {
} else { } else {
writer.write_all(&[0])?; writer.write_all(&[0])?;
} }
self.in_instruction.encode_to(writer); self.in_instruction.serialize(writer)?;
Ok(()) Ok(())
} }
} }
@@ -76,10 +76,10 @@ create_db!(
ScannerGlobal { ScannerGlobal {
StartBlock: () -> u64, StartBlock: () -> u64,
QueuedKey: <K: Encode>(key: K) -> (), QueuedKey: <K: BorshSerialize>(key: K) -> (),
ActiveKeys: <K: Borshy>() -> Vec<SeraiKeyDbEntry<K>>, ActiveKeys: <K: Borshy>() -> Vec<SeraiKeyDbEntry<K>>,
RetireAt: <K: Encode>(key: K) -> u64, RetireAt: <K: BorshSerialize>(key: K) -> u64,
// Highest acknowledged block // Highest acknowledged block
HighestAcknowledgedBlock: () -> u64, HighestAcknowledgedBlock: () -> u64,
@@ -294,7 +294,7 @@ impl<S: ScannerFeed> ScannerGlobalDb<S> {
assert!((opt[0] == 0) || (opt[0] == 1)); assert!((opt[0] == 0) || (opt[0] == 1));
let address = (opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(&mut buf).unwrap()); let address = (opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(&mut buf).unwrap());
Some((address, InInstructionWithBalance::decode(&mut IoReader(buf)).unwrap())) Some((address, InInstructionWithBalance::deserialize_reader(&mut buf).unwrap()))
} }
} }
@@ -357,7 +357,7 @@ impl<S: ScannerFeed> ScanToEventualityDb<S> {
} else { } else {
buf.write_all(&[0]).unwrap(); buf.write_all(&[0]).unwrap();
} }
forward.in_instruction.encode_to(&mut buf); forward.in_instruction.serialize(&mut buf).unwrap();
SerializedForwardedOutput::set(txn, forward.output.id().as_ref(), &buf); SerializedForwardedOutput::set(txn, forward.output.id().as_ref(), &buf);
} }
@@ -454,7 +454,7 @@ impl<S: ScannerFeed> Returnable<S> {
(opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(reader)).transpose()?; (opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(reader)).transpose()?;
let in_instruction = let in_instruction =
InInstructionWithBalance::decode(&mut IoReader(reader)).map_err(io::Error::other)?; InInstructionWithBalance::deserialize_reader(reader).map_err(io::Error::other)?;
Ok(Returnable { return_address, in_instruction }) Ok(Returnable { return_address, in_instruction })
} }
fn write(&self, writer: &mut impl io::Write) -> io::Result<()> { fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
@@ -464,7 +464,7 @@ impl<S: ScannerFeed> Returnable<S> {
} else { } else {
writer.write_all(&[0])?; writer.write_all(&[0])?;
} }
self.in_instruction.encode_to(writer); self.in_instruction.serialize(writer)?;
Ok(()) Ok(())
} }
} }
@@ -494,7 +494,7 @@ impl<S: ScannerFeed> ScanToBatchDb<S> {
block_number: u64, block_number: u64,
data: &InInstructionData<S>, data: &InInstructionData<S>,
) { ) {
let mut buf = data.session_to_sign_batch.encode(); let mut buf = borsh::to_vec(&data.session_to_sign_batch).unwrap();
buf.extend(data.external_key_for_session_to_sign_batch.to_bytes().as_ref()); buf.extend(data.external_key_for_session_to_sign_batch.to_bytes().as_ref());
for returnable_in_instruction in &data.returnable_in_instructions { for returnable_in_instruction in &data.returnable_in_instructions {
returnable_in_instruction.write(&mut buf).unwrap(); returnable_in_instruction.write(&mut buf).unwrap();
@@ -517,7 +517,7 @@ impl<S: ScannerFeed> ScanToBatchDb<S> {
); );
let mut buf = data.returnable_in_instructions.as_slice(); let mut buf = data.returnable_in_instructions.as_slice();
let session_to_sign_batch = Session::decode(&mut buf).unwrap(); let session_to_sign_batch = Session::deserialize_reader(&mut buf).unwrap();
let external_key_for_session_to_sign_batch = { let external_key_for_session_to_sign_batch = {
let mut external_key_for_session_to_sign_batch = let mut external_key_for_session_to_sign_batch =
<KeyFor<S> as GroupEncoding>::Repr::default(); <KeyFor<S> as GroupEncoding>::Repr::default();
@@ -595,7 +595,7 @@ impl SubstrateToEventualityDb {
} }
mod _public_db { mod _public_db {
use serai_in_instructions_primitives::Batch; use serai_primitives::instructions::Batch;
use serai_db::{Get, DbTxn, create_db, db_channel}; use serai_db::{Get, DbTxn, create_db, db_channel};

View File

@@ -1,6 +1,7 @@
use core::marker::PhantomData; use core::marker::PhantomData;
use scale::Encode; use borsh::BorshSerialize;
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};
use primitives::{EncodableG, ReceivedOutput, Eventuality, EventualityTracker}; use primitives::{EncodableG, ReceivedOutput, Eventuality, EventualityTracker};
@@ -14,7 +15,7 @@ create_db!(
// The latest block this task has handled which was notable // The latest block this task has handled which was notable
LatestHandledNotableBlock: () -> u64, LatestHandledNotableBlock: () -> u64,
SerializedEventualities: <K: Encode>(key: K) -> Vec<u8>, SerializedEventualities: <K: BorshSerialize>(key: K) -> Vec<u8>,
AccumulatedOutput: (id: &[u8]) -> (), AccumulatedOutput: (id: &[u8]) -> (),
} }

View File

@@ -10,8 +10,10 @@ use group::GroupEncoding;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, Db}; use serai_db::{Get, DbTxn, Db};
use serai_primitives::{ExternalNetworkId, ExternalCoin, Amount}; use serai_primitives::{
use serai_coins_primitives::OutInstructionWithBalance; network_id::ExternalNetworkId, coin::ExternalCoin, balance::Amount,
instructions::OutInstructionWithBalance,
};
use messages::substrate::ExecutedBatch; use messages::substrate::ExecutedBatch;
use primitives::{task::*, Address, ReceivedOutput, Block, Payment}; use primitives::{task::*, Address, ReceivedOutput, Block, Payment};

View File

@@ -1,6 +1,6 @@
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
create_db!( create_db!(
ScannerBatch { ScannerBatch {

View File

@@ -2,7 +2,7 @@ use core::{marker::PhantomData, future::Future};
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use primitives::task::{DoesNotError, ContinuallyRan}; use primitives::task::{DoesNotError, ContinuallyRan};
use crate::{ use crate::{
@@ -70,12 +70,12 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ReportTask<D, S> {
// Because this boolean was expanded, we lose short-circuiting. That's fine // Because this boolean was expanded, we lose short-circuiting. That's fine
let handover_batch = last_session != session_to_sign_batch; let handover_batch = last_session != session_to_sign_batch;
let batch_after_handover_batch = let batch_after_handover_batch =
(last_session == session_to_sign_batch) && ((first_batch + 1) == batch.id); (last_session == session_to_sign_batch) && ((first_batch + 1) == batch.id());
if handover_batch || batch_after_handover_batch { if handover_batch || batch_after_handover_batch {
let verified_prior_batch = substrate::last_acknowledged_batch::<S>(&txn) let verified_prior_batch = substrate::last_acknowledged_batch::<S>(&txn)
// Since `batch.id = 0` in the Session(0)-never-published-a-Batch case, we don't // Since `batch.id() = 0` in the Session(0)-never-published-a-Batch case, we don't
// check `last_acknowledged_batch >= (batch.id - 1)` but instead this // check `last_acknowledged_batch >= (batch.id() - 1)` but instead this
.map(|last_acknowledged_batch| (last_acknowledged_batch + 1) >= batch.id) .map(|last_acknowledged_batch| (last_acknowledged_batch + 1) >= batch.id())
// We've never verified any Batches // We've never verified any Batches
.unwrap_or(false); .unwrap_or(false);
if !verified_prior_batch { if !verified_prior_batch {
@@ -90,7 +90,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ReportTask<D, S> {
BatchDb::set_last_session_to_sign_batch_and_first_batch( BatchDb::set_last_session_to_sign_batch_and_first_batch(
&mut txn, &mut txn,
session_to_sign_batch, session_to_sign_batch,
batch.id, batch.id(),
); );
} }
} }

View File

@@ -1,12 +1,12 @@
use core::future::Future; use core::future::Future;
use std::collections::HashMap; use std::collections::HashMap;
use scale::Decode; use borsh::BorshDeserialize;
use serai_db::{Get, DbTxn, Db}; use serai_db::{Get, DbTxn, Db};
use serai_in_instructions_primitives::{ #[rustfmt::skip]
Shorthand, RefundableInInstruction, InInstruction, InInstructionWithBalance, use serai_primitives::instructions::{RefundableInInstruction, InInstruction, InInstructionWithBalance};
};
use primitives::{task::ContinuallyRan, OutputType, ReceivedOutput, Block}; use primitives::{task::ContinuallyRan, OutputType, ReceivedOutput, Block};
@@ -55,26 +55,22 @@ fn in_instruction_from_output<S: ScannerFeed>(
let presumed_origin = output.presumed_origin(); let presumed_origin = output.presumed_origin();
let mut data = output.data(); let mut data = output.data();
let shorthand = match Shorthand::decode(&mut data) { let instruction = match RefundableInInstruction::deserialize_reader(&mut data) {
Ok(shorthand) => shorthand,
Err(e) => {
log::info!("data in output {} wasn't valid shorthand: {e:?}", hex::encode(output.id()));
return (presumed_origin, None);
}
};
let instruction = match RefundableInInstruction::try_from(shorthand) {
Ok(instruction) => instruction, Ok(instruction) => instruction,
Err(e) => { Err(e) => {
log::info!( log::info!(
"shorthand in output {} wasn't convertible to a RefundableInInstruction: {e:?}", "data in output {} wasn't a valid `RefundableInInstruction`: {e:?}",
hex::encode(output.id()) hex::encode(output.id()),
); );
return (presumed_origin, None); return (presumed_origin, None);
} }
}; };
( (
instruction.origin.and_then(|addr| AddressFor::<S>::try_from(addr).ok()).or(presumed_origin), instruction
.return_address
.and_then(|addr| AddressFor::<S>::try_from(addr).ok())
.or(presumed_origin),
Some(instruction.instruction), Some(instruction.instruction),
) )
} }

View File

@@ -5,7 +5,7 @@ use group::GroupEncoding;
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db, db_channel}; use serai_db::{Get, DbTxn, create_db, db_channel};
use serai_coins_primitives::OutInstructionWithBalance; use serai_primitives::instructions::OutInstructionWithBalance;
use messages::substrate::ExecutedBatch; use messages::substrate::ExecutedBatch;

View File

@@ -2,7 +2,7 @@ use core::{marker::PhantomData, future::Future};
use serai_db::{Get, DbTxn, Db}; use serai_db::{Get, DbTxn, Db};
use serai_coins_primitives::{OutInstruction, OutInstructionWithBalance}; use serai_primitives::instructions::{OutInstruction, OutInstructionWithBalance};
use messages::substrate::ExecutedBatch; use messages::substrate::ExecutedBatch;
use primitives::task::{DoesNotError, ContinuallyRan}; use primitives::task::{DoesNotError, ContinuallyRan};
@@ -150,7 +150,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for SubstrateTask<D, S> {
if let Some(batch::ReturnInformation { address, balance }) = return_information { if let Some(batch::ReturnInformation { address, balance }) = return_information {
burns.push(OutInstructionWithBalance { burns.push(OutInstructionWithBalance {
instruction: OutInstruction { address: address.into() }, instruction: OutInstruction::Transfer(address.into()),
balance, balance,
}); });
} }

View File

@@ -15,7 +15,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-machete] [package.metadata.cargo-machete]
ignored = ["scale", "borsh"] ignored = ["borsh"]
[lints] [lints]
workspace = true workspace = true
@@ -24,7 +24,6 @@ workspace = true
ciphersuite = { path = "../../../crypto/ciphersuite", default-features = false, features = ["std"] } ciphersuite = { path = "../../../crypto/ciphersuite", default-features = false, features = ["std"] }
frost = { package = "modular-frost", path = "../../../crypto/frost", default-features = false } frost = { package = "modular-frost", path = "../../../crypto/frost", default-features = false }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-db = { path = "../../../common/db" } serai-db = { path = "../../../common/db" }

View File

@@ -15,7 +15,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-machete] [package.metadata.cargo-machete]
ignored = ["scale", "borsh"] ignored = ["borsh"]
[lints] [lints]
workspace = true workspace = true
@@ -23,7 +23,6 @@ workspace = true
[dependencies] [dependencies]
group = { version = "0.13", default-features = false } group = { version = "0.13", default-features = false }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-db = { path = "../../../common/db" } serai-db = { path = "../../../common/db" }

View File

@@ -4,7 +4,7 @@
use core::{fmt::Debug, future::Future}; use core::{fmt::Debug, future::Future};
use serai_primitives::Amount; use serai_primitives::balance::Amount;
use primitives::{ReceivedOutput, Payment}; use primitives::{ReceivedOutput, Payment};
use scanner::{ScannerFeed, KeyFor, AddressFor, OutputFor, EventualityFor, BlockFor}; use scanner::{ScannerFeed, KeyFor, AddressFor, OutputFor, EventualityFor, BlockFor};

View File

@@ -1,6 +1,9 @@
use borsh::{BorshSerialize, BorshDeserialize}; use borsh::{BorshSerialize, BorshDeserialize};
use serai_primitives::{ExternalCoin, Amount, ExternalBalance}; use serai_primitives::{
coin::ExternalCoin,
balance::{Amount, ExternalBalance},
};
use primitives::{Address, Payment}; use primitives::{Address, Payment};
use scanner::ScannerFeed; use scanner::ScannerFeed;

View File

@@ -15,7 +15,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-machete] [package.metadata.cargo-machete]
ignored = ["scale", "borsh"] ignored = ["borsh"]
[lints] [lints]
workspace = true workspace = true
@@ -23,7 +23,6 @@ workspace = true
[dependencies] [dependencies]
group = { version = "0.13", default-features = false } group = { version = "0.13", default-features = false }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-primitives = { path = "../../../../substrate/primitives", default-features = false, features = ["std"] } serai-primitives = { path = "../../../../substrate/primitives", default-features = false, features = ["std"] }

View File

@@ -2,7 +2,10 @@ use core::marker::PhantomData;
use group::GroupEncoding; use group::GroupEncoding;
use serai_primitives::{ExternalCoin, Amount, ExternalBalance}; use serai_primitives::{
coin::ExternalCoin,
balance::{Amount, ExternalBalance},
};
use borsh::BorshDeserialize; use borsh::BorshDeserialize;
use serai_db::{Get, DbTxn, create_db, db_channel}; use serai_db::{Get, DbTxn, create_db, db_channel};

View File

@@ -7,7 +7,10 @@ use std::collections::HashMap;
use group::GroupEncoding; use group::GroupEncoding;
use serai_primitives::{ExternalCoin, Amount, ExternalBalance}; use serai_primitives::{
coin::ExternalCoin,
balance::{Amount, ExternalBalance},
};
use serai_db::DbTxn; use serai_db::DbTxn;

View File

@@ -15,7 +15,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-machete] [package.metadata.cargo-machete]
ignored = ["scale", "borsh"] ignored = ["borsh"]
[lints] [lints]
workspace = true workspace = true
@@ -23,7 +23,6 @@ workspace = true
[dependencies] [dependencies]
group = { version = "0.13", default-features = false } group = { version = "0.13", default-features = false }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-primitives = { path = "../../../../substrate/primitives", default-features = false, features = ["std"] } serai-primitives = { path = "../../../../substrate/primitives", default-features = false, features = ["std"] }

View File

@@ -2,7 +2,7 @@ use core::marker::PhantomData;
use group::GroupEncoding; use group::GroupEncoding;
use serai_primitives::{ExternalCoin, Amount}; use serai_primitives::{coin::ExternalCoin, balance::Amount};
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};

View File

@@ -7,7 +7,7 @@ use std::collections::HashMap;
use group::GroupEncoding; use group::GroupEncoding;
use serai_primitives::{ExternalCoin, Amount}; use serai_primitives::{coin::ExternalCoin, balance::Amount};
use serai_db::DbTxn; use serai_db::DbTxn;

View File

@@ -30,7 +30,6 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = fals
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false } frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false }
frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false } frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] } serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
@@ -39,7 +38,7 @@ serai-db = { path = "../../common/db" }
log = { version = "0.4", default-features = false, features = ["std"] } log = { version = "0.4", default-features = false, features = ["std"] }
tokio = { version = "1", default-features = false, features = ["rt-multi-thread", "sync", "time", "macros"] } tokio = { version = "1", default-features = false, features = ["rt-multi-thread", "sync", "time", "macros"] }
serai-cosign = { path = "../../coordinator/cosign" } serai-cosign = { package = "serai-cosign-types", path = "../../coordinator/cosign/types" }
messages = { package = "serai-processor-messages", path = "../messages" } messages = { package = "serai-processor-messages", path = "../messages" }
primitives = { package = "serai-processor-primitives", path = "../primitives" } primitives = { package = "serai-processor-primitives", path = "../primitives" }
scanner = { package = "serai-processor-scanner", path = "../scanner" } scanner = { package = "serai-processor-scanner", path = "../scanner" }

View File

@@ -1,5 +1,7 @@
use serai_validator_sets_primitives::Session; use serai_primitives::{
use serai_in_instructions_primitives::{Batch, SignedBatch}; validator_sets::Session,
instructions::{Batch, SignedBatch},
};
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};

View File

@@ -6,10 +6,7 @@ use ciphersuite::group::GroupEncoding;
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use frost::dkg::ThresholdKeys; use frost::dkg::ThresholdKeys;
use scale::Encode; use serai_primitives::{validator_sets::Session, instructions::SignedBatch};
use serai_validator_sets_primitives::Session;
use serai_in_instructions_primitives::{SignedBatch, batch_message};
use serai_db::{Get, DbTxn, Db}; use serai_db::{Get, DbTxn, Db};
@@ -74,7 +71,7 @@ impl<D: Db, E: GroupEncoding> BatchSignerTask<D, E> {
machines.push(WrappedSchnorrkelMachine::new( machines.push(WrappedSchnorrkelMachine::new(
keys.clone(), keys.clone(),
b"substrate", b"substrate",
batch_message(&batch), batch.publish_batch_message(),
)); ));
} }
attempt_manager.register(VariantSignId::Batch(id), machines); attempt_manager.register(VariantSignId::Batch(id), machines);
@@ -100,14 +97,14 @@ impl<D: Db, E: Send + GroupEncoding> ContinuallyRan for BatchSignerTask<D, E> {
iterated = true; iterated = true;
// Save this to the database as a transaction to sign // Save this to the database as a transaction to sign
let batch_hash = <[u8; 32]>::from(Blake2b::<U32>::digest(batch.encode())); let batch_hash = <[u8; 32]>::from(Blake2b::<U32>::digest(borsh::to_vec(&batch).unwrap()));
self.active_signing_protocols.insert(batch_hash); self.active_signing_protocols.insert(batch_hash);
ActiveSigningProtocols::set( ActiveSigningProtocols::set(
&mut txn, &mut txn,
self.session, self.session,
&self.active_signing_protocols.iter().copied().collect(), &self.active_signing_protocols.iter().copied().collect(),
); );
BatchHash::set(&mut txn, batch.id, &batch_hash); BatchHash::set(&mut txn, batch.id(), &batch_hash);
Batches::set(&mut txn, batch_hash, &batch); Batches::set(&mut txn, batch_hash, &batch);
let mut machines = Vec::with_capacity(self.keys.len()); let mut machines = Vec::with_capacity(self.keys.len());
@@ -116,7 +113,7 @@ impl<D: Db, E: Send + GroupEncoding> ContinuallyRan for BatchSignerTask<D, E> {
machines.push(WrappedSchnorrkelMachine::new( machines.push(WrappedSchnorrkelMachine::new(
keys.clone(), keys.clone(),
b"substrate", b"substrate",
batch_message(&batch), batch.publish_batch_message(),
)); ));
} }
for msg in self.attempt_manager.register(VariantSignId::Batch(batch_hash), machines) { for msg in self.attempt_manager.register(VariantSignId::Batch(batch_hash), machines) {
@@ -160,8 +157,8 @@ impl<D: Db, E: Send + GroupEncoding> ContinuallyRan for BatchSignerTask<D, E> {
// Update the last acknowledged Batch // Update the last acknowledged Batch
{ {
let last_acknowledged = LastAcknowledgedBatch::get(&txn); let last_acknowledged = LastAcknowledgedBatch::get(&txn);
if Some(batch.id) > last_acknowledged { if Some(batch.id()) > last_acknowledged {
LastAcknowledgedBatch::set(&mut txn, &batch.id); LastAcknowledgedBatch::set(&mut txn, &batch.id());
} }
} }
@@ -174,7 +171,7 @@ impl<D: Db, E: Send + GroupEncoding> ContinuallyRan for BatchSignerTask<D, E> {
); );
// Clean up SignedBatches // Clean up SignedBatches
SignedBatches::del(&mut txn, batch.id); SignedBatches::del(&mut txn, batch.id());
// We retire with a txn so we either successfully flag this Batch as acknowledged, and // We retire with a txn so we either successfully flag this Batch as acknowledged, and
// won't re-register it (making this retire safe), or we don't flag it, meaning we will // won't re-register it (making this retire safe), or we don't flag it, meaning we will
@@ -203,7 +200,7 @@ impl<D: Db, E: Send + GroupEncoding> ContinuallyRan for BatchSignerTask<D, E> {
let batch = let batch =
Batches::get(&txn, id).expect("signed a Batch we didn't save to the database"); Batches::get(&txn, id).expect("signed a Batch we didn't save to the database");
let signed_batch = SignedBatch { batch, signature: signature.into() }; let signed_batch = SignedBatch { batch, signature: signature.into() };
SignedBatches::set(&mut txn, signed_batch.batch.id, &signed_batch); SignedBatches::set(&mut txn, signed_batch.batch.id(), &signed_batch);
} }
} }

View File

@@ -1,6 +1,6 @@
use core::future::Future; use core::future::Future;
use serai_primitives::Signature; use serai_primitives::crypto::Signature;
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};
@@ -140,7 +140,7 @@ impl<D: Db, C: Coordinator> ContinuallyRan for CoordinatorTask<D, C> {
let mut next_batch = last_batch.map(|id| id + 1).unwrap_or(0); let mut next_batch = last_batch.map(|id| id + 1).unwrap_or(0);
while let Some(batch) = crate::batch::signed_batch(&txn, next_batch) { while let Some(batch) = crate::batch::signed_batch(&txn, next_batch) {
iterated = true; iterated = true;
db::LastPublishedBatch::set(&mut txn, &batch.batch.id); db::LastPublishedBatch::set(&mut txn, &batch.batch.id());
self self
.coordinator .coordinator
.publish_signed_batch(batch) .publish_signed_batch(batch)

View File

@@ -1,4 +1,4 @@
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};

View File

@@ -3,9 +3,7 @@ use core::future::Future;
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use frost::dkg::ThresholdKeys; use frost::dkg::ThresholdKeys;
use scale::Encode; use serai_primitives::{crypto::Signature, validator_sets::Session};
use serai_primitives::Signature;
use serai_validator_sets_primitives::Session;
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};
@@ -127,7 +125,7 @@ impl<D: Db> ContinuallyRan for CosignerTask<D> {
LatestCosigned::set(&mut txn, self.session, &cosign.block_number); LatestCosigned::set(&mut txn, self.session, &cosign.block_number);
let cosign = SignedCosign { let cosign = SignedCosign {
cosign, cosign,
signature: Signature::from(signature).encode().try_into().unwrap(), signature: borsh::to_vec(&Signature::from(signature)).unwrap().try_into().unwrap(),
}; };
// Send the cosign // Send the cosign
Cosign::send(&mut txn, self.session, &cosign); Cosign::send(&mut txn, self.session, &cosign);

View File

@@ -1,4 +1,4 @@
use serai_validator_sets_primitives::{Session, SlashReport as SlashReportStruct}; use serai_primitives::validator_sets::{Session, SlashReport as SlashReportStruct};
use serai_db::{Get, DbTxn, create_db, db_channel}; use serai_db::{Get, DbTxn, create_db, db_channel};

View File

@@ -11,9 +11,11 @@ use ciphersuite::{group::GroupEncoding, Ciphersuite};
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use frost::dkg::ThresholdKeys; use frost::dkg::ThresholdKeys;
use serai_primitives::Signature; use serai_primitives::{
use serai_validator_sets_primitives::{Session, SlashReport}; crypto::Signature,
use serai_in_instructions_primitives::SignedBatch; validator_sets::{Session, SlashReport},
instructions::SignedBatch,
};
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};

View File

@@ -3,8 +3,7 @@ use core::{marker::PhantomData, future::Future};
use dalek_ff_group::Ristretto; use dalek_ff_group::Ristretto;
use frost::dkg::ThresholdKeys; use frost::dkg::ThresholdKeys;
use serai_primitives::Signature; use serai_primitives::{crypto::Signature, validator_sets::Session};
use serai_validator_sets_primitives::Session;
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};

View File

@@ -1,4 +1,4 @@
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use serai_db::{Get, DbTxn, create_db}; use serai_db::{Get, DbTxn, create_db};

View File

@@ -6,7 +6,7 @@ use std::{
use frost::dkg::ThresholdKeys; use frost::dkg::ThresholdKeys;
use serai_validator_sets_primitives::Session; use serai_primitives::validator_sets::Session;
use serai_db::{DbTxn, Db}; use serai_db::{DbTxn, Db};

View File

@@ -22,13 +22,13 @@ workspace = true
borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"] } borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"] }
bitvec = { version = "1", default-features = false, features = ["alloc"] } bitvec = { version = "1", default-features = false, features = ["alloc"] }
sp-core = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "serai-next", default-features = false } sp-core = { git = "https://github.com/serai-dex/patch-polkadot-sdk", rev = "2bfdaed4b3614de2fe7d10e4ece3e6a912833e90", default-features = false }
serde = { version = "1", default-features = false, features = ["derive"], optional = true } serde = { version = "1", default-features = false, features = ["derive"], optional = true }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"], optional = true } scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"], optional = true }
scale-info = { version = "2", default-features = false, features = ["derive"], optional = true } scale-info = { version = "2", default-features = false, features = ["derive"], optional = true }
sp-runtime = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "serai-next", default-features = false, features = ["serde"], optional = true } sp-runtime = { git = "https://github.com/serai-dex/patch-polkadot-sdk", rev = "2bfdaed4b3614de2fe7d10e4ece3e6a912833e90", default-features = false, features = ["serde"], optional = true }
frame-support = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "serai-next", default-features = false, optional = true } frame-support = { git = "https://github.com/serai-dex/patch-polkadot-sdk", rev = "2bfdaed4b3614de2fe7d10e4ece3e6a912833e90", default-features = false, optional = true }
serai-primitives = { path = "../primitives", version = "0.1", default-features = false } serai-primitives = { path = "../primitives", version = "0.1", default-features = false }
@@ -47,6 +47,6 @@ std = [
"serai-primitives/std", "serai-primitives/std",
] ]
substrate = ["serde", "scale", "scale-info", "sp-runtime", "frame-support", "serai-primitives/non_canonical_scale_derivations"] substrate = ["serde", "scale", "scale-info", "sp-runtime", "frame-support", "serai-primitives/serde", "serai-primitives/non_canonical_scale_derivations"]
try-runtime = ["sp-runtime/try-runtime"] try-runtime = ["sp-runtime/try-runtime"]
default = ["std"] default = ["std"]

View File

@@ -105,7 +105,7 @@ pub struct Block {
mod substrate { mod substrate {
use core::fmt::Debug; use core::fmt::Debug;
use scale::{Encode, Decode}; use scale::{Encode, Decode, DecodeWithMemTracking};
use scale_info::TypeInfo; use scale_info::TypeInfo;
use sp_core::H256; use sp_core::H256;
@@ -116,6 +116,31 @@ mod substrate {
use super::*; use super::*;
// Add `serde` implementations which treat self as a `Vec<u8>`
impl sp_core::serde::Serialize for Transaction {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: sp_core::serde::Serializer,
{
<Vec<u8> as sp_core::serde::Serialize>::serialize(&self.encode(), serializer)
}
}
impl<'de> sp_core::serde::Deserialize<'de> for Transaction {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: sp_core::serde::Deserializer<'de>,
{
use sp_core::serde::de::Error;
let bytes = <Vec<u8> as sp_core::serde::Deserialize>::deserialize(deserializer)?;
let mut reader = bytes.as_slice();
let block = Self::decode(&mut reader).map_err(D::Error::custom)?;
if !reader.is_empty() {
Err(D::Error::custom("extraneous bytes at end"))?;
}
Ok(block)
}
}
/// The digest for all of the Serai-specific header fields added before execution of the block. /// The digest for all of the Serai-specific header fields added before execution of the block.
#[derive(Clone, Copy, PartialEq, Eq, BorshSerialize, BorshDeserialize)] #[derive(Clone, Copy, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct SeraiPreExecutionDigest { pub struct SeraiPreExecutionDigest {
@@ -149,7 +174,18 @@ mod substrate {
/// ///
/// This is not considered part of the protocol proper and may be pruned in the future. It's /// This is not considered part of the protocol proper and may be pruned in the future. It's
/// solely considered used for consensus now. /// solely considered used for consensus now.
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo, sp_runtime::Serialize)] #[derive(
Clone,
PartialEq,
Eq,
Debug,
Encode,
Decode,
DecodeWithMemTracking,
TypeInfo,
sp_runtime::Serialize,
sp_runtime::Deserialize,
)]
pub struct ConsensusV1 { pub struct ConsensusV1 {
/// The hash of the immediately preceding block. /// The hash of the immediately preceding block.
parent_hash: H256, parent_hash: H256,
@@ -164,14 +200,37 @@ mod substrate {
} }
/// A V1 header for a block, as needed by Substrate. /// A V1 header for a block, as needed by Substrate.
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo, sp_runtime::Serialize)] #[derive(
Clone,
PartialEq,
Eq,
Debug,
Encode,
Decode,
DecodeWithMemTracking,
TypeInfo,
sp_runtime::Serialize,
sp_runtime::Deserialize,
)]
pub struct SubstrateHeaderV1 { pub struct SubstrateHeaderV1 {
number: u64, number: u64,
consensus: ConsensusV1, consensus: ConsensusV1,
} }
/// A header for a block, as needed by Substrate. /// A header for a block, as needed by Substrate.
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo, sp_runtime::Serialize)] #[derive(
Clone,
PartialEq,
Eq,
Debug,
Encode,
Decode,
DecodeWithMemTracking,
TypeInfo,
sp_runtime::Serialize,
sp_runtime::Deserialize,
)]
#[allow(clippy::cast_possible_truncation)]
pub enum SubstrateHeader { pub enum SubstrateHeader {
/// A version 1 header. /// A version 1 header.
V1(SubstrateHeaderV1), V1(SubstrateHeaderV1),
@@ -226,12 +285,34 @@ mod substrate {
} }
/// A block, as needed by Substrate. /// A block, as needed by Substrate.
#[derive(Clone, Debug, PartialEq, Eq, Encode, Decode, sp_runtime::Serialize)] #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode, DecodeWithMemTracking, TypeInfo)]
pub struct SubstrateBlock { pub struct SubstrateBlock {
header: SubstrateHeader, header: SubstrateHeader,
#[serde(skip)] // This makes this unsafe to deserialize, but we don't impl `Deserialize`
transactions: Vec<Transaction>, transactions: Vec<Transaction>,
} }
impl sp_core::serde::Serialize for SubstrateBlock {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: sp_core::serde::Serializer,
{
<Vec<u8> as sp_core::serde::Serialize>::serialize(&self.encode(), serializer)
}
}
impl<'de> sp_core::serde::Deserialize<'de> for SubstrateBlock {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: sp_core::serde::Deserializer<'de>,
{
use sp_core::serde::de::Error;
let bytes = <Vec<u8> as sp_core::serde::Deserialize>::deserialize(deserializer)?;
let mut reader = bytes.as_slice();
let block = Self::decode(&mut reader).map_err(D::Error::custom)?;
if !reader.is_empty() {
Err(D::Error::custom("extraneous bytes at end"))?;
}
Ok(block)
}
}
impl HeaderTrait for SubstrateHeader { impl HeaderTrait for SubstrateHeader {
type Number = u64; type Number = u64;

View File

@@ -166,17 +166,20 @@ impl BorshDeserialize for Transaction {
if len == 0 { if len == 0 {
let call = Call::deserialize_reader(reader)?; let call = Call::deserialize_reader(reader)?;
if call.is_signed() { if call.is_signed() {
#[allow(clippy::io_other_error)]
Err(io::Error::new(io::ErrorKind::Other, "call was signed but marked unsigned"))?; Err(io::Error::new(io::ErrorKind::Other, "call was signed but marked unsigned"))?;
} }
Ok(Transaction::Unsigned { call: UnsignedCall(call) }) Ok(Transaction::Unsigned { call: UnsignedCall(call) })
} else { } else {
if u32::from(len) > MAX_CALLS { if u32::from(len) > MAX_CALLS {
#[allow(clippy::io_other_error)]
Err(io::Error::new(io::ErrorKind::Other, "too many calls"))?; Err(io::Error::new(io::ErrorKind::Other, "too many calls"))?;
} }
let mut calls = BoundedVec::with_bounded_capacity(len.into()); let mut calls = BoundedVec::with_bounded_capacity(len.into());
for _ in 0 .. len { for _ in 0 .. len {
let call = Call::deserialize_reader(reader)?; let call = Call::deserialize_reader(reader)?;
if !call.is_signed() { if !call.is_signed() {
#[allow(clippy::io_other_error)]
Err(io::Error::new(io::ErrorKind::Other, "call was unsigned but included as signed"))?; Err(io::Error::new(io::ErrorKind::Other, "call was unsigned but included as signed"))?;
} }
calls.try_push(call).unwrap(); calls.try_push(call).unwrap();
@@ -254,12 +257,20 @@ mod substrate {
fn read(&mut self, buf: &mut [u8]) -> borsh::io::Result<usize> { fn read(&mut self, buf: &mut [u8]) -> borsh::io::Result<usize> {
let remaining_len = self.0.remaining_len().map_err(|err| { let remaining_len = self.0.remaining_len().map_err(|err| {
self.1 = Some(err); self.1 = Some(err);
#[allow(clippy::io_other_error)]
borsh::io::Error::new(borsh::io::ErrorKind::Other, "") borsh::io::Error::new(borsh::io::ErrorKind::Other, "")
})?; })?;
// If we're still calling `read`, we try to read at least one more byte // If we're still calling `read`, we try to read at least one more byte
let to_read = buf.len().min(remaining_len.unwrap_or(1)); let to_read = buf.len().min(remaining_len.unwrap_or(1));
// This may not be _allocated_ making this over-zealous, but it's the best we can do
self.0.on_before_alloc_mem(to_read).map_err(|err| {
self.1 = Some(err);
#[allow(clippy::io_other_error)]
borsh::io::Error::new(borsh::io::ErrorKind::Other, "")
})?;
self.0.read(&mut buf[.. to_read]).map_err(|err| { self.0.read(&mut buf[.. to_read]).map_err(|err| {
self.1 = Some(err); self.1 = Some(err);
#[allow(clippy::io_other_error)]
borsh::io::Error::new(borsh::io::ErrorKind::Other, "") borsh::io::Error::new(borsh::io::ErrorKind::Other, "")
})?; })?;
Ok(to_read) Ok(to_read)
@@ -273,6 +284,23 @@ mod substrate {
} }
} }
// Clean `Transaction` tracks its memory during decoding, as we do call
// `Input::on_before_alloc_mem`
impl scale::DecodeWithMemTracking for Transaction {}
// Shim `TypeInfo` for `Transaction`
impl scale_info::TypeInfo for Transaction {
type Identity = Self;
fn type_info() -> scale_info::Type {
scale_info::Type {
path: scale_info::Path { segments: vec!["serai_abi", "transaction", "Transaction"] },
type_params: vec![],
type_def: (scale_info::TypeDefComposite { fields: vec![] }).into(),
docs: vec![],
}
}
}
/// The context which transactions are executed in. /// The context which transactions are executed in.
pub trait TransactionContext: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { pub trait TransactionContext: 'static + Send + Sync + Clone + PartialEq + Eq + Debug {
/// The base weight for a signed transaction. /// The base weight for a signed transaction.

Some files were not shown because too many files have changed in this diff Show More