Get all processors to compile again

Requires splitting `serai-cosign` into `serai-cosign` and `serai-cosign-types`
so the processors don't require `serai-client/serai` (not correct yet).
Luke Parker
2025-09-02 02:16:21 -04:00
parent 75240ed327
commit ada94e8c5d
87 changed files with 413 additions and 301 deletions

View File

@@ -20,7 +20,6 @@ workspace = true
[dependencies]
# Encoders
hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
# Cryptography

View File

@@ -3,12 +3,10 @@ use std::io::{Read, Write};
use group::GroupEncoding;
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db};
use serai_primitives::ExternalBalance;
use serai_validator_sets_primitives::Session;
use serai_primitives::{balance::ExternalBalance, validator_sets::Session};
use primitives::EncodableG;
use crate::{ScannerFeed, KeyFor, AddressFor};
@@ -94,7 +92,7 @@ impl<S: ScannerFeed> BatchDb<S> {
if let Some(ReturnInformation { address, balance }) = return_information {
buf.write_all(&[1]).unwrap();
address.serialize(&mut buf).unwrap();
balance.encode_to(&mut buf);
balance.serialize(&mut buf).unwrap();
} else {
buf.write_all(&[0]).unwrap();
}
@@ -116,7 +114,7 @@ impl<S: ScannerFeed> BatchDb<S> {
res.push((opt[0] == 1).then(|| {
let address = AddressFor::<S>::deserialize_reader(&mut buf).unwrap();
let balance = ExternalBalance::decode(&mut IoReader(&mut buf)).unwrap();
let balance = ExternalBalance::deserialize_reader(&mut buf).unwrap();
ReturnInformation { address, balance }
}));
}
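
For context on the swap this hunk makes throughout the scanner database code, a minimal sketch of the borsh round-trip that replaces SCALE's `encode_to`/`decode`, using a hypothetical stand-in type rather than the real `ExternalBalance`:

use borsh::{BorshSerialize, BorshDeserialize};

// Hypothetical stand-in for the serai-primitives types being migrated.
#[derive(Debug, PartialEq, BorshSerialize, BorshDeserialize)]
struct Balance {
  coin: u8,
  amount: u64,
}

fn main() {
  let balance = Balance { coin: 0, amount: 100 };

  // Writing: `value.serialize(&mut writer)?` replaces `value.encode_to(&mut writer)`.
  let mut buf = vec![];
  balance.serialize(&mut buf).unwrap();

  // Reading: `T::deserialize_reader(&mut reader)?` replaces
  // `T::decode(&mut IoReader(reader))`.
  let read = Balance::deserialize_reader(&mut buf.as_slice()).unwrap();
  assert_eq!(balance, read);
}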

View File

@@ -2,11 +2,9 @@ use core::{marker::PhantomData, future::Future};
use blake2::{digest::typenum::U32, Digest, Blake2b};
use scale::Encode;
use serai_db::{DbTxn, Db};
use serai_primitives::BlockHash;
use serai_in_instructions_primitives::{MAX_BATCH_SIZE, Batch};
use serai_primitives::{BlockHash, instructions::Batch};
use primitives::{
EncodableG,
@@ -111,12 +109,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
let mut batch_id = BatchDb::<S>::acquire_batch_id(&mut txn);
// start with empty batch
let mut batches = vec![Batch {
network,
id: batch_id,
external_network_block_hash,
instructions: vec![],
}];
let mut batches = vec![Batch::new(network, batch_id, external_network_block_hash)];
// We also track the return information for the InInstructions within a Batch in case
// they error
let mut return_information = vec![vec![]];
@@ -125,23 +118,19 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
let balance = in_instruction.balance;
let batch = batches.last_mut().unwrap();
batch.instructions.push(in_instruction);
// check if batch is over-size
if batch.encode().len() > MAX_BATCH_SIZE {
// pop the last instruction so it's back in size
let in_instruction = batch.instructions.pop().unwrap();
if batch.push_instruction(in_instruction.clone()).is_err() {
// bump the id for the new batch
batch_id = BatchDb::<S>::acquire_batch_id(&mut txn);
// make a new batch with this instruction included
batches.push(Batch {
network,
id: batch_id,
external_network_block_hash,
instructions: vec![in_instruction],
});
let mut batch = Batch::new(network, batch_id, external_network_block_hash);
batch
.push_instruction(in_instruction)
.expect("single InInstruction exceeded Batch size limit");
batches.push(batch);
// Since we're allocating a new batch, allocate a new set of return addresses for it
return_information.push(vec![]);
}
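
The hunk above replaces the manual `MAX_BATCH_SIZE` check with `Batch::push_instruction`, which now owns that logic. A rough sketch of the semantics the caller relies on, with a simplified stand-in `Batch` (the real type lives in `serai-primitives` and encodes `InInstructionWithBalance`s, so the details differ):

// Assumed value for illustration; the real code uses the MAX_BATCH_SIZE constant.
const MAX_BATCH_SIZE: usize = 25_000;

struct Batch {
  // Stand-in for Vec<InInstructionWithBalance>.
  instructions: Vec<Vec<u8>>,
}

impl Batch {
  // On success the instruction is kept; on error the Batch is left unchanged,
  // which is why the caller can clone the instruction and retry it in a fresh Batch.
  fn push_instruction(&mut self, instruction: Vec<u8>) -> Result<(), ()> {
    self.instructions.push(instruction);
    if self.encoded_len() > MAX_BATCH_SIZE {
      self.instructions.pop();
      return Err(());
    }
    Ok(())
  }

  fn encoded_len(&self) -> usize {
    self.instructions.iter().map(Vec::len).sum()
  }
}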
@@ -157,16 +146,16 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for BatchTask<D, S> {
// Now that we've finalized the Batches, save the information for each to the database
assert_eq!(batches.len(), return_information.len());
for (batch, return_information) in batches.iter().zip(&return_information) {
assert_eq!(batch.instructions.len(), return_information.len());
assert_eq!(batch.instructions().len(), return_information.len());
BatchDb::<S>::save_batch_info(
&mut txn,
batch.id,
batch.id(),
block_number,
session_to_sign_batch,
external_key_for_session_to_sign_batch,
Blake2b::<U32>::digest(batch.instructions.encode()).into(),
Blake2b::<U32>::digest(borsh::to_vec(&batch.instructions()).unwrap()).into(),
);
BatchDb::<S>::save_return_information(&mut txn, batch.id, return_information);
BatchDb::<S>::save_return_information(&mut txn, batch.id(), return_information);
}
for batch in batches {
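
The `save_batch_info` call above now hashes the borsh encoding of the instructions rather than their SCALE encoding. A small sketch of that digest, with a plain `Vec<u64>` standing in for `batch.instructions()` (assumes the `blake2`, `borsh`, and `hex` crates already in the dependency tree):

use blake2::{digest::typenum::U32, Blake2b, Digest};

fn main() {
  // Stand-in for batch.instructions(): Vec<InInstructionWithBalance>.
  let instructions: Vec<u64> = vec![1, 2, 3];

  // Hash the borsh encoding, as the new code does.
  let encoding = borsh::to_vec(&instructions).unwrap();
  let hash: [u8; 32] = Blake2b::<U32>::digest(&encoding).into();
  println!("instructions hash: {}", hex::encode(hash));
}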

View File

@@ -3,13 +3,13 @@ use std::io::{self, Read, Write};
use group::GroupEncoding;
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db, db_channel};
use serai_coins_primitives::OutInstructionWithBalance;
use serai_validator_sets_primitives::Session;
use serai_in_instructions_primitives::{InInstructionWithBalance, Batch};
use serai_primitives::{
validator_sets::Session,
instructions::{InInstructionWithBalance, Batch, OutInstructionWithBalance},
};
use primitives::{EncodableG, ReceivedOutput};
@@ -56,7 +56,7 @@ impl<S: ScannerFeed> OutputWithInInstruction<S> {
(opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(reader)).transpose()?
};
let in_instruction =
InInstructionWithBalance::decode(&mut IoReader(reader)).map_err(io::Error::other)?;
InInstructionWithBalance::deserialize_reader(reader).map_err(io::Error::other)?;
Ok(Self { output, return_address, in_instruction })
}
pub(crate) fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
@@ -67,7 +67,7 @@ impl<S: ScannerFeed> OutputWithInInstruction<S> {
} else {
writer.write_all(&[0])?;
}
self.in_instruction.encode_to(writer);
self.in_instruction.serialize(writer)?;
Ok(())
}
}
@@ -76,10 +76,10 @@ create_db!(
ScannerGlobal {
StartBlock: () -> u64,
QueuedKey: <K: Encode>(key: K) -> (),
QueuedKey: <K: BorshSerialize>(key: K) -> (),
ActiveKeys: <K: Borshy>() -> Vec<SeraiKeyDbEntry<K>>,
RetireAt: <K: Encode>(key: K) -> u64,
RetireAt: <K: BorshSerialize>(key: K) -> u64,
// Highest acknowledged block
HighestAcknowledgedBlock: () -> u64,
@@ -294,7 +294,7 @@ impl<S: ScannerFeed> ScannerGlobalDb<S> {
assert!((opt[0] == 0) || (opt[0] == 1));
let address = (opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(&mut buf).unwrap());
Some((address, InInstructionWithBalance::decode(&mut IoReader(buf)).unwrap()))
Some((address, InInstructionWithBalance::deserialize_reader(&mut buf).unwrap()))
}
}
@@ -357,7 +357,7 @@ impl<S: ScannerFeed> ScanToEventualityDb<S> {
} else {
buf.write_all(&[0]).unwrap();
}
forward.in_instruction.encode_to(&mut buf);
forward.in_instruction.serialize(&mut buf).unwrap();
SerializedForwardedOutput::set(txn, forward.output.id().as_ref(), &buf);
}
@@ -454,7 +454,7 @@ impl<S: ScannerFeed> Returnable<S> {
(opt[0] == 1).then(|| AddressFor::<S>::deserialize_reader(reader)).transpose()?;
let in_instruction =
InInstructionWithBalance::decode(&mut IoReader(reader)).map_err(io::Error::other)?;
InInstructionWithBalance::deserialize_reader(reader).map_err(io::Error::other)?;
Ok(Returnable { return_address, in_instruction })
}
fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
@@ -464,7 +464,7 @@ impl<S: ScannerFeed> Returnable<S> {
} else {
writer.write_all(&[0])?;
}
self.in_instruction.encode_to(writer);
self.in_instruction.serialize(writer)?;
Ok(())
}
}
@@ -494,7 +494,7 @@ impl<S: ScannerFeed> ScanToBatchDb<S> {
block_number: u64,
data: &InInstructionData<S>,
) {
let mut buf = data.session_to_sign_batch.encode();
let mut buf = borsh::to_vec(&data.session_to_sign_batch).unwrap();
buf.extend(data.external_key_for_session_to_sign_batch.to_bytes().as_ref());
for returnable_in_instruction in &data.returnable_in_instructions {
returnable_in_instruction.write(&mut buf).unwrap();
@@ -517,7 +517,7 @@ impl<S: ScannerFeed> ScanToBatchDb<S> {
);
let mut buf = data.returnable_in_instructions.as_slice();
let session_to_sign_batch = Session::decode(&mut buf).unwrap();
let session_to_sign_batch = Session::deserialize_reader(&mut buf).unwrap();
let external_key_for_session_to_sign_batch = {
let mut external_key_for_session_to_sign_batch =
<KeyFor<S> as GroupEncoding>::Repr::default();
@@ -595,7 +595,7 @@ impl SubstrateToEventualityDb {
}
mod _public_db {
use serai_in_instructions_primitives::Batch;
use serai_primitives::instructions::Batch;
use serai_db::{Get, DbTxn, create_db, db_channel};

View File

@@ -1,6 +1,7 @@
use core::marker::PhantomData;
use scale::Encode;
use borsh::BorshSerialize;
use serai_db::{Get, DbTxn, create_db};
use primitives::{EncodableG, ReceivedOutput, Eventuality, EventualityTracker};
@@ -14,7 +15,7 @@ create_db!(
// The latest block this task has handled which was notable
LatestHandledNotableBlock: () -> u64,
SerializedEventualities: <K: Encode>(key: K) -> Vec<u8>,
SerializedEventualities: <K: BorshSerialize>(key: K) -> Vec<u8>,
AccumulatedOutput: (id: &[u8]) -> (),
}
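
This bound change (`Encode` to `BorshSerialize`) mirrors the rest of the commit: key types used with `create_db!` now only need a borsh encoding. As a hypothetical illustration of why that bound suffices, not the actual macro expansion (which lives in `serai-db`), a keyed lookup only needs to turn the key into bytes:

use borsh::BorshSerialize;

// Hypothetical helper: append the borsh encoding of the key to a column prefix.
// The real create_db! macro may construct its keys differently.
fn db_key<K: BorshSerialize>(prefix: &[u8], key: &K) -> Vec<u8> {
  let mut full_key = prefix.to_vec();
  key.serialize(&mut full_key).unwrap();
  full_key
}

fn main() {
  let key = db_key(b"SerializedEventualities", &[0u8; 32]);
  assert_eq!(key.len(), b"SerializedEventualities".len() + 32);
}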

View File

@@ -10,8 +10,10 @@ use group::GroupEncoding;
use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, Db};
use serai_primitives::{ExternalNetworkId, ExternalCoin, Amount};
use serai_coins_primitives::OutInstructionWithBalance;
use serai_primitives::{
network_id::ExternalNetworkId, coin::ExternalCoin, balance::Amount,
instructions::OutInstructionWithBalance,
};
use messages::substrate::ExecutedBatch;
use primitives::{task::*, Address, ReceivedOutput, Block, Payment};

View File

@@ -1,6 +1,6 @@
use serai_db::{Get, DbTxn, create_db};
use serai_validator_sets_primitives::Session;
use serai_primitives::validator_sets::Session;
create_db!(
ScannerBatch {

View File

@@ -2,7 +2,7 @@ use core::{marker::PhantomData, future::Future};
use serai_db::{DbTxn, Db};
use serai_validator_sets_primitives::Session;
use serai_primitives::validator_sets::Session;
use primitives::task::{DoesNotError, ContinuallyRan};
use crate::{
@@ -70,12 +70,12 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ReportTask<D, S> {
// Because this boolean was expanded, we lose short-circuiting. That's fine
let handover_batch = last_session != session_to_sign_batch;
let batch_after_handover_batch =
(last_session == session_to_sign_batch) && ((first_batch + 1) == batch.id);
(last_session == session_to_sign_batch) && ((first_batch + 1) == batch.id());
if handover_batch || batch_after_handover_batch {
let verified_prior_batch = substrate::last_acknowledged_batch::<S>(&txn)
// Since `batch.id = 0` in the Session(0)-never-published-a-Batch case, we don't
// check `last_acknowledged_batch >= (batch.id - 1)` but instead this
.map(|last_acknowledged_batch| (last_acknowledged_batch + 1) >= batch.id)
// Since `batch.id() = 0` in the Session(0)-never-published-a-Batch case, we don't
// check `last_acknowledged_batch >= (batch.id() - 1)` but instead this
.map(|last_acknowledged_batch| (last_acknowledged_batch + 1) >= batch.id())
// We've never verified any Batches
.unwrap_or(false);
if !verified_prior_batch {
@@ -90,7 +90,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ReportTask<D, S> {
BatchDb::set_last_session_to_sign_batch_and_first_batch(
&mut txn,
session_to_sign_batch,
batch.id,
batch.id(),
);
}
}
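
The comment carried over in the first hunk above hides a small but real detail: the check is written as `(last_acknowledged_batch + 1) >= batch.id()` rather than `last_acknowledged_batch >= (batch.id() - 1)` because batch IDs are unsigned and `batch.id()` is 0 when Session(0) never published a Batch. A tiny sketch of the difference (u32 IDs assumed for illustration):

fn main() {
  // Session(0) never published a Batch, so the handover Batch has ID 0 and
  // nothing has been acknowledged yet.
  let id: u32 = 0;
  let last_acknowledged: Option<u32> = None;

  // Written this way, there's no `id - 1`, which would underflow (and panic in
  // debug builds) when `id == 0`.
  let verified_prior_batch = last_acknowledged.map(|last| (last + 1) >= id).unwrap_or(false);
  assert!(!verified_prior_batch);

  // Once Batch 0 is acknowledged, Batch 1 passes the same comparison.
  assert!(Some(0u32).map(|last| (last + 1) >= 1).unwrap_or(false));
}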

View File

@@ -1,12 +1,12 @@
use core::future::Future;
use std::collections::HashMap;
use scale::Decode;
use borsh::BorshDeserialize;
use serai_db::{Get, DbTxn, Db};
use serai_in_instructions_primitives::{
Shorthand, RefundableInInstruction, InInstruction, InInstructionWithBalance,
};
#[rustfmt::skip]
use serai_primitives::instructions::{RefundableInInstruction, InInstruction, InInstructionWithBalance};
use primitives::{task::ContinuallyRan, OutputType, ReceivedOutput, Block};
@@ -55,26 +55,22 @@ fn in_instruction_from_output<S: ScannerFeed>(
let presumed_origin = output.presumed_origin();
let mut data = output.data();
let shorthand = match Shorthand::decode(&mut data) {
Ok(shorthand) => shorthand,
Err(e) => {
log::info!("data in output {} wasn't valid shorthand: {e:?}", hex::encode(output.id()));
return (presumed_origin, None);
}
};
let instruction = match RefundableInInstruction::try_from(shorthand) {
let instruction = match RefundableInInstruction::deserialize_reader(&mut data) {
Ok(instruction) => instruction,
Err(e) => {
log::info!(
"shorthand in output {} wasn't convertible to a RefundableInInstruction: {e:?}",
hex::encode(output.id())
"data in output {} wasn't a valid `RefundableInInstruction`: {e:?}",
hex::encode(output.id()),
);
return (presumed_origin, None);
}
};
(
instruction.origin.and_then(|addr| AddressFor::<S>::try_from(addr).ok()).or(presumed_origin),
instruction
.return_address
.and_then(|addr| AddressFor::<S>::try_from(addr).ok())
.or(presumed_origin),
Some(instruction.instruction),
)
}
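
The rewritten `in_instruction_from_output` above now deserializes a `RefundableInInstruction` straight from the output's data and treats any failure as "no instruction". A minimal sketch of that fallible decode, with a made-up stand-in type (the real one is in `serai_primitives::instructions`):

use borsh::BorshDeserialize;

// Hypothetical stand-in; the real RefundableInInstruction has different fields.
#[derive(Debug, BorshDeserialize)]
struct RefundableInInstruction {
  return_address: Option<Vec<u8>>,
  instruction: u8,
}

fn main() {
  // Output data is arbitrary external-network bytes, so decoding may fail.
  let mut data: &[u8] = &[0xff, 0xff, 0xff];
  match RefundableInInstruction::deserialize_reader(&mut data) {
    Ok(instruction) => println!("parsed: {instruction:?}"),
    // On failure, the scanner logs the error and falls back to the presumed
    // origin with no instruction, as in the hunk above.
    Err(e) => println!("data wasn't a valid `RefundableInInstruction`: {e:?}"),
  }
}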

View File

@@ -5,7 +5,7 @@ use group::GroupEncoding;
use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db, db_channel};
use serai_coins_primitives::OutInstructionWithBalance;
use serai_primitives::instructions::OutInstructionWithBalance;
use messages::substrate::ExecutedBatch;

View File

@@ -2,7 +2,7 @@ use core::{marker::PhantomData, future::Future};
use serai_db::{Get, DbTxn, Db};
use serai_coins_primitives::{OutInstruction, OutInstructionWithBalance};
use serai_primitives::instructions::{OutInstruction, OutInstructionWithBalance};
use messages::substrate::ExecutedBatch;
use primitives::task::{DoesNotError, ContinuallyRan};
@@ -150,7 +150,7 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for SubstrateTask<D, S> {
if let Some(batch::ReturnInformation { address, balance }) = return_information {
burns.push(OutInstructionWithBalance {
instruction: OutInstruction { address: address.into() },
instruction: OutInstruction::Transfer(address.into()),
balance,
});
}