Correct re-attempts for the DKG Confirmation protocol

Also spawns the SetKeys task.
Luke Parker
2025-01-15 17:49:00 -05:00
parent 8b52b921f3
commit 505f1b20a4
5 changed files with 61 additions and 31 deletions

Cargo.lock (generated)

@@ -8323,10 +8323,8 @@ dependencies = [
  "frost-schnorrkel",
  "hex",
  "log",
- "modular-frost",
  "parity-scale-codec",
  "rand_core",
- "schnorr-signatures",
  "schnorrkel",
  "serai-client",
  "serai-coordinator-libp2p-p2p",


@@ -25,13 +25,13 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] }
 blake2 = { version = "0.10", default-features = false, features = ["std"] }
 schnorrkel = { version = "0.11", default-features = false, features = ["std"] }
-ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] }
+ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] }
 dkg = { path = "../crypto/dkg", default-features = false, features = ["std"] }
-schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std"] }
-frost = { package = "modular-frost", path = "../crypto/frost" }
 frost-schnorrkel = { path = "../crypto/schnorrkel" }
+hex = { version = "0.4", default-features = false, features = ["std"] }
 scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive", "bit-vec"] }
+borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
 zalloc = { path = "../common/zalloc" }
 serai-db = { path = "../common/db" }

@@ -44,9 +44,6 @@ tributary-sdk = { path = "./tributary-sdk" }
 serai-client = { path = "../substrate/client", default-features = false, features = ["serai", "borsh"] }
-hex = { version = "0.4", default-features = false, features = ["std"] }
-borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
 log = { version = "0.4", default-features = false, features = ["std"] }
 env_logger = { version = "0.10", default-features = false, features = ["humantime"] }


@@ -24,8 +24,8 @@ use serai_task::{Task, TaskHandle, ContinuallyRan};
 use serai_cosign::{Faulted, SignedCosign, Cosigning};
 use serai_coordinator_substrate::{
-  CanonicalEventStream, EphemeralEventStream, SignSlashReport, SignedBatches, PublishBatchTask,
-  SlashReports, PublishSlashReportTask,
+  CanonicalEventStream, EphemeralEventStream, SignSlashReport, SetKeysTask, SignedBatches,
+  PublishBatchTask, SlashReports, PublishSlashReportTask,
 };
 use serai_coordinator_tributary::{SigningProtocolRound, Signed, Transaction, SubstrateBlockPlans};

@@ -207,7 +207,7 @@ async fn handle_network(
         session,
         substrate_key,
         network_key,
-      } => todo!("TODO Transaction::DkgConfirmationPreprocess"),
+      } => todo!("TODO DkgConfirmationMessages, Transaction::DkgConfirmationPreprocess"),
       messages::key_gen::ProcessorMessage::Blame { session, participant } => {
         RemoveParticipant::send(&mut txn, ValidatorSet { network, session }, participant);
       }

@@ -220,7 +220,6 @@
         let set = ValidatorSet { network, session: id.session };
         if id.attempt == 0 {
           // Batches are declared by their intent to be signed
-          // TODO: Document this in processor <-> coordinator rebuild issue
           if let messages::sign::VariantSignId::Batch(hash) = id.id {
             TributaryTransactions::send(&mut txn, set, &Transaction::Batch { hash });
           }

@@ -469,6 +468,16 @@ async fn main() {
     tokio::spawn(handle_network(db.clone(), message_queue.clone(), serai.clone(), network));
   }
 
+  // Spawn the task to set keys
+  {
+    let (set_keys_task_def, set_keys_task) = Task::new();
+    tokio::spawn(
+      SetKeysTask::new(db.clone(), serai.clone()).continually_run(set_keys_task_def, vec![]),
+    );
+    // Forget its handle so it always runs in the background
+    core::mem::forget(set_keys_task);
+  }
+
   // Spawn the task to publish slash reports
   {
     let (publish_slash_report_task_def, publish_slash_report_task) = Task::new();
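The SetKeys spawn added above follows the coordinator's existing spawn-and-forget pattern, and the diff's own comment states the intent: the handle is forgotten "so it always runs in the background", meaning dropping the handle can never tear the task down. Below is a minimal, self-contained sketch of why `core::mem::forget` has that effect; the `CancelOnDrop` type is hypothetical and stands in for serai-task's actual `TaskHandle`.

```rust
use std::sync::{
  atomic::{AtomicBool, Ordering},
  Arc,
};

// Hypothetical stand-in for a handle whose Drop would cancel background work
struct CancelOnDrop(Arc<AtomicBool>);
impl Drop for CancelOnDrop {
  fn drop(&mut self) {
    // Dropping the handle signals the worker to stop
    self.0.store(true, Ordering::SeqCst);
  }
}

fn main() {
  let cancelled = Arc::new(AtomicBool::new(false));
  let handle = CancelOnDrop(cancelled.clone());
  // Forgetting the handle means its Drop never runs, so the cancellation
  // signal is never sent, mirroring `core::mem::forget(set_keys_task)` above
  core::mem::forget(handle);
  assert!(!cancelled.load(Ordering::SeqCst));
}
```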


@@ -94,9 +94,9 @@ impl Topic {
     }
   }
 
-  // The SignId for this topic
-  //
-  // Returns None if Topic isn't Topic::Sign
+  /// The SignId for this topic
+  ///
+  /// Returns None if Topic isn't Topic::Sign
   pub(crate) fn sign_id(self, set: ValidatorSet) -> Option<messages::sign::SignId> {
     #[allow(clippy::match_same_arms)]
     match self {

@@ -107,6 +107,33 @@
     }
   }
 
+  /// The SignId for this DKG Confirmation.
+  ///
+  /// This is undefined except for being consistent to the DKG Confirmation signing protocol and
+  /// unique across sets.
+  ///
+  /// Returns None if Topic isn't Topic::DkgConfirmation.
+  pub(crate) fn dkg_confirmation_sign_id(
+    self,
+    set: ValidatorSet,
+  ) -> Option<messages::sign::SignId> {
+    #[allow(clippy::match_same_arms)]
+    match self {
+      Topic::RemoveParticipant { .. } => None,
+      Topic::DkgConfirmation { attempt, round: _ } => Some({
+        let id = {
+          let mut id = [0; 32];
+          let encoded_set = set.encode();
+          id[.. encoded_set.len()].copy_from_slice(&encoded_set);
+          VariantSignId::Batch(id)
+        };
+        SignId { session: set.session, id, attempt }
+      }),
+      Topic::SlashReport { .. } => None,
+      Topic::Sign { .. } => None,
+    }
+  }
+
   /// The topic which precedes this topic as a prerequisite
   ///
   /// The preceding topic must define this topic as succeeding

@@ -337,6 +364,12 @@ impl TributaryDb {
     Self::recognize_topic(txn, set, topic);
     if let Some(id) = topic.sign_id(set) {
       Self::send_message(txn, set, messages::sign::CoordinatorMessage::Reattempt { id });
+    } else if let Some(id) = topic.dkg_confirmation_sign_id(set) {
+      DkgConfirmationMessages::send(
+        txn,
+        set,
+        &messages::sign::CoordinatorMessage::Reattempt { id },
+      );
     }
   }
 }
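The new helper gives the DKG Confirmation protocol a `SignId` that is stable across re-attempt rounds and distinct per validator set: the SCALE-encoded `ValidatorSet` is zero-padded into the 32-byte ID, reusing the `VariantSignId::Batch` variant purely as a container (per the doc comment, the value is "undefined except for being consistent ... and unique across sets"). Re-attempts for `Topic::DkgConfirmation` are then routed to `DkgConfirmationMessages` rather than the generic sign-message path used for `Topic::Sign`. A standalone sketch of the ID construction follows, assuming parity-scale-codec with its derive feature; the `ExampleSet` type is hypothetical and stands in for serai-client's `ValidatorSet`.

```rust
use parity_scale_codec::Encode;

// Hypothetical stand-in for serai-client's ValidatorSet (network + session)
#[derive(Encode)]
struct ExampleSet {
  network: u8,
  session: u32,
}

// Mirrors dkg_confirmation_sign_id: the SCALE encoding is copied into the
// front of a zeroed 32-byte array, so the ID is deterministic per set
fn confirmation_id(set: &ExampleSet) -> [u8; 32] {
  let mut id = [0; 32];
  let encoded = set.encode();
  // The encoding is a handful of bytes, well under 32, so this never panics
  id[.. encoded.len()].copy_from_slice(&encoded);
  id
}

fn main() {
  let a = confirmation_id(&ExampleSet { network: 0, session: 5 });
  let b = confirmation_id(&ExampleSet { network: 1, session: 5 });
  // Distinct sets encode differently, so their confirmation IDs differ
  assert_ne!(a, b);
  // The same set always yields the same ID, regardless of the attempt number
  assert_eq!(a, confirmation_id(&ExampleSet { network: 0, session: 5 }));
}
```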


@@ -5,8 +5,6 @@
 use core::{marker::PhantomData, future::Future};
 use std::collections::HashMap;
 
-use scale::Encode;
-
 use ciphersuite::group::GroupEncoding;
 use dkg::Participant;

@@ -184,7 +182,6 @@ impl<'a, TD: Db, TDT: DbTxn, P: P2p> ScanBlock<'a, TD, TDT, P> {
     &mut self,
     block_number: u64,
     topic: Topic,
-    attempt: u32,
     data: &D,
     signer: SeraiAddress,
   ) -> Option<(SignId, HashMap<Participant, Vec<u8>>)> {

@@ -201,14 +198,7 @@
     ) {
       DataSet::None => None,
       DataSet::Participating(data_set) => {
-        // Consistent ID for the DKG confirmation, unqie across sets
-        let id = {
-          let mut id = [0; 32];
-          let encoded_set = self.set.set.encode();
-          id[.. encoded_set.len()].copy_from_slice(&encoded_set);
-          VariantSignId::Batch(id)
-        };
-        let id = SignId { session: self.set.set.session, id, attempt };
+        let id = topic.dkg_confirmation_sign_id(self.set.set).unwrap();
 
         // This will be used in a MuSig protocol, so the Participant indexes are the validator's
         // position in the list regardless of their weight

@@ -222,8 +212,11 @@
           .enumerate()
           .find(|(_i, (validator_i, _weight))| validator == *validator_i)
           .unwrap();
+        // The index is zero-indexed yet participants are one-indexed
+        let index = index + 1;
         entries.insert(
-          Participant::new(u16::try_from(*index).unwrap()).unwrap(),
+          Participant::new(u16::try_from(index).unwrap()).unwrap(),
           participation.as_ref().to_vec(),
         );
       }
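The last hunk above also fixes an off-by-one: `.enumerate()` yields zero-based positions, while, as the added comment says, participants in the MuSig protocol are one-indexed, so the first validator previously mapped to participant 0. A small sketch of the shift, assuming the dkg crate's `Participant::new(u16) -> Option<Participant>` rejects 0 as it does in this codebase:

```rust
use dkg::Participant;

fn main() {
  let validators = ["alice", "bob", "charlie"];
  let mut participants = Vec::new();
  for (i, _validator) in validators.iter().enumerate() {
    // Without the `+ 1`, the first validator would map to Participant::new(0),
    // which is None, and the unwrap would panic
    let index = i + 1;
    participants.push(Participant::new(u16::try_from(index).unwrap()).unwrap());
  }
  assert_eq!(participants.len(), validators.len());
}
```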
@@ -302,12 +295,12 @@
         },
       );
     }
-    Transaction::DkgConfirmationPreprocess { attempt, preprocess, signed } => {
+    Transaction::DkgConfirmationPreprocess { attempt: _, preprocess, signed } => {
       let topic = topic.unwrap();
       let signer = signer(signed);
       let Some((id, data_set)) =
-        self.accumulate_dkg_confirmation(block_number, topic, attempt, &preprocess, signer)
+        self.accumulate_dkg_confirmation(block_number, topic, &preprocess, signer)
       else {
         return;
       };

@@ -318,12 +311,12 @@
         &messages::sign::CoordinatorMessage::Preprocesses { id, preprocesses: data_set },
       );
     }
-    Transaction::DkgConfirmationShare { attempt, share, signed } => {
+    Transaction::DkgConfirmationShare { attempt: _, share, signed } => {
       let topic = topic.unwrap();
       let signer = signer(signed);
       let Some((id, data_set)) =
-        self.accumulate_dkg_confirmation(block_number, topic, attempt, &share, signer)
+        self.accumulate_dkg_confirmation(block_number, topic, &share, signer)
       else {
         return;
       };