Mirror of https://github.com/serai-dex/serai.git
Move FROST to HashMaps
Honestly, the borrowed keys are frustrating, and this probably reduces performance while no longer offering an order when iterating. That said, they enable full u16 indexing and should mildly improve the API. Cleans the Proof of Knowledge handling present in key gen.
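The key generation messages are now keyed by participant index in HashMap<u16, Vec<u8>> maps, with the validate_map helper (imported below) checking each received map before use. As a rough illustration of the role that helper plays, here is a minimal sketch of such a validation routine; the actual signature, error type, and exact checks in serai may differ.

use std::collections::HashMap;

// Hypothetical stand-in for the validate_map helper imported in this diff. It checks that a
// received message map holds exactly the expected participant indexes, then inserts our own
// locally produced entry so later code can index every participant 1 ..= n uniformly.
// Illustration only; the real helper's signature and error type may differ.
fn validate_map<T>(
  map: &mut HashMap<u16, T>,
  expected: &[u16],
  ours: (u16, T),
) -> Result<(), String> {
  // Our entry is produced locally, so a received duplicate means a misbehaving counterparty
  if map.contains_key(&ours.0) {
    return Err(format!("duplicated index {}", ours.0));
  }
  map.insert(ours.0, ours.1);

  // Every expected participant must be present, and nobody else
  for i in expected {
    if !map.contains_key(i) {
      return Err(format!("missing participant {}", i));
    }
  }
  if map.len() != expected.len() {
    return Err("invalid number of participants".to_string());
  }
  Ok(())
}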
@@ -1,16 +1,17 @@
use core::{convert::TryFrom, cmp::min, fmt};
use core::{convert::TryFrom, fmt};
use std::collections::HashMap;

use rand_core::{RngCore, CryptoRng};

use ff::{Field, PrimeField};
use group::Group;

use crate::{Curve, MultisigParams, MultisigKeys, FrostError};
use crate::{Curve, MultisigParams, MultisigKeys, FrostError, validate_map};

#[allow(non_snake_case)]
fn challenge<C: Curve>(l: usize, context: &str, R: &[u8], Am: &[u8]) -> C::F {
fn challenge<C: Curve>(l: u16, context: &str, R: &[u8], Am: &[u8]) -> C::F {
let mut c = Vec::with_capacity(2 + context.len() + R.len() + Am.len());
c.extend(&u16::try_from(l).unwrap().to_be_bytes());
c.extend(l.to_be_bytes());
c.extend(context.as_bytes());
c.extend(R); // R
c.extend(Am); // A of the first commitment, which is what we're proving we have the private key for
@@ -21,48 +22,41 @@ fn challenge<C: Curve>(l: usize, context: &str, R: &[u8], Am: &[u8]) -> C::F {

// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
// the serialized commitments to be broadcasted over an authenticated channel to all parties
// TODO: This potentially could return a much more robust serialized message, including a signature
// of its entirety. The issue is it can't use its own key as it has no chain of custody behind it.
// While we could ask for a key to be passed in, explicitly declaring the need for authenticated
// communications in the API itself, systems will likely already provide an authenticated
// communication method, making this redundant. It also doesn't guarantee the system which passed
// the key is correctly using it, meaning we can only minimize risk so much
// One notable improvement would be to include the index in the message. While the system must
// still track this to determine if it's ready for the next step, and to remove duplicates, it
// would ensure no counterparties presume the same index and this system didn't mislabel a
// counterparty
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &MultisigParams,
context: &str,
) -> (Vec<C::F>, Vec<C::G>, Vec<u8>) {
let mut coefficients = Vec::with_capacity(params.t);
let mut commitments = Vec::with_capacity(params.t);
let mut serialized = Vec::with_capacity((C::G_len() * params.t) + C::G_len() + C::F_len());
for j in 0 .. params.t {
) -> (Vec<C::F>, Vec<u8>) {
let t = usize::from(params.t);
let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t);
let mut serialized = Vec::with_capacity((C::G_len() * t) + C::G_len() + C::F_len());

for i in 0 .. t {
// Step 1: Generate t random values to form a polynomial with
coefficients.push(C::F::random(&mut *rng));
// Step 3: Generate public commitments
commitments.push(C::generator_table() * coefficients[j]);
commitments.push(C::generator_table() * coefficients[i]);
// Serialize them for publication
serialized.extend(&C::G_to_bytes(&commitments[j]));
serialized.extend(&C::G_to_bytes(&commitments[i]));
}

// Step 2: Provide a proof of knowledge
// This can be deterministic as the PoK is a singleton never opened up to cooperative discussion
// There's also no reason to spend the time and effort to make this deterministic besides a
// general obsession with canonicity and determinism
let k = C::F::random(rng);
let r = C::F::random(rng);
#[allow(non_snake_case)]
let R = C::generator_table() * k;
let c = challenge::<C>(params.i, context, &C::G_to_bytes(&R), &serialized);
let s = k + (coefficients[0] * c);
let R = C::generator_table() * r;
let s = r + (
coefficients[0] * challenge::<C>(params.i(), context, &C::G_to_bytes(&R), &serialized)
);

serialized.extend(&C::G_to_bytes(&R));
serialized.extend(&C::F_to_bytes(&s));

// Step 4: Broadcast
(coefficients, commitments, serialized)
(coefficients, serialized)
}
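The proof of knowledge generated above is a Schnorr signature over the polynomial's constant term: R = rG, c = challenge(i, context, R, commitments), s = r + c * a0, later verified by checking sG == R + cA where A = a0 * G is the first commitment. The following toy sketch (not part of this commit) shows that algebra using exponentiation modulo a small prime in place of the curve group and a trivial stand-in for the challenge hash.

// Toy illustration of the proof of knowledge above: a Schnorr signature over the polynomial's
// constant term. Multiplication mod a prime stands in for the curve group and a trivial
// function stands in for the hash-to-scalar challenge; this is for intuition only and is in no
// way secure or part of the serai codebase.
const P: u64 = 2_147_483_647; // toy group modulus (prime)
const Q: u64 = P - 1;         // order of the exponent space
const G: u64 = 5;             // toy generator

// "Scalar multiplication": G^scalar mod P via square-and-multiply
fn mul(base: u64, scalar: u64) -> u64 {
  let (mut res, mut base, mut exp) = (1u64, base % P, scalar);
  while exp > 0 {
    if exp & 1 == 1 { res = res * base % P; }
    base = base * base % P;
    exp >>= 1;
  }
  res
}

// Stand-in for challenge::<C>(l, context, R, Am)
fn challenge(l: u16, context: &str, r_pub: u64, a_pub: u64) -> u64 {
  (u64::from(l) + context.len() as u64 + r_pub + a_pub) % Q
}

fn main() {
  let a0 = 123_456u64;     // secret: coefficients[0]
  let a_pub = mul(G, a0);  // A: the first commitment, a0 * G

  // Prove knowledge of a0: R = r * G, s = r + c * a0
  let r = 987_654u64;
  let r_pub = mul(G, r);
  let c = challenge(1, "example context", r_pub, a_pub);
  let s = (r + (a0 * c) % Q) % Q;

  // Verify: s * G == R + c * A (written multiplicatively for this toy group)
  assert_eq!(mul(G, s), r_pub * mul(a_pub, c) % P);
}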

// Verify the received data from the first round of key generation
@@ -70,69 +64,48 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &MultisigParams,
context: &str,
our_commitments: Vec<C::G>,
serialized: &[Vec<u8>],
) -> Result<Vec<Vec<C::G>>, FrostError> {
// Deserialize all of the commitments, validating the input buffers as needed
if serialized.len() != (params.n + 1) {
Err(
// Prevents a panic if serialized.len() == 0
FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
)?;
}
our_commitments: Vec<u8>,
mut serialized: HashMap<u16, Vec<u8>>,
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
validate_map(
&mut serialized,
&(1 ..= params.n()).into_iter().collect::<Vec<_>>(),
(params.i(), our_commitments)
)?;

// Expect a null set of commitments for index 0 so the vector is guaranteed to line up with
// actual indexes. Even if we did the offset internally, the system would need to write the vec
// with the same offset in mind. Therefore, this trick which is probably slightly less efficient
// yet keeps everything simple is preferred
if serialized[0] != vec![] {
Err(FrostError::NonEmptyParticipantZero)?;
}
let commitments_len = usize::from(params.t()) * C::G_len();

let commitments_len = params.t * C::G_len();
let mut commitments = Vec::with_capacity(params.n + 1);
commitments.push(vec![]);
let mut commitments = HashMap::new();

#[allow(non_snake_case)]
let R_bytes = |l| &serialized[&l][commitments_len .. commitments_len + C::G_len()];
#[allow(non_snake_case)]
let R = |l| C::G_from_slice(R_bytes(l)).map_err(|_| FrostError::InvalidProofOfKnowledge(l));
#[allow(non_snake_case)]
let Am = |l| &serialized[&l][0 .. commitments_len];

let s = |l| C::F_from_slice(
&serialized[&l][commitments_len + C::G_len() ..]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l));

let signature_len = C::G_len() + C::F_len();
let mut first = true;
let mut scalars = Vec::with_capacity((params.n - 1) * 3);
let mut points = Vec::with_capacity((params.n - 1) * 3);
for l in 1 ..= params.n {
if l == params.i {
if serialized[l].len() != 0 {
Err(FrostError::DuplicatedIndex(l))?;
}
commitments.push(vec![]);
continue;
}

if serialized[l].len() != (commitments_len + signature_len) {
// Return an error with an approximation for how many commitments were included
// Prevents errors if not even the signature was included
if serialized[l].len() < signature_len {
Err(FrostError::InvalidCommitmentQuantity(l, params.t, 0))?;
}

Err(
FrostError::InvalidCommitmentQuantity(
l,
params.t,
// Could technically be x.y despite this returning x, yet any y is negligible
// It could help with debugging to know a partial piece of data was read but this error
// alone should be enough
(serialized[l].len() - signature_len) / C::G_len()
)
)?;
}

commitments.push(Vec::with_capacity(params.t));
for o in 0 .. params.t {
commitments[l].push(
let mut scalars = Vec::with_capacity((usize::from(params.n()) - 1) * 3);
let mut points = Vec::with_capacity((usize::from(params.n()) - 1) * 3);
for l in 1 ..= params.n() {
let mut these_commitments = vec![];
for c in 0 .. usize::from(params.t()) {
these_commitments.push(
C::G_from_slice(
&serialized[l][(o * C::G_len()) .. ((o + 1) * C::G_len())]
).map_err(|_| FrostError::InvalidCommitment(l))?
&serialized[&l][(c * C::G_len()) .. ((c + 1) * C::G_len())]
).map_err(|_| FrostError::InvalidCommitment(l.try_into().unwrap()))?
);
}
commitments.insert(l, these_commitments);

// Don't bother validating our own proof of knowledge
if l == params.i() {
continue;
}

// Step 5: Validate each proof of knowledge (prep)
let mut u = C::F::one();
@@ -140,62 +113,35 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
u = C::F::random(&mut *rng);
}

// uR
scalars.push(u);
points.push(
C::G_from_slice(
&serialized[l][commitments_len .. commitments_len + C::G_len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?
);
points.push(R(l)?);

scalars.push(
-C::F_from_slice(
&serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))? * u
);
// -usG
scalars.push(-s(l)? * u);
points.push(C::generator());

let c = challenge::<C>(
l,
context,
&serialized[l][commitments_len .. commitments_len + C::G_len()],
&serialized[l][0 .. commitments_len]
);

if first {
scalars.push(c);
first = false;
} else {
scalars.push(c * u);
}
points.push(commitments[l][0]);
// ucA
let c = challenge::<C>(l, context, R_bytes(l), Am(l));
scalars.push(if first { first = false; c } else { c * u});
points.push(commitments[&l][0]);
}

// Step 5: Implementation
// Uses batch verification to optimize the success case dramatically
// On failure, the cost is now this + blame, yet that should happen infrequently
// s = r + ca
// sG == R + cA
// R + cA - sG == 0
if C::multiexp_vartime(&scalars, &points) != C::G::identity() {
for l in 1 ..= params.n {
if l == params.i {
for l in 1 ..= params.n() {
if l == params.i() {
continue;
}

#[allow(non_snake_case)]
let R = C::G_from_slice(
&serialized[l][commitments_len .. commitments_len + C::G_len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;

let s = C::F_from_slice(
&serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;

let c = challenge::<C>(
l,
context,
&serialized[l][commitments_len .. commitments_len + C::G_len()],
&serialized[l][0 .. commitments_len]
);

if R != ((C::generator_table() * s) + (commitments[l][0] * (C::F::zero() - &c))) {
if (C::generator_table() * s(l)?) != (
R(l)? + (commitments[&l][0] * challenge::<C>(l, context, R_bytes(l), Am(l)))
) {
Err(FrostError::InvalidProofOfKnowledge(l))?;
}
}
@@ -203,22 +149,19 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
Err(FrostError::InternalError("batch validation is broken".to_string()))?;
}

// Write in our own commitments
commitments[params.i] = our_commitments;

Ok(commitments)
}
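verify_r1 above folds every participant's check R + cA - sG == 0 into a single multiexp by weighting each equation with a random scalar u (the first weight fixed to one), falling back to per-participant checks only to assign blame when the aggregate is non-zero. The same toy modular group as in the earlier sketch (again purely illustrative, not the serai code) shows why the aggregated equation vouches for all of the individual ones.

// Toy illustration of the batch verification above: several Schnorr checks are combined with
// weights so one aggregated equation vouches for all of them. Modular exponentiation stands in
// for the curve group; this is not the serai code and is not secure.
const P: u64 = 2_147_483_647;
const Q: u64 = P - 1;
const G: u64 = 5;

fn mul(base: u64, scalar: u64) -> u64 {
  let (mut res, mut base, mut exp) = (1u64, base % P, scalar);
  while exp > 0 {
    if exp & 1 == 1 { res = res * base % P; }
    base = base * base % P;
    exp >>= 1;
  }
  res
}

fn main() {
  // Two participants' proofs of knowledge, built from (a, r, c) as (A, R, c, s) with s = r + c * a
  let proofs = [(123u64, 456u64, 11u64), (789u64, 321u64, 13u64)]
    .map(|(a, r, c)| (mul(G, a), mul(G, r), c, (r + (a * c) % Q) % Q));

  // Individual check: s * G == R + c * A for every proof
  for &(a_pub, r_pub, c, s) in proofs.iter() {
    assert_eq!(mul(G, s), r_pub * mul(a_pub, c) % P);
  }

  // Batched check: with weights u (the first fixed to 1, the rest random in the real code),
  // sum(u * s) * G == sum(u * (R + c * A)), verified with one combined equation
  let weights = [1u64, 7u64];
  let mut lhs_exp = 0u64;
  let mut rhs = 1u64;
  for (&(a_pub, r_pub, c, s), u) in proofs.iter().zip(weights) {
    lhs_exp = (lhs_exp + (u * s) % Q) % Q;
    rhs = rhs * mul(r_pub * mul(a_pub, c) % P, u) % P;
  }
  assert_eq!(mul(G, lhs_exp), rhs);
}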

fn polynomial<F: PrimeField>(
coefficients: &[F],
i: usize
l: u16
) -> F {
let i = F::from(u64::try_from(i).unwrap());
let l = F::from(u64::from(l));
let mut share = F::zero();
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
share += coefficient;
if idx != (coefficients.len() - 1) {
share *= i;
share *= l;
}
}
share
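polynomial above evaluates the dealt polynomial at a participant's index with Horner's method: iterating the coefficients from highest degree down, the running total is multiplied by l before each further addition, so no powers of l are computed explicitly. A small integer-only example (illustrative, not part of the commit) of the same loop:

// Horner evaluation over plain integers, mirroring the field-based `polynomial` above
fn polynomial(coefficients: &[u64], l: u64) -> u64 {
  let mut share = 0u64;
  for (idx, coefficient) in coefficients.iter().rev().enumerate() {
    share += coefficient;
    if idx != (coefficients.len() - 1) {
      share *= l;
    }
  }
  share
}

fn main() {
  // f(x) = 7 + 3x + 2x^2, stored lowest degree first as in the key generation code
  let coefficients = [7, 3, 2];
  assert_eq!(polynomial(&coefficients, 5), 7 + 3 * 5 + 2 * 5 * 5);
}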
@@ -232,27 +175,25 @@ fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
params: &MultisigParams,
context: &str,
coefficients: Vec<C::F>,
our_commitments: Vec<C::G>,
commitments: &[Vec<u8>],
) -> Result<(C::F, Vec<Vec<C::G>>, Vec<Vec<u8>>), FrostError> {
our_commitments: Vec<u8>,
commitments: HashMap<u16, Vec<u8>>,
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> {
let commitments = verify_r1::<R, C>(rng, params, context, our_commitments, commitments)?;

// Step 1: Generate secret shares for all other parties
let mut res = Vec::with_capacity(params.n + 1);
res.push(vec![]);
for i in 1 ..= params.n {
// Don't push our own to the byte buffer which is meant to be sent around
let mut res = HashMap::new();
for l in 1 ..= params.n() {
// Don't insert our own shares to the byte buffer which is meant to be sent around
// An app developer could accidentally send it. Best to keep this black boxed
if i == params.i {
res.push(vec![]);
continue
if l == params.i() {
continue;
}

res.push(C::F_to_bytes(&polynomial(&coefficients, i)));
res.insert(l, C::F_to_bytes(&polynomial(&coefficients, l)));
}

// Calculate our own share
let share = polynomial(&coefficients, params.i);
let share = polynomial(&coefficients, params.i());

// The secret shares are discarded here, not cleared. While any system which leaves its memory
// accessible is likely totally lost already, making the distinction meaningless when the key gen
@@ -273,87 +214,67 @@ fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
fn complete_r2<C: Curve>(
params: MultisigParams,
share: C::F,
commitments: &[Vec<C::G>],
commitments: HashMap<u16, Vec<C::G>>,
// Vec to preserve ownership
serialized: Vec<Vec<u8>>,
mut serialized: HashMap<u16, Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
validate_map(
&mut serialized,
&(1 ..= params.n()).into_iter().collect::<Vec<_>>(),
(params.i(), C::F_to_bytes(&share))
)?;

// Step 2. Verify each share
if serialized.len() != (params.n + 1) {
Err(
FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
)?;
let mut shares = HashMap::new();
for (l, share) in serialized {
shares.insert(l, C::F_from_slice(&share).map_err(|_| FrostError::InvalidShare(params.i()))?);
}

if (commitments[0].len() != 0) || (serialized[0].len() != 0) {
Err(FrostError::NonEmptyParticipantZero)?;
}

// Deserialize them
let mut shares: Vec<C::F> = vec![C::F::zero()];
for i in 1 .. serialized.len() {
if i == params.i {
if serialized[i].len() != 0 {
Err(FrostError::DuplicatedIndex(i))?;
}
shares.push(C::F::zero());
continue;
}
shares.push(C::F_from_slice(&serialized[i]).map_err(|_| FrostError::InvalidShare(i))?);
}

for l in 1 ..= params.n {
if l == params.i {
for (l, share) in &shares {
if *l == params.i() {
continue;
}

let i_scalar = C::F::from(u64::try_from(params.i).unwrap());
let i_scalar = C::F::from(params.i.into());
let mut exp = C::F::one();
let mut exps = Vec::with_capacity(params.t);
for _ in 0 .. params.t {
let mut exps = Vec::with_capacity(usize::from(params.t()));
for _ in 0 .. params.t() {
exps.push(exp);
exp *= i_scalar;
}

// Doesn't use multiexp_vartime with -shares[l] due to not being able to push to commitments
if C::multiexp_vartime(&exps, &commitments[l]) != (C::generator_table() * shares[l]) {
Err(FrostError::InvalidCommitment(l))?;
if C::multiexp_vartime(&exps, &commitments[&l]) != (C::generator_table() * *share) {
Err(FrostError::InvalidCommitment(*l))?;
}
}

// TODO: Clear the original share

let mut secret_share = share;
for remote_share in shares {
secret_share += remote_share;
let mut secret_share = C::F::zero();
for (_, share) in shares {
secret_share += share;
}

let mut verification_shares = vec![C::G::identity()];
for i in 1 ..= params.n {
let mut verification_shares = HashMap::new();
for l in 1 ..= params.n() {
let mut exps = vec![];
let mut cs = vec![];
for j in 1 ..= params.n {
for k in 0 .. params.t {
for i in 1 ..= params.n() {
for j in 0 .. params.t() {
let mut exp = C::F::one();
for _ in 0 .. k {
exp *= C::F::from(u64::try_from(i).unwrap());
for _ in 0 .. j {
exp *= C::F::from(u64::try_from(l).unwrap());
}
exps.push(exp);
cs.push(commitments[j][k]);
cs.push(commitments[&i][usize::from(j)]);
}
}
verification_shares.push(C::multiexp_vartime(&exps, &cs));
verification_shares.insert(l, C::multiexp_vartime(&exps, &cs));
}
debug_assert_eq!(C::generator_table() * secret_share, verification_shares[&params.i()]);

debug_assert_eq!(
C::generator_table() * secret_share,
verification_shares[params.i]
);

let mut group_key = C::G::identity();
for j in 1 ..= params.n {
group_key += commitments[j][0];
}
let group_key = commitments.iter().map(|(_, commitments)| commitments[0]).sum();

// TODO: Clear serialized and shares

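complete_r2 above can simply sum the received shares because each participant's final share is the joint polynomial F, the sum of every dealer's polynomial, evaluated at that participant's index; correspondingly, the group key is the sum of every dealer's first commitment. A toy, integer-only illustration (not the serai code) of that fact:

// Each dealer contributes a polynomial whose constant term is that dealer's secret; summing
// the shares sent to one participant yields the joint polynomial evaluated at its index.
fn eval(coefficients: &[u64], x: u64) -> u64 {
  coefficients.iter().enumerate().map(|(k, c)| c * x.pow(k as u32)).sum()
}

fn main() {
  // Two dealers, each with a degree-1 polynomial (constant term = that dealer's secret)
  let dealers = [vec![11, 3], vec![20, 5]];
  // Participant 1's final share is the sum of what each dealer sent it...
  let share_1: u64 = dealers.iter().map(|f| eval(f, 1)).sum();
  // ... which equals the joint polynomial F = f_1 + f_2 evaluated at 1
  assert_eq!(share_1, eval(&[11 + 20, 3 + 5], 1));
  // and F(0), the group secret no single party holds, is the sum of the constant terms
  assert_eq!(eval(&[11 + 20, 3 + 5], 0), 11 + 20);
}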
@@ -382,9 +303,9 @@ pub struct StateMachine<C: Curve> {
context: String,
state: State,
coefficients: Option<Vec<C::F>>,
our_commitments: Option<Vec<C::G>>,
our_commitments: Option<Vec<u8>>,
secret: Option<C::F>,
commitments: Option<Vec<Vec<C::G>>>
commitments: Option<HashMap<u16, Vec<C::G>>>
}

impl<C: Curve> StateMachine<C> {
@@ -413,14 +334,14 @@ impl<C: Curve> StateMachine<C> {
Err(FrostError::InvalidKeyGenTransition(State::Fresh, self.state))?;
}

let (coefficients, commitments, serialized) = generate_key_r1::<R, C>(
let (coefficients, serialized) = generate_key_r1::<R, C>(
rng,
&self.params,
&self.context,
);

self.coefficients = Some(coefficients);
self.our_commitments = Some(commitments);
self.our_commitments = Some(serialized.clone());
self.state = State::GeneratedCoefficients;
Ok(serialized)
}
@@ -433,8 +354,8 @@ impl<C: Curve> StateMachine<C> {
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
commitments: Vec<Vec<u8>>,
) -> Result<Vec<Vec<u8>>, FrostError> {
commitments: HashMap<u16, Vec<u8>>,
) -> Result<HashMap<u16, Vec<u8>>, FrostError> {
if self.state != State::GeneratedCoefficients {
Err(FrostError::InvalidKeyGenTransition(State::GeneratedCoefficients, self.state))?;
}
@@ -445,7 +366,7 @@ impl<C: Curve> StateMachine<C> {
&self.context,
self.coefficients.take().unwrap(),
self.our_commitments.take().unwrap(),
&commitments,
commitments,
)?;

self.secret = Some(secret);
@@ -462,8 +383,8 @@ impl<C: Curve> StateMachine<C> {
/// wait for all participants to report as such
pub fn complete(
&mut self,
shares: Vec<Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
shares: HashMap<u16, Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
if self.state != State::GeneratedSecretShares {
Err(FrostError::InvalidKeyGenTransition(State::GeneratedSecretShares, self.state))?;
}
@@ -471,7 +392,7 @@ impl<C: Curve> StateMachine<C> {
let keys = complete_r2(
self.params,
self.secret.take().unwrap(),
&self.commitments.take().unwrap(),
self.commitments.take().unwrap(),
shares,
)?;