Merge branch 'develop' into next
This resolves the conflicts and gets the workspace `Cargo.toml`s to not be invalid. It doesn't actually get clippy to pass again yet. Does move `crypto/dkg/src/evrf` into a new `crypto/dkg/evrf` crate (which does not yet compile).
68 crypto/dkg/evrf/Cargo.toml Normal file
@@ -0,0 +1,68 @@
[package]
name = "dkg-evrf"
version = "0.1.0"
description = "Distributed key generation over ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/evrf"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.81"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
thiserror = { version = "2", default-features = false }

rand_core = { version = "0.6", default-features = false }

zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }

transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["recommended"] }

ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false }
multiexp = { path = "../../multiexp", version = "0.4", default-features = false }

generic-array = { version = "1", default-features = false, features = ["alloc"] }
blake2 = { version = "0.10", default-features = false, features = ["std"] }
rand_chacha = { version = "0.3", default-features = false, features = ["std"] }
generalized-bulletproofs = { git = "https://github.com/kayabaNerve/monero-oxide", rev = "b6dd1a9ff7ac6b96eb7cb488a4501fd1f6f2dd1e", default-features = false }
ec-divisors = { git = "https://github.com/kayabaNerve/monero-oxide", rev = "b6dd1a9ff7ac6b96eb7cb488a4501fd1f6f2dd1e", default-features = false }
generalized-bulletproofs-circuit-abstraction = { git = "https://github.com/kayabaNerve/monero-oxide", rev = "b6dd1a9ff7ac6b96eb7cb488a4501fd1f6f2dd1e" }
generalized-bulletproofs-ec-gadgets = { git = "https://github.com/kayabaNerve/monero-oxide", rev = "b6dd1a9ff7ac6b96eb7cb488a4501fd1f6f2dd1e" }

dkg = { path = ".." }

secq256k1 = { path = "../../evrf/secq256k1", optional = true }
embedwards25519 = { path = "../../evrf/embedwards25519", optional = true }

[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
rand = { version = "0.8", default-features = false, features = ["std"] }
ciphersuite = { path = "../../ciphersuite", default-features = false, features = ["std"] }
dalek-ff-group = { path = "../../dalek-ff-group", default-features = false, features = ["std"] }
generalized-bulletproofs = { git = "https://github.com/kayabaNerve/monero-oxide", rev = "b6dd1a9ff7ac6b96eb7cb488a4501fd1f6f2dd1e", features = ["tests"] }
ec-divisors = { git = "https://github.com/kayabaNerve/monero-oxide", rev = "b6dd1a9ff7ac6b96eb7cb488a4501fd1f6f2dd1e" }

[features]
std = [
  "thiserror/std",

  "rand_core/std",

  "transcript/std",

  "ciphersuite/std",
  "multiexp/std",
  "multiexp/batch",
]
secp256k1 = ["secq256k1"]
ed25519 = ["embedwards25519"]
ristretto = ["embedwards25519"]
tests = ["rand_core/getrandom"]
default = ["std"]
585 crypto/dkg/evrf/src/lib.rs Normal file
@@ -0,0 +1,585 @@
/*
  We implement a DKG using an eVRF, as detailed in the eVRF paper. For the eVRF itself, we do not
  use a Paillier-based construction, nor the detailed construction premised on a Bulletproof.

  For reference, the detailed construction premised on a Bulletproof involves two curves, notated
  here as `C` and `E`, where the scalar field of `C` is the field of `E`. Accordingly, Bulletproofs
  over `C` can efficiently perform group operations of points of curve `E`. Each participant has a
  private point (`P_i`) on curve `E` committed to over curve `C`. The eVRF selects a pair of
  scalars `a, b`, where the participant proves in-Bulletproof the points `A_i, B_i` are
  `a * P_i, b * P_i`. The eVRF proceeds to commit to `A_i.x + B_i.x` in a Pedersen Commitment.

  Our eVRF uses
  [Generalized Bulletproofs](
    https://repo.getmonero.org/monero-project/ccs-proposals
      /uploads/a9baa50c38c6312efc0fea5c6a188bb9/gbp.pdf
  ).
  This allows us much larger witnesses without growing the reference string, and enables us to
  efficiently sample challenges off in-circuit variables (via placing the variables in a vector
  commitment, then challenging from a transcript of the commitments). We proceed to use
  [elliptic curve divisors](
    https://repo.getmonero.org/-/project/54/
      uploads/eb1bf5b4d4855a3480c38abf895bd8e8/Veridise_Divisor_Proofs.pdf
  )
  (which require the ability to sample a challenge off in-circuit variables) to prove discrete
  logarithms efficiently.

  This is done via having a private scalar (`p_i`) on curve `E`, not a private point, and
  publishing the public key for it (`P_i = p_i * G`, where `G` is a generator of `E`). The eVRF
  samples two points with unknown discrete logarithms `A, B`, and the circuit proves a Pedersen
  Commitment commits to `(p_i * A).x + (p_i * B).x`.

  With the eVRF established, we now detail our other novel aspect. The eVRF paper expects secret
  shares to be sent to the other parties yet does not detail a precise way to do so. If we
  encrypted the secret shares with some stream cipher, each recipient would have to attest
  validity or accuse the sender of impropriety. We want an encryption scheme where anyone can
  verify the secret shares were encrypted properly, without additional info, efficiently.

  Please note from the published commitments, it's possible to calculate a commitment to the
  secret share each party should receive (`V_i`).

  We have the sender sample two scalars per recipient, denoted `x_i, y_i` (where `i` is the
  recipient index). They perform the eVRF to prove a Pedersen Commitment commits to
  `z_i = (x_i * P_i).x + (y_i * P_i).x` and `x_i, y_i` are the discrete logarithms of `X_i, Y_i`
  over `G`. They then publish the encrypted share `s_i + z_i` and `X_i, Y_i`.

  The recipient is able to decrypt the share via calculating
  `s_i - ((p_i * X_i).x + (p_i * Y_i).x)`.

  To verify the secret share, we have the `F` terms of the Pedersen Commitments revealed (where
  `F, H` are generators of `C`, `F` is used for binding and `H` for blinding). This already needs
  to be done for the eVRF outputs used within the DKG, in order to obtain the commitments to the
  coefficients. When we have the commitment `Z_i = ((p_i * A).x + (p_i * B).x) * F`, we simply
  check `s_i * F = Z_i + V_i`.

  In order to open the Pedersen Commitments to their `F` terms, we transcript the commitments and
  the claimed openings, then assign random weights to each pair of `(commitment, opening)`. The
  prover proves knowledge of the discrete logarithm of the sum of the weighted commitments, minus
  the sum of the weighted openings, over `H`.

  The benefit to this construction is that given a broadcast channel which is reliable and
  ordered, only `t` messages must be broadcast from honest parties in order to create a `t`-of-`n`
  multisig. If the encrypted secret shares were not verifiable, one would need at least `t + n`
  messages to ensure every participant has a correct dealing and can participate in future
  reconstructions of the secret. This would also require all `n` parties be online, whereas this
  is robust to threshold `t`.
*/
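// A minimal sketch of that decryption step, assuming hypothetical bindings `p_i` (our embedded
// curve private scalar), `X_i`/`Y_i` (the sender's published ephemeral keys), and `s_i` (the
// encrypted share); `EvrfDkg::keys` below performs this for every dealer:
//
//   let (x_a, _) = <C::EmbeddedCurve as Ciphersuite>::G::to_xy(X_i * p_i).unwrap();
//   let (x_b, _) = <C::EmbeddedCurve as Ciphersuite>::G::to_xy(Y_i * p_i).unwrap();
//   let share = s_i - (x_a + x_b);
//
// This recovers the share since `x_i * P_i = x_i * p_i * G = p_i * X_i` (and likewise for `y_i`),
// so the recipient recomputes the exact mask `z_i` the sender committed to.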

use core::ops::Deref;
use std::{
  io::{self, Read, Write},
  collections::{HashSet, HashMap},
};

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, Zeroizing};

use blake2::{Digest, Blake2s256};
use ciphersuite::{
  group::{
    ff::{Field, PrimeField},
    Group, GroupEncoding,
  },
  Ciphersuite,
};
use multiexp::multiexp_vartime;

use generalized_bulletproofs::{Generators, arithmetic_circuit_proof::*};
use ec_divisors::DivisorCurve;

use dkg::{Participant, ThresholdParams, Interpolation, ThresholdCore, ThresholdKeys};

pub(crate) mod proof;
use proof::*;
pub use proof::{EvrfCurve, EvrfGenerators};

/// Participation in the DKG.
///
/// `Participation` is meant to be broadcast to all other participants over an authenticated,
/// reliable broadcast channel.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Participation<C: Ciphersuite> {
  proof: Vec<u8>,
  encrypted_secret_shares: HashMap<Participant, C::F>,
}

impl<C: Ciphersuite> Participation<C> {
  pub fn read<R: Read>(reader: &mut R, n: u16) -> io::Result<Self> {
    // TODO: Replace `len` with some calculation deterministic to the params
    let mut len = [0; 4];
    reader.read_exact(&mut len)?;
    let len = usize::try_from(u32::from_le_bytes(len)).expect("<32-bit platform?");

    // Don't allocate a buffer for the claimed length
    // Read chunks until we reach the claimed length
    // This means if we were told to read GB, we must actually be sent GB before allocating as such
    const CHUNK_SIZE: usize = 1024;
    let mut proof = Vec::with_capacity(len.min(CHUNK_SIZE));
    while proof.len() < len {
      let next_chunk = (len - proof.len()).min(CHUNK_SIZE);
      let old_proof_len = proof.len();
      proof.resize(old_proof_len + next_chunk, 0);
      reader.read_exact(&mut proof[old_proof_len ..])?;
    }

    let mut encrypted_secret_shares = HashMap::with_capacity(usize::from(n));
    for i in (1 ..= n).map(Participant) {
      encrypted_secret_shares.insert(i, C::read_F(reader)?);
    }

    Ok(Self { proof, encrypted_secret_shares })
  }

  pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(&u32::try_from(self.proof.len()).unwrap().to_le_bytes())?;
    writer.write_all(&self.proof)?;
    for i in (1 ..= u16::try_from(self.encrypted_secret_shares.len())
      .expect("writing a Participation which has a n > u16::MAX"))
      .map(Participant)
    {
      writer.write_all(self.encrypted_secret_shares[&i].to_repr().as_ref())?;
    }
    Ok(())
  }
}

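/// Evaluate the secret-sharing polynomial at participant `l` via Horner's method.
///
/// `coefficients[0]` is the constant term, so for coefficients `[c0, c1, c2]` this yields
/// `c0 + (c1 * l) + (c2 * l^2)`.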
fn polynomial<F: PrimeField + Zeroize>(
  coefficients: &[Zeroizing<F>],
  l: Participant,
) -> Zeroizing<F> {
  let l = F::from(u64::from(u16::from(l)));
  // This should never be reached since Participant is explicitly non-zero
  assert!(l != F::ZERO, "zero participant passed to polynomial");
  let mut share = Zeroizing::new(F::ZERO);
  for (idx, coefficient) in coefficients.iter().rev().enumerate() {
    *share += coefficient.deref();
    if idx != (coefficients.len() - 1) {
      *share *= l;
    }
  }
  share
}

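// Build a multiexp statement which, for random weights `w_i`, checks
//   sum_i w_i * ((s_i * F) - Z_i - V_i) == identity
// where `s_i` is participant i's encrypted share, `Z_i` is the commitment to its encryption mask,
// and `V_i = sum_j i^j * commitments[j]` is the interpolated commitment to the share itself.
// The returned scalar is the weight to place on the group generator, and the pairs are the
// weighted commitments.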
#[allow(clippy::type_complexity)]
fn share_verification_statements<C: Ciphersuite>(
  rng: &mut (impl RngCore + CryptoRng),
  commitments: &[C::G],
  n: u16,
  encryption_commitments: &[C::G],
  encrypted_secret_shares: &HashMap<Participant, C::F>,
) -> (C::F, Vec<(C::F, C::G)>) {
  debug_assert_eq!(usize::from(n), encryption_commitments.len());
  debug_assert_eq!(usize::from(n), encrypted_secret_shares.len());

  let mut g_scalar = C::F::ZERO;
  let mut pairs = Vec::with_capacity(commitments.len() + encryption_commitments.len());
  for commitment in commitments {
    pairs.push((C::F::ZERO, *commitment));
  }

  let mut weight;
  for (i, enc_share) in encrypted_secret_shares {
    let enc_commitment = encryption_commitments[usize::from(u16::from(*i)) - 1];

    weight = C::F::random(&mut *rng);

    // s_i F
    g_scalar += weight * enc_share;
    // - Z_i
    let weight = -weight;
    pairs.push((weight, enc_commitment));
    // - V_i
    {
      let i = C::F::from(u64::from(u16::from(*i)));
      // The first `commitments.len()` pairs are for the commitments
      (0 .. commitments.len()).fold(weight, |exp, j| {
        pairs[j].0 += exp;
        exp * i
      });
    }
  }

  (g_scalar, pairs)
}

/// Errors from the eVRF DKG.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum EvrfError {
  #[error("n, the amount of participants, exceeded a u16")]
  TooManyParticipants,
  #[error("the threshold t wasn't in range 1 <= t <= n")]
  InvalidThreshold,
  #[error("a public key was the identity point")]
  PublicKeyWasIdentity,
  #[error("participating in a DKG we aren't a participant in")]
  NotAParticipant,
  #[error("a participant with an unrecognized ID participated")]
  NonExistentParticipant,
  #[error("the passed in generators did not have enough generators for this DKG")]
  NotEnoughGenerators,
}

/// The result of calling EvrfDkg::verify.
pub enum VerifyResult<C: EvrfCurve> {
  Valid(EvrfDkg<C>),
  Invalid(Vec<Participant>),
  NotEnoughParticipants,
}
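// A rough sketch of how a caller might consume this result; the surrounding bindings
// (`rng`, `generators`, `context`, `t`, `evrf_public_keys`, `participations`) are hypothetical:
//
//   match EvrfDkg::verify(&mut rng, &generators, context, t, &evrf_public_keys, &participations)? {
//     VerifyResult::Valid(dkg) => { /* proceed to `dkg.keys(..)` */ }
//     VerifyResult::Invalid(faulty) => { /* exclude the faulty participants and retry */ }
//     VerifyResult::NotEnoughParticipants => { /* wait for more `Participation`s */ }
//   }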

/// Struct to perform/verify the DKG with.
#[derive(Debug)]
pub struct EvrfDkg<C: EvrfCurve> {
  t: u16,
  n: u16,
  evrf_public_keys: Vec<<C::EmbeddedCurve as Ciphersuite>::G>,
  group_key: C::G,
  verification_shares: HashMap<Participant, C::G>,
  #[allow(clippy::type_complexity)]
  encrypted_secret_shares:
    HashMap<Participant, HashMap<Participant, ([<C::EmbeddedCurve as Ciphersuite>::G; 2], C::F)>>,
}

impl<C: EvrfCurve> EvrfDkg<C> {
  // Form the initial transcript for the proofs.
  fn initial_transcript(
    invocation: [u8; 32],
    evrf_public_keys: &[<C::EmbeddedCurve as Ciphersuite>::G],
    t: u16,
  ) -> [u8; 32] {
    let mut transcript = Blake2s256::new();
    transcript.update(invocation);
    for key in evrf_public_keys {
      transcript.update(key.to_bytes().as_ref());
    }
    transcript.update(t.to_le_bytes());
    transcript.finalize().into()
  }

  /// Participate in performing the DKG for the specified parameters.
  ///
  /// The context MUST be unique across invocations. Reuse of context will lead to sharing
  /// prior-shared secrets.
  ///
  /// Public keys are not allowed to be the identity point. This will error if any are.
  pub fn participate(
    rng: &mut (impl RngCore + CryptoRng),
    generators: &EvrfGenerators<C>,
    context: [u8; 32],
    t: u16,
    evrf_public_keys: &[<C::EmbeddedCurve as Ciphersuite>::G],
    evrf_private_key: &Zeroizing<<C::EmbeddedCurve as Ciphersuite>::F>,
  ) -> Result<Participation<C>, EvrfError> {
    let Ok(n) = u16::try_from(evrf_public_keys.len()) else { Err(EvrfError::TooManyParticipants)? };
    if (t == 0) || (t > n) {
      Err(EvrfError::InvalidThreshold)?;
    }
    if evrf_public_keys.iter().any(|key| bool::from(key.is_identity())) {
      Err(EvrfError::PublicKeyWasIdentity)?;
    };
    // This also checks the private key is not 0
    let evrf_public_key = <C::EmbeddedCurve as Ciphersuite>::generator() * evrf_private_key.deref();
    if !evrf_public_keys.iter().any(|key| *key == evrf_public_key) {
      Err(EvrfError::NotAParticipant)?;
    };

    let transcript = Self::initial_transcript(context, evrf_public_keys, t);
    // Further bind to the participant index so each index gets unique generators
    // This allows reusing eVRF public keys as the prover
    let mut per_proof_transcript = Blake2s256::new();
    per_proof_transcript.update(transcript);
    per_proof_transcript.update(evrf_public_key.to_bytes());

    // The above transcript is expected to be binding to all arguments here
    // The generators are constant to this ciphersuite's generator, and the parameters are
    // transcripted
    let EvrfProveResult { coefficients, encryption_masks, proof } = match Evrf::prove(
      rng,
      &generators.0,
      per_proof_transcript.finalize().into(),
      usize::from(t),
      evrf_public_keys,
      evrf_private_key,
    ) {
      Ok(res) => res,
      Err(AcError::NotEnoughGenerators) => Err(EvrfError::NotEnoughGenerators)?,
      Err(
        AcError::DifferingLrLengths |
        AcError::InconsistentAmountOfConstraints |
        AcError::ConstrainedNonExistentTerm |
        AcError::ConstrainedNonExistentCommitment |
        AcError::InconsistentWitness |
        AcError::Ip(_) |
        AcError::IncompleteProof,
      ) => {
        panic!("failed to prove for the eVRF proof")
      }
    };

    let mut encrypted_secret_shares = HashMap::with_capacity(usize::from(n));
    for (l, encryption_mask) in (1 ..= n).map(Participant).zip(encryption_masks) {
      let share = polynomial::<C::F>(&coefficients, l);
      encrypted_secret_shares.insert(l, *share + *encryption_mask);
    }

    Ok(Participation { proof, encrypted_secret_shares })
  }

  /// Check if a batch of `Participation`s are valid.
  ///
  /// If any `Participation` is invalid, the list of all invalid participants will be returned.
  /// If all `Participation`s are valid and there's at least `t`, an instance of this struct
  /// (usable to obtain a threshold share of the generated key) is returned. If all are valid and
  /// there's not at least `t`, `VerifyResult::NotEnoughParticipants` is returned.
  ///
  /// This DKG is unbiased if all `n` people participate. This DKG is biased if only a threshold
  /// participate.
  pub fn verify(
    rng: &mut (impl RngCore + CryptoRng),
    generators: &EvrfGenerators<C>,
    context: [u8; 32],
    t: u16,
    evrf_public_keys: &[<C::EmbeddedCurve as Ciphersuite>::G],
    participations: &HashMap<Participant, Participation<C>>,
  ) -> Result<VerifyResult<C>, EvrfError> {
    let Ok(n) = u16::try_from(evrf_public_keys.len()) else { Err(EvrfError::TooManyParticipants)? };
    if (t == 0) || (t > n) {
      Err(EvrfError::InvalidThreshold)?;
    }
    if evrf_public_keys.iter().any(|key| bool::from(key.is_identity())) {
      Err(EvrfError::PublicKeyWasIdentity)?;
    };
    for i in participations.keys() {
      if u16::from(*i) > n {
        Err(EvrfError::NonExistentParticipant)?;
      }
    }

    let mut valid = HashMap::with_capacity(participations.len());
    let mut faulty = HashSet::new();

    let transcript = Self::initial_transcript(context, evrf_public_keys, t);

    let mut evrf_verifier = Generators::batch_verifier();
    for (i, participation) in participations {
      let evrf_public_key = evrf_public_keys[usize::from(u16::from(*i)) - 1];

      let mut per_proof_transcript = Blake2s256::new();
      per_proof_transcript.update(transcript);
      per_proof_transcript.update(evrf_public_key.to_bytes());

      // Clone the verifier so if this proof is faulty, it doesn't corrupt the verifier
      let mut verifier_clone = evrf_verifier.clone();
      let Ok(data) = Evrf::<C>::verify(
        rng,
        &generators.0,
        &mut verifier_clone,
        per_proof_transcript.finalize().into(),
        usize::from(t),
        evrf_public_keys,
        evrf_public_key,
        &participation.proof,
      ) else {
        faulty.insert(*i);
        continue;
      };
      evrf_verifier = verifier_clone;

      valid.insert(*i, (participation.encrypted_secret_shares.clone(), data));
    }
    debug_assert_eq!(valid.len() + faulty.len(), participations.len());

    // Perform the batch verification of the eVRFs
    if !generators.0.verify(evrf_verifier) {
      // If the batch failed, verify them each individually
      for (i, participation) in participations {
        if faulty.contains(i) {
          continue;
        }
        let mut evrf_verifier = Generators::batch_verifier();
        Evrf::<C>::verify(
          rng,
          &generators.0,
          &mut evrf_verifier,
          context,
          usize::from(t),
          evrf_public_keys,
          evrf_public_keys[usize::from(u16::from(*i)) - 1],
          &participation.proof,
        )
        .expect("evrf failed basic checks yet prover wasn't prior marked faulty");
        if !generators.0.verify(evrf_verifier) {
          valid.remove(i);
          faulty.insert(*i);
        }
      }
    }
    debug_assert_eq!(valid.len() + faulty.len(), participations.len());

    // Perform the batch verification of the shares
    let mut sum_encrypted_secret_shares = HashMap::with_capacity(usize::from(n));
    let mut sum_masks = HashMap::with_capacity(usize::from(n));
    let mut all_encrypted_secret_shares = HashMap::with_capacity(usize::from(t));
    {
      let mut share_verification_statements_actual = HashMap::with_capacity(valid.len());
      if !{
        let mut g_scalar = C::F::ZERO;
        let mut pairs = Vec::with_capacity(valid.len() * (usize::from(t) + evrf_public_keys.len()));
        for (i, (encrypted_secret_shares, data)) in &valid {
          let (this_g_scalar, mut these_pairs) = share_verification_statements::<C>(
            &mut *rng,
            &data.coefficients,
            evrf_public_keys
              .len()
              .try_into()
              .expect("n prior checked to be <= u16::MAX couldn't be converted to a u16"),
            &data.encryption_commitments,
            encrypted_secret_shares,
          );
          // Queue this into our batch
          g_scalar += this_g_scalar;
          pairs.extend(&these_pairs);

          // Also push this g_scalar onto these_pairs so these_pairs can be verified individually
          // upon error
          these_pairs.push((this_g_scalar, generators.0.g()));
          share_verification_statements_actual.insert(*i, these_pairs);

          // Also format this data as we'd need it upon success
          let mut formatted_encrypted_secret_shares = HashMap::with_capacity(usize::from(n));
          for (j, enc_share) in encrypted_secret_shares {
            /*
              We calculate verification shares as the sum of the encrypted scalars, minus their
              masks. This only does one scalar multiplication, and `1+t` point additions (with
              one negation), and is accordingly much cheaper than interpolating the commitments.
              This is only possible because we already interpolated the commitments to verify the
              encrypted secret share.
            */
            let sum_encrypted_secret_share =
              sum_encrypted_secret_shares.get(j).copied().unwrap_or(C::F::ZERO);
            let sum_mask = sum_masks.get(j).copied().unwrap_or(C::G::identity());
            sum_encrypted_secret_shares.insert(*j, sum_encrypted_secret_share + enc_share);

            let j_index = usize::from(u16::from(*j)) - 1;
            sum_masks.insert(*j, sum_mask + data.encryption_commitments[j_index]);

            formatted_encrypted_secret_shares.insert(*j, (data.ecdh_keys[j_index], *enc_share));
          }
          all_encrypted_secret_shares.insert(*i, formatted_encrypted_secret_shares);
        }
        pairs.push((g_scalar, generators.0.g()));
        bool::from(multiexp_vartime(&pairs).is_identity())
      } {
        // If the batch failed, verify them each individually
        for (i, pairs) in share_verification_statements_actual {
          if !bool::from(multiexp_vartime(&pairs).is_identity()) {
            valid.remove(&i);
            faulty.insert(i);
          }
        }
      }
    }
    debug_assert_eq!(valid.len() + faulty.len(), participations.len());

    let mut faulty = faulty.into_iter().collect::<Vec<_>>();
    if !faulty.is_empty() {
      faulty.sort_unstable();
      return Ok(VerifyResult::Invalid(faulty));
    }

    // We check that people holding at least t key shares have participated in contributing entropy
    // Since the participants' key shares meet or exceed t, meaning if they're malicious they can
    // reconstruct the key regardless, this is safe to the threshold
    {
      let mut participating_weight = 0;
      let mut evrf_public_keys_mut = evrf_public_keys.to_vec();
      for i in valid.keys() {
        let evrf_public_key = evrf_public_keys[usize::from(u16::from(*i)) - 1];

        // Remove this key from the Vec to prevent double-counting
        /*
          Double-counting would be a risk if multiple participants shared an eVRF public key and
          participated. This code does still allow such participants (in order to let participants
          be weighted), and any one of them participating will count as all participating. This is
          fine as any one such participant will be able to decrypt the shares for themselves and
          all other participants, so this is still a key generated by an amount of participants who
          could simply reconstruct the key.
        */
        let start_len = evrf_public_keys_mut.len();
        evrf_public_keys_mut.retain(|key| *key != evrf_public_key);
        let end_len = evrf_public_keys_mut.len();
        let count = start_len - end_len;

        participating_weight += count;
      }
      if participating_weight < usize::from(t) {
        return Ok(VerifyResult::NotEnoughParticipants);
      }
    }

    // If we now have >= t participations, calculate the group key and verification shares

    // The group key is the sum of the zero coefficients
    let group_key = valid.values().map(|(_, evrf_data)| evrf_data.coefficients[0]).sum::<C::G>();

    // Calculate each user's verification share
    let mut verification_shares = HashMap::with_capacity(usize::from(n));
    for i in (1 ..= n).map(Participant) {
      verification_shares
        .insert(i, (C::generator() * sum_encrypted_secret_shares[&i]) - sum_masks[&i]);
    }

    Ok(VerifyResult::Valid(EvrfDkg {
      t,
      n,
      evrf_public_keys: evrf_public_keys.to_vec(),
      group_key,
      verification_shares,
      encrypted_secret_shares: all_encrypted_secret_shares,
    }))
  }

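  /// Obtain the `ThresholdKeys` this DKG yielded for the provided eVRF private key.
  ///
  /// One entry is returned per appearance of the corresponding public key in the DKG, so a
  /// weighted participant (one registered under the same key multiple times) obtains each of
  /// their key shares.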
  pub fn keys(
    &self,
    evrf_private_key: &Zeroizing<<C::EmbeddedCurve as Ciphersuite>::F>,
  ) -> Vec<ThresholdKeys<C>> {
    let evrf_public_key = <C::EmbeddedCurve as Ciphersuite>::generator() * evrf_private_key.deref();
    let mut is = Vec::with_capacity(1);
    for (i, evrf_key) in self.evrf_public_keys.iter().enumerate() {
      if *evrf_key == evrf_public_key {
        let i = u16::try_from(i).expect("n <= u16::MAX yet i > u16::MAX?");
        let i = Participant(1 + i);
        is.push(i);
      }
    }

    let mut res = Vec::with_capacity(is.len());
    for i in is {
      let mut secret_share = Zeroizing::new(C::F::ZERO);
      for shares in self.encrypted_secret_shares.values() {
        let (ecdh_keys, enc_share) = shares[&i];

        let mut ecdh = Zeroizing::new(C::F::ZERO);
        for point in ecdh_keys {
          let (mut x, mut y) =
            <C::EmbeddedCurve as Ciphersuite>::G::to_xy(point * evrf_private_key.deref()).unwrap();
          *ecdh += x;
          x.zeroize();
          y.zeroize();
        }
        *secret_share += enc_share - ecdh.deref();
      }

      debug_assert_eq!(self.verification_shares[&i], C::generator() * secret_share.deref());

      res.push(ThresholdKeys::from(ThresholdCore {
        params: ThresholdParams::new(self.t, self.n, i).unwrap(),
        interpolation: Interpolation::Lagrange,
        secret_share,
        group_key: self.group_key,
        verification_shares: self.verification_shares.clone(),
      }));
    }
    res
  }
}
690 crypto/dkg/evrf/src/proof.rs Normal file
@@ -0,0 +1,690 @@
use core::{marker::PhantomData, ops::Deref, fmt};

use zeroize::{Zeroize, Zeroizing};

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

use generic_array::{typenum::Unsigned, ArrayLength, GenericArray};

use blake2::{Digest, Blake2s256};
use ciphersuite::{
  group::{ff::Field, Group, GroupEncoding},
  Ciphersuite,
};

use generalized_bulletproofs::{
  *,
  transcript::{Transcript as ProverTranscript, VerifierTranscript},
  arithmetic_circuit_proof::*,
};
use generalized_bulletproofs_circuit_abstraction::*;

use ec_divisors::{DivisorCurve, ScalarDecomposition};
use generalized_bulletproofs_ec_gadgets::*;

/// A pair of curves to perform the eVRF with.
pub trait EvrfCurve: Ciphersuite {
  type EmbeddedCurve: Ciphersuite<G: DivisorCurve<FieldElement = <Self as Ciphersuite>::F>>;
  type EmbeddedCurveParameters: DiscreteLogParameters;
}

#[cfg(feature = "evrf-secp256k1")]
impl EvrfCurve for ciphersuite::Secp256k1 {
  type EmbeddedCurve = secq256k1::Secq256k1;
  type EmbeddedCurveParameters = secq256k1::Secq256k1;
}

#[cfg(feature = "evrf-ed25519")]
impl EvrfCurve for ciphersuite::Ed25519 {
  type EmbeddedCurve = embedwards25519::Embedwards25519;
  type EmbeddedCurveParameters = embedwards25519::Embedwards25519;
}

#[cfg(feature = "evrf-ristretto")]
impl EvrfCurve for ciphersuite::Ristretto {
  type EmbeddedCurve = embedwards25519::Embedwards25519;
  type EmbeddedCurveParameters = embedwards25519::Embedwards25519;
}

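// Sample a uniform, non-identity point via rejection sampling: fill the encoding with random
// bytes and retry until it decodes to a valid, non-identity group element.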
fn sample_point<C: Ciphersuite>(rng: &mut (impl RngCore + CryptoRng)) -> C::G {
  let mut repr = <C::G as GroupEncoding>::Repr::default();
  loop {
    rng.fill_bytes(repr.as_mut());
    if let Ok(point) = C::read_G(&mut repr.as_ref()) {
      if bool::from(!point.is_identity()) {
        return point;
      }
    }
  }
}

/// Generators for the eVRF proof.
#[derive(Clone, Debug)]
pub struct EvrfGenerators<C: EvrfCurve>(pub(crate) Generators<C>);

impl<C: EvrfCurve> EvrfGenerators<C> {
  /// Create a new set of generators.
  pub fn new(max_threshold: u16, max_participants: u16) -> EvrfGenerators<C> {
    let g = C::generator();
    let mut rng = ChaCha20Rng::from_seed(Blake2s256::digest(g.to_bytes()).into());
    let h = sample_point::<C>(&mut rng);
    let (_, generators) =
      Evrf::<C>::muls_and_generators_to_use(max_threshold.into(), max_participants.into());
    let mut g_bold = vec![];
    let mut h_bold = vec![];
    for _ in 0 .. generators {
      g_bold.push(sample_point::<C>(&mut rng));
      h_bold.push(sample_point::<C>(&mut rng));
    }
    Self(Generators::new(g, h, g_bold, h_bold).unwrap())
  }
}

/// The result of proving for an eVRF.
pub(crate) struct EvrfProveResult<C: Ciphersuite> {
  /// The coefficients for use in the DKG.
  pub(crate) coefficients: Vec<Zeroizing<C::F>>,
  /// The masks to encrypt secret shares with.
  pub(crate) encryption_masks: Vec<Zeroizing<C::F>>,
  /// The proof itself.
  pub(crate) proof: Vec<u8>,
}

/// The result of verifying an eVRF.
pub(crate) struct EvrfVerifyResult<C: EvrfCurve> {
  /// The commitments to the coefficients for use in the DKG.
  pub(crate) coefficients: Vec<C::G>,
  /// The ephemeral public keys to perform ECDHs with.
  pub(crate) ecdh_keys: Vec<[<C::EmbeddedCurve as Ciphersuite>::G; 2]>,
  /// The commitments to the masks used to encrypt secret shares with.
  pub(crate) encryption_commitments: Vec<C::G>,
}

impl<C: EvrfCurve> fmt::Debug for EvrfVerifyResult<C> {
  fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
    fmt.debug_struct("EvrfVerifyResult").finish_non_exhaustive()
  }
}

/// A struct to prove/verify eVRFs with.
pub(crate) struct Evrf<C: EvrfCurve>(PhantomData<C>);
impl<C: EvrfCurve> Evrf<C> {
  // Sample uniform points (via rejection-sampling) on the embedded elliptic curve
  fn transcript_to_points(
    seed: [u8; 32],
    coefficients: usize,
  ) -> Vec<<C::EmbeddedCurve as Ciphersuite>::G> {
    // We need to do two Diffie-Hellmans per coefficient in order to achieve an unbiased result
    let quantity = 2 * coefficients;

    let mut rng = ChaCha20Rng::from_seed(seed);
    let mut res = Vec::with_capacity(quantity);
    for _ in 0 .. quantity {
      res.push(sample_point::<C::EmbeddedCurve>(&mut rng));
    }
    res
  }

  /// Read a Variable from a theoretical vector commitment tape
  fn read_one_from_tape(generators_to_use: usize, start: &mut usize) -> Variable {
    // Each commitment has twice as many variables as generators in use
    let commitment = *start / generators_to_use;
    // The index will be less than the amount of generators in use, as half are left and half are
    // right
    let index = *start % generators_to_use;
    let res = Variable::CG { commitment, index };
    *start += 1;
    res
  }

  /// Read a set of variables from a theoretical vector commitment tape
  fn read_from_tape<N: ArrayLength>(
    generators_to_use: usize,
    start: &mut usize,
  ) -> GenericArray<Variable, N> {
    let mut buf = Vec::with_capacity(N::USIZE);
    for _ in 0 .. N::USIZE {
      buf.push(Self::read_one_from_tape(generators_to_use, start));
    }
    GenericArray::from_slice(&buf).clone()
  }

  /// Read `PointWithDlog`s, which share a discrete logarithm, from the theoretical vector
  /// commitment tape.
  fn point_with_dlogs(
    start: &mut usize,
    quantity: usize,
    generators_to_use: usize,
  ) -> Vec<PointWithDlog<C::EmbeddedCurveParameters>> {
    // We define a serialized tape of the discrete logarithm, then for each divisor/point, we push:
    // zero, x**i, y x**i, y, x_coord, y_coord
    // We then chunk that into vector commitments
    // Here, we take the assumed layout and generate the expected `Variable`s for this layout

    let dlog = Self::read_from_tape(generators_to_use, start);

    let mut res = Vec::with_capacity(quantity);
    let mut read_point_with_dlog = || {
      let zero = Self::read_one_from_tape(generators_to_use, start);
      let x_from_power_of_2 = Self::read_from_tape(generators_to_use, start);
      let yx = Self::read_from_tape(generators_to_use, start);
      let y = Self::read_one_from_tape(generators_to_use, start);
      let divisor = Divisor { zero, x_from_power_of_2, yx, y };

      let point = (
        Self::read_one_from_tape(generators_to_use, start),
        Self::read_one_from_tape(generators_to_use, start),
      );

      res.push(PointWithDlog { dlog: dlog.clone(), divisor, point });
    };

    for _ in 0 .. quantity {
      read_point_with_dlog();
    }
    res
  }

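  // Returns the amount of multiplications the circuit performs and the amount of generators
  // (rows) to pad to. As a rough worked example (not a binding constant): t = 5 coefficients and
  // n = 10 ECDHs costs 7 * (1 + 10 + 40) = 357 multiplications, which still pads up to the
  // 2048-row floor below.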
  fn muls_and_generators_to_use(coefficients: usize, ecdhs: usize) -> (usize, usize) {
    const MULS_PER_DH: usize = 7;
    // 1 DH to prove the discrete logarithm corresponds to the eVRF public key
    // 2 DHs per generated coefficient
    // 2 DHs per generated ECDH
    let expected_muls = MULS_PER_DH * (1 + (2 * coefficients) + (2 * 2 * ecdhs));
    let generators_to_use = {
      let mut padded_pow_of_2 = 1;
      while padded_pow_of_2 < expected_muls {
        padded_pow_of_2 <<= 1;
      }
      // This may be as small as 16, which would create an excessive amount of vector commitments
      // We set a floor of 2048 rows for bandwidth reasons
      padded_pow_of_2.max(2048)
    };
    (expected_muls, generators_to_use)
  }

  fn circuit(
    curve_spec: &CurveSpec<C::F>,
    evrf_public_key: (C::F, C::F),
    coefficients: usize,
    ecdh_commitments: &[[(C::F, C::F); 2]],
    generator_tables: &[&GeneratorTable<C::F, C::EmbeddedCurveParameters>],
    circuit: &mut Circuit<C>,
    transcript: &mut impl Transcript,
  ) {
    let (expected_muls, generators_to_use) =
      Self::muls_and_generators_to_use(coefficients, ecdh_commitments.len());
    let (challenge, challenged_generators) =
      circuit.discrete_log_challenge(transcript, curve_spec, generator_tables);
    debug_assert_eq!(challenged_generators.len(), 1 + (2 * coefficients) + ecdh_commitments.len());

    // The generator tables/challenged generators are expected to have the following layout
    // G, coefficients * [A, B], ecdhs * [P]
    #[allow(non_snake_case)]
    let challenged_G = &challenged_generators[0];

    // Execute the circuit for the coefficients
    let mut tape_pos = 0;
    {
      let mut point_with_dlogs =
        Self::point_with_dlogs(&mut tape_pos, 1 + (2 * coefficients), generators_to_use)
          .into_iter();

      // Verify the discrete logarithm is in fact the discrete logarithm of the eVRF public key
      let point = circuit.discrete_log(
        curve_spec,
        point_with_dlogs.next().unwrap(),
        &challenge,
        challenged_G,
      );
      circuit.equality(LinComb::from(point.x()), &LinComb::empty().constant(evrf_public_key.0));
      circuit.equality(LinComb::from(point.y()), &LinComb::empty().constant(evrf_public_key.1));

      // Verify the DLog claims against the sampled points
      for (i, pair) in challenged_generators[1 ..].chunks(2).take(coefficients).enumerate() {
        let mut lincomb = LinComb::empty();
        debug_assert_eq!(pair.len(), 2);
        for challenged_generator in pair {
          let point = circuit.discrete_log(
            curve_spec,
            point_with_dlogs.next().unwrap(),
            &challenge,
            challenged_generator,
          );
          // For each point in this pair, add its x coordinate to a lincomb
          lincomb = lincomb.term(C::F::ONE, point.x());
        }
        // Constrain the sum of the two x coordinates to be equal to the value in the Pedersen
        // commitment
        circuit.equality(lincomb, &LinComb::from(Variable::V(i)));
      }
      debug_assert!(point_with_dlogs.next().is_none());
    }

    // Now execute the circuit for the ECDHs
    let mut challenged_generators = challenged_generators.iter().skip(1 + (2 * coefficients));
    for (i, ecdh) in ecdh_commitments.iter().enumerate() {
      let challenged_generator = challenged_generators.next().unwrap();
      let mut lincomb = LinComb::empty();
      for ecdh in ecdh {
        let mut point_with_dlogs =
          Self::point_with_dlogs(&mut tape_pos, 2, generators_to_use).into_iter();

        // One proof of the ECDH secret * G for the commitment published
        let point = circuit.discrete_log(
          curve_spec,
          point_with_dlogs.next().unwrap(),
          &challenge,
          challenged_G,
        );
        circuit.equality(LinComb::from(point.x()), &LinComb::empty().constant(ecdh.0));
        circuit.equality(LinComb::from(point.y()), &LinComb::empty().constant(ecdh.1));

        // One proof of the ECDH secret * P for the ECDH
        let point = circuit.discrete_log(
          curve_spec,
          point_with_dlogs.next().unwrap(),
          &challenge,
          challenged_generator,
        );
        // For each point in this pair, add its x coordinate to a lincomb
        lincomb = lincomb.term(C::F::ONE, point.x());
      }

      // Constrain the sum of the two x coordinates to be equal to the value in the Pedersen
      // commitment
      circuit.equality(lincomb, &LinComb::from(Variable::V(coefficients + i)));
    }

    debug_assert_eq!(expected_muls, circuit.muls());
    debug_assert!(challenged_generators.next().is_none());
  }

  /// Prove a point on an elliptic curve had its discrete logarithm generated via an eVRF.
  pub(crate) fn prove(
    rng: &mut (impl RngCore + CryptoRng),
    generators: &Generators<C>,
    transcript: [u8; 32],
    coefficients: usize,
    ecdh_public_keys: &[<<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G],
    evrf_private_key: &Zeroizing<<<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::F>,
  ) -> Result<EvrfProveResult<C>, AcError> {
    let curve_spec = CurveSpec {
      a: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G::a(),
      b: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G::b(),
    };

    // A tape of the discrete logarithm, then [zero, x**i, y x**i, y, x_coord, y_coord]
    let mut vector_commitment_tape = vec![];

    let mut generator_tables = Vec::with_capacity(1 + (2 * coefficients) + ecdh_public_keys.len());

    // A function to calculate a divisor and push it onto the tape
    // This defines a vec, divisor_points, outside of the fn to reuse its allocation
    let mut divisor =
      |vector_commitment_tape: &mut Vec<_>,
       dlog: &ScalarDecomposition<<<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::F>,
       push_generator: bool,
       generator: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G,
       dh: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G| {
        if push_generator {
          let (x, y) = <C::EmbeddedCurve as Ciphersuite>::G::to_xy(generator).unwrap();
          generator_tables.push(GeneratorTable::new(&curve_spec, x, y));
        }

        let mut divisor = dlog.scalar_mul_divisor(generator).normalize_x_coefficient();

        vector_commitment_tape.push(divisor.zero_coefficient);

        for coefficient in divisor.x_coefficients.iter().skip(1) {
          vector_commitment_tape.push(*coefficient);
        }
        for _ in divisor.x_coefficients.len() ..
          <C::EmbeddedCurveParameters as DiscreteLogParameters>::XCoefficientsMinusOne::USIZE
        {
          vector_commitment_tape.push(<C as Ciphersuite>::F::ZERO);
        }

        for coefficient in divisor.yx_coefficients.first().unwrap_or(&vec![]) {
          vector_commitment_tape.push(*coefficient);
        }
        for _ in divisor.yx_coefficients.first().unwrap_or(&vec![]).len() ..
          <C::EmbeddedCurveParameters as DiscreteLogParameters>::YxCoefficients::USIZE
        {
          vector_commitment_tape.push(<C as Ciphersuite>::F::ZERO);
        }

        vector_commitment_tape
          .push(divisor.y_coefficients.first().copied().unwrap_or(<C as Ciphersuite>::F::ZERO));

        divisor.zeroize();
        drop(divisor);

        let (x, y) = <C::EmbeddedCurve as Ciphersuite>::G::to_xy(dh).unwrap();
        vector_commitment_tape.push(x);
        vector_commitment_tape.push(y);

        (x, y)
      };

    // Start with the coefficients
    let evrf_public_key;
    let mut actual_coefficients = Vec::with_capacity(coefficients);
    {
      // This is checked at a higher level
      let dlog =
        ScalarDecomposition::<<C::EmbeddedCurve as Ciphersuite>::F>::new(**evrf_private_key)
          .expect("eVRF private key was zero");
      let points = Self::transcript_to_points(transcript, coefficients);

      // Start by pushing the discrete logarithm onto the tape
      for coefficient in dlog.decomposition() {
        vector_commitment_tape.push(<_>::from(*coefficient));
      }

      // Push a divisor for proving that we're using the correct scalar
      evrf_public_key = divisor(
        &mut vector_commitment_tape,
        &dlog,
        true,
        <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::generator(),
        <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::generator() * evrf_private_key.deref(),
      );

      // Push a divisor for each point we use in the eVRF
      for pair in points.chunks(2) {
        let mut res = Zeroizing::new(C::F::ZERO);
        for point in pair {
          let (dh_x, _) = divisor(
            &mut vector_commitment_tape,
            &dlog,
            true,
            *point,
            *point * evrf_private_key.deref(),
          );
          *res += dh_x;
        }
        actual_coefficients.push(res);
      }
      debug_assert_eq!(actual_coefficients.len(), coefficients);
    }

    // Now do the ECDHs for the encryption
    let mut encryption_masks = Vec::with_capacity(ecdh_public_keys.len());
    let mut ecdh_commitments = Vec::with_capacity(2 * ecdh_public_keys.len());
    let mut ecdh_commitments_xy = Vec::with_capacity(ecdh_public_keys.len());
    for ecdh_public_key in ecdh_public_keys {
      ecdh_commitments_xy.push([(C::F::ZERO, C::F::ZERO); 2]);

      let mut res = Zeroizing::new(C::F::ZERO);
      for j in 0 .. 2 {
        let mut ecdh_private_key;
        loop {
          ecdh_private_key = <C::EmbeddedCurve as Ciphersuite>::F::random(&mut *rng);
          // Generate a non-0 ECDH private key, as necessary to not produce an identity output
          // Identity isn't representable with the divisors, hence the explicit effort
          if bool::from(!ecdh_private_key.is_zero()) {
            break;
          }
        }
        let dlog =
          ScalarDecomposition::<<C::EmbeddedCurve as Ciphersuite>::F>::new(ecdh_private_key)
            .expect("ECDH private key was zero");
        let ecdh_commitment = <C::EmbeddedCurve as Ciphersuite>::generator() * ecdh_private_key;
        ecdh_commitments.push(ecdh_commitment);
        ecdh_commitments_xy.last_mut().unwrap()[j] =
          <<C::EmbeddedCurve as Ciphersuite>::G as DivisorCurve>::to_xy(ecdh_commitment).unwrap();

        // Start by pushing the discrete logarithm onto the tape
        for coefficient in dlog.decomposition() {
          vector_commitment_tape.push(<_>::from(*coefficient));
        }

        // Push a divisor for proving that we're using the correct scalar for the commitment
        divisor(
          &mut vector_commitment_tape,
          &dlog,
          false,
          <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::generator(),
          <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::generator() * ecdh_private_key,
        );
        // Push a divisor for the key we're performing the ECDH with
        let (dh_x, _) = divisor(
          &mut vector_commitment_tape,
          &dlog,
          j == 0,
          *ecdh_public_key,
          *ecdh_public_key * ecdh_private_key,
        );
        *res += dh_x;

        ecdh_private_key.zeroize();
      }
      encryption_masks.push(res);
    }
    debug_assert_eq!(encryption_masks.len(), ecdh_public_keys.len());

    // Now that we have the vector commitment tape, chunk it
    let (_, generators_to_use) =
      Self::muls_and_generators_to_use(coefficients, ecdh_public_keys.len());

    let mut vector_commitments =
      Vec::with_capacity(vector_commitment_tape.len().div_ceil(generators_to_use));
    for chunk in vector_commitment_tape.chunks(generators_to_use) {
      let g_values = chunk[.. generators_to_use.min(chunk.len())].to_vec().into();
      vector_commitments.push(PedersenVectorCommitment { g_values, mask: C::F::random(&mut *rng) });
    }

    vector_commitment_tape.zeroize();
    drop(vector_commitment_tape);

    let mut commitments = Vec::with_capacity(coefficients + ecdh_public_keys.len());
    for coefficient in &actual_coefficients {
      commitments.push(PedersenCommitment { value: **coefficient, mask: C::F::random(&mut *rng) });
    }
    for enc_mask in &encryption_masks {
      commitments.push(PedersenCommitment { value: **enc_mask, mask: C::F::random(&mut *rng) });
    }

    let mut transcript = ProverTranscript::new(transcript);
    let commited_commitments = transcript.write_commitments(
      vector_commitments
        .iter()
        .map(|commitment| {
          commitment
            .commit(generators.g_bold_slice(), generators.h())
            .ok_or(AcError::NotEnoughGenerators)
        })
        .collect::<Result<_, _>>()?,
      commitments
        .iter()
        .map(|commitment| commitment.commit(generators.g(), generators.h()))
        .collect(),
    );
    for ecdh_commitment in ecdh_commitments {
      transcript.push_point(ecdh_commitment);
    }

    let mut circuit = Circuit::prove(vector_commitments, commitments.clone());
    Self::circuit(
      &curve_spec,
      evrf_public_key,
      coefficients,
      &ecdh_commitments_xy,
      &generator_tables.iter().collect::<Vec<_>>(),
      &mut circuit,
      &mut transcript,
    );

    let (statement, Some(witness)) = circuit
      .statement(
        generators.reduce(generators_to_use).ok_or(AcError::NotEnoughGenerators)?,
        commited_commitments,
      )
      .unwrap()
    else {
      panic!("proving yet wasn't yielded the witness");
    };
    statement.prove(&mut *rng, &mut transcript, witness).unwrap();

    // Push the reveal onto the transcript
    for commitment in &commitments {
      transcript.push_point(generators.g() * commitment.value);
    }

    // Define a weight to aggregate the commitments with
    let mut agg_weights = Vec::with_capacity(commitments.len());
    agg_weights.push(C::F::ONE);
    while agg_weights.len() < commitments.len() {
      agg_weights.push(transcript.challenge::<C>());
    }
    let mut x = commitments
      .iter()
      .zip(&agg_weights)
      .map(|(commitment, weight)| commitment.mask * *weight)
      .sum::<C::F>();

    // Do a Schnorr PoK for the randomness of the aggregated Pedersen commitment
    let mut r = C::F::random(&mut *rng);
    transcript.push_point(generators.h() * r);
    let c = transcript.challenge::<C>();
    transcript.push_scalar(r + (c * x));
    r.zeroize();
    x.zeroize();

    Ok(EvrfProveResult {
      coefficients: actual_coefficients,
      encryption_masks,
      proof: transcript.complete(),
    })
  }

  /// Verify an eVRF proof, returning the output commitments.
  #[allow(clippy::too_many_arguments)]
  pub(crate) fn verify(
    rng: &mut (impl RngCore + CryptoRng),
    generators: &Generators<C>,
    verifier: &mut BatchVerifier<C>,
    transcript: [u8; 32],
    coefficients: usize,
    ecdh_public_keys: &[<<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G],
    evrf_public_key: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G,
    proof: &[u8],
  ) -> Result<EvrfVerifyResult<C>, ()> {
    let curve_spec = CurveSpec {
      a: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G::a(),
      b: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G::b(),
    };

    let mut generator_tables = Vec::with_capacity(1 + (2 * coefficients) + ecdh_public_keys.len());
    {
      let (x, y) =
        <C::EmbeddedCurve as Ciphersuite>::G::to_xy(<C::EmbeddedCurve as Ciphersuite>::generator())
          .unwrap();
      generator_tables.push(GeneratorTable::new(&curve_spec, x, y));
    }
    let points = Self::transcript_to_points(transcript, coefficients);
    for generator in points {
      let (x, y) = <C::EmbeddedCurve as Ciphersuite>::G::to_xy(generator).unwrap();
      generator_tables.push(GeneratorTable::new(&curve_spec, x, y));
    }
    for generator in ecdh_public_keys {
      let (x, y) = <C::EmbeddedCurve as Ciphersuite>::G::to_xy(*generator).unwrap();
      generator_tables.push(GeneratorTable::new(&curve_spec, x, y));
    }

    let (_, generators_to_use) =
      Self::muls_and_generators_to_use(coefficients, ecdh_public_keys.len());

    let mut transcript = VerifierTranscript::new(transcript, proof);

    let dlog_len = <C::EmbeddedCurveParameters as DiscreteLogParameters>::ScalarBits::USIZE;
    let divisor_len = 1 +
      <C::EmbeddedCurveParameters as DiscreteLogParameters>::XCoefficientsMinusOne::USIZE +
      <C::EmbeddedCurveParameters as DiscreteLogParameters>::YxCoefficients::USIZE +
      1;
    let dlog_proof_len = divisor_len + 2;

    let coeffs_vc_variables = dlog_len + ((1 + (2 * coefficients)) * dlog_proof_len);
    let ecdhs_vc_variables = ((2 * ecdh_public_keys.len()) * dlog_len) +
      ((2 * 2 * ecdh_public_keys.len()) * dlog_proof_len);
    let vcs = (coeffs_vc_variables + ecdhs_vc_variables).div_ceil(generators_to_use);

    let all_commitments =
      transcript.read_commitments(vcs, coefficients + ecdh_public_keys.len()).map_err(|_| ())?;
    let commitments = all_commitments.V().to_vec();

    let mut ecdh_keys = Vec::with_capacity(ecdh_public_keys.len());
    let mut ecdh_keys_xy = Vec::with_capacity(ecdh_public_keys.len());
    for _ in 0 .. ecdh_public_keys.len() {
      let ecdh_keys_i = [
        transcript.read_point::<C::EmbeddedCurve>().map_err(|_| ())?,
        transcript.read_point::<C::EmbeddedCurve>().map_err(|_| ())?,
      ];
      ecdh_keys.push(ecdh_keys_i);
      // This bans zero ECDH keys
      ecdh_keys_xy.push([
        <<C::EmbeddedCurve as Ciphersuite>::G as DivisorCurve>::to_xy(ecdh_keys_i[0]).ok_or(())?,
        <<C::EmbeddedCurve as Ciphersuite>::G as DivisorCurve>::to_xy(ecdh_keys_i[1]).ok_or(())?,
      ]);
    }

    let mut circuit = Circuit::verify();
    Self::circuit(
      &curve_spec,
      <C::EmbeddedCurve as Ciphersuite>::G::to_xy(evrf_public_key).ok_or(())?,
      coefficients,
      &ecdh_keys_xy,
      &generator_tables.iter().collect::<Vec<_>>(),
      &mut circuit,
      &mut transcript,
    );

    let (statement, None) =
      circuit.statement(generators.reduce(generators_to_use).ok_or(())?, all_commitments).unwrap()
    else {
      panic!("verifying yet was yielded a witness");
    };

    statement.verify(rng, verifier, &mut transcript).map_err(|_| ())?;

    // Read the openings for the commitments
    let mut openings = Vec::with_capacity(commitments.len());
    for _ in 0 .. commitments.len() {
      openings.push(transcript.read_point::<C>().map_err(|_| ())?);
    }

    // Verify the openings of the commitments
    let mut agg_weights = Vec::with_capacity(commitments.len());
    agg_weights.push(C::F::ONE);
    while agg_weights.len() < commitments.len() {
      agg_weights.push(transcript.challenge::<C>());
    }

    let sum_points =
      openings.iter().zip(&agg_weights).map(|(point, weight)| *point * *weight).sum::<C::G>();
    let sum_commitments =
      commitments.into_iter().zip(agg_weights).map(|(point, weight)| point * weight).sum::<C::G>();
    #[allow(non_snake_case)]
    let A = sum_commitments - sum_points;

    #[allow(non_snake_case)]
    let R = transcript.read_point::<C>().map_err(|_| ())?;
    let c = transcript.challenge::<C>();
    let s = transcript.read_scalar::<C>().map_err(|_| ())?;

    // Doesn't batch verify this as we can't access the internals of the GBP batch verifier
    if (R + (A * c)) != (generators.h() * s) {
      Err(())?;
    }

    if !transcript.complete().is_empty() {
      Err(())?
    };

    let encryption_commitments = openings[coefficients ..].to_vec();
    let coefficients = openings[.. coefficients].to_vec();
    Ok(EvrfVerifyResult { coefficients, ecdh_keys, encryption_commitments })
  }
}
79 crypto/dkg/evrf/src/tests/mod.rs Normal file
@@ -0,0 +1,79 @@
use std::collections::HashMap;

use zeroize::Zeroizing;
use rand_core::OsRng;
use rand::seq::SliceRandom;

use ciphersuite::{group::ff::Field, Ciphersuite};

use crate::{
  Participant,
  evrf::*,
  tests::{THRESHOLD, PARTICIPANTS, recover_key},
};

mod proof;
use proof::{Pallas, Vesta};

#[test]
fn evrf_dkg() {
  let generators = EvrfGenerators::<Pallas>::new(THRESHOLD, PARTICIPANTS);
  let context = [0; 32];

  let mut priv_keys = vec![];
  let mut pub_keys = vec![];
  for i in 0 .. PARTICIPANTS {
    let priv_key = <Vesta as Ciphersuite>::F::random(&mut OsRng);
    pub_keys.push(<Vesta as Ciphersuite>::generator() * priv_key);
    priv_keys.push((Participant::new(1 + i).unwrap(), Zeroizing::new(priv_key)));
  }

  let mut participations = HashMap::new();
  // Shuffle the private keys so we iterate over a random subset of them
  priv_keys.shuffle(&mut OsRng);
  for (i, priv_key) in priv_keys.iter().take(usize::from(THRESHOLD)) {
    participations.insert(
      *i,
      EvrfDkg::<Pallas>::participate(
        &mut OsRng,
        &generators,
        context,
        THRESHOLD,
        &pub_keys,
        priv_key,
      )
      .unwrap(),
    );
  }

  let VerifyResult::Valid(dkg) = EvrfDkg::<Pallas>::verify(
    &mut OsRng,
    &generators,
    context,
    THRESHOLD,
    &pub_keys,
    &participations,
  )
  .unwrap() else {
    panic!("verify didn't return VerifyResult::Valid")
  };

  let mut group_key = None;
  let mut verification_shares = None;
  let mut all_keys = HashMap::new();
  for (i, priv_key) in priv_keys {
    let keys = dkg.keys(&priv_key).into_iter().next().unwrap();
    assert_eq!(keys.params().i(), i);
    assert_eq!(keys.params().t(), THRESHOLD);
    assert_eq!(keys.params().n(), PARTICIPANTS);
    group_key = group_key.or(Some(keys.group_key()));
    verification_shares = verification_shares.or(Some(keys.verification_shares()));
    assert_eq!(Some(keys.group_key()), group_key);
    assert_eq!(Some(keys.verification_shares()), verification_shares);

    all_keys.insert(i, keys);
  }

  // TODO: Test for all possible combinations of keys
  assert_eq!(Pallas::generator() * recover_key(&all_keys), group_key.unwrap());
}
118 crypto/dkg/evrf/src/tests/proof.rs Normal file
@@ -0,0 +1,118 @@
use std::time::Instant;

use rand_core::OsRng;

use zeroize::{Zeroize, Zeroizing};
use generic_array::typenum::{Sum, Diff, Quot, U, U1, U2};
use blake2::{Digest, Blake2b512};

use ciphersuite::{
  group::{
    ff::{FromUniformBytes, Field, PrimeField},
    Group,
  },
  Ciphersuite, Secp256k1, Ed25519, Ristretto,
};
use pasta_curves::{Ep, Eq, Fp, Fq};

use generalized_bulletproofs::{Generators, tests::generators};
use generalized_bulletproofs_ec_gadgets::DiscreteLogParameters;

use crate::evrf::proof::*;

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct Pallas;
impl Ciphersuite for Pallas {
  type F = Fq;
  type G = Ep;
  type H = Blake2b512;
  const ID: &'static [u8] = b"Pallas";
  fn generator() -> Ep {
    Ep::generator()
  }
  fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
    // This naive concat may be insecure in a real world deployment
    // This is solely test code so it's fine
    Self::F::from_uniform_bytes(&Self::H::digest([dst, msg].concat()).into())
  }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct Vesta;
impl Ciphersuite for Vesta {
  type F = Fp;
  type G = Eq;
  type H = Blake2b512;
  const ID: &'static [u8] = b"Vesta";
  fn generator() -> Eq {
    Eq::generator()
  }
  fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
    // This naive concat may be insecure in a real world deployment
    // This is solely test code so it's fine
    Self::F::from_uniform_bytes(&Self::H::digest([dst, msg].concat()).into())
  }
}

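// Divisor parameters for Vesta, derived from its scalar bit length at the type level:
// XCoefficients = (ScalarBits + 1) / 2, XCoefficientsMinusOne = XCoefficients - 1, and
// YxCoefficients = ((ScalarBits + 2) / 2) - 2.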
pub struct VestaParams;
impl DiscreteLogParameters for VestaParams {
  type ScalarBits = U<{ <<Vesta as Ciphersuite>::F as PrimeField>::NUM_BITS as usize }>;
  type XCoefficients = Quot<Sum<Self::ScalarBits, U1>, U2>;
  type XCoefficientsMinusOne = Diff<Self::XCoefficients, U1>;
  type YxCoefficients = Diff<Quot<Sum<Sum<Self::ScalarBits, U1>, U1>, U2>, U2>;
}

impl EvrfCurve for Pallas {
  type EmbeddedCurve = Vesta;
  type EmbeddedCurveParameters = VestaParams;
}

fn evrf_proof_test<C: EvrfCurve>() {
  let generators = generators(2048);
  let vesta_private_key = Zeroizing::new(<C::EmbeddedCurve as Ciphersuite>::F::random(&mut OsRng));
  let ecdh_public_keys = [
    <C::EmbeddedCurve as Ciphersuite>::G::random(&mut OsRng),
    <C::EmbeddedCurve as Ciphersuite>::G::random(&mut OsRng),
  ];
  let time = Instant::now();
  let res =
    Evrf::<C>::prove(&mut OsRng, &generators, [0; 32], 1, &ecdh_public_keys, &vesta_private_key)
      .unwrap();
  println!("Proving time: {:?}", time.elapsed());

  let time = Instant::now();
  let mut verifier = Generators::batch_verifier();
  Evrf::<C>::verify(
    &mut OsRng,
    &generators,
    &mut verifier,
    [0; 32],
    1,
    &ecdh_public_keys,
    C::EmbeddedCurve::generator() * *vesta_private_key,
    &res.proof,
  )
  .unwrap();
  assert!(generators.verify(verifier));
  println!("Verifying time: {:?}", time.elapsed());
}

#[test]
fn pallas_evrf_proof_test() {
  evrf_proof_test::<Pallas>();
}

#[test]
fn secp256k1_evrf_proof_test() {
  evrf_proof_test::<Secp256k1>();
}

#[test]
fn ed25519_evrf_proof_test() {
  evrf_proof_test::<Ed25519>();
}

#[test]
fn ristretto_evrf_proof_test() {
  evrf_proof_test::<Ristretto>();
}