https://github.com/serai-dex/serai.git
Add the openings of the PCs to the eVRF as necessary
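The "PCs" are the Pedersen commitments the eVRF produces alongside its output scalars. As a one-line refresher (notation mine, not from the commit), a commitment and its opening are

$$C = v \cdot G + m \cdot H, \qquad \text{opening} = (v, m),$$

with independent generators $G$, $H$, committed value $v$, and mask $m$. The diff below has the prover reveal the $v \cdot G$ terms and prove knowledge of the aggregated mask.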
Cargo.lock (generated, 6 changed lines)
@@ -2437,7 +2437,7 @@ dependencies = [
  "generalized-bulletproofs-circuit-abstraction",
  "generalized-bulletproofs-ec-gadgets",
  "generic-array 1.1.0",
- "multiexp",
+ "pasta_curves",
  "rand_chacha",
  "rand_core",
  "subtle",
@@ -5762,8 +5762,7 @@ dependencies = [
 [[package]]
 name = "pasta_curves"
 version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3e57598f73cc7e1b2ac63c79c517b31a0877cd7c402cdcaa311b5208de7a095"
+source = "git+https://github.com/kayabaNerve/pasta_curves?rev=a46b5be95cacbff54d06aad8d3bbcba42e05d616#a46b5be95cacbff54d06aad8d3bbcba42e05d616"
 dependencies = [
  "blake2b_simd",
  "ff",
@@ -5772,6 +5771,7 @@ dependencies = [
  "rand",
  "static_assertions",
  "subtle",
+ "zeroize",
 ]

 [[package]]

@@ -161,6 +161,9 @@ matches = { path = "patches/matches" }
 option-ext = { path = "patches/option-ext" }
 directories-next = { path = "patches/directories-next" }

+# The official pasta_curves repo doesn't support Zeroize
+pasta_curves = { git = "https://github.com/kayabaNerve/pasta_curves", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616" }
+
 [workspace.lints.clippy]
 unwrap_or_default = "allow"
 borrow_as_ptr = "deny"

@@ -22,11 +22,14 @@ rand_chacha = { version = "0.3", default-features = false, features = ["std"] }
 generic-array = { version = "1", default-features = false, features = ["alloc"] }

 blake2 = { version = "0.10", default-features = false, features = ["std"] }

-multiexp = { path = "../multiexp", version = "0.4", default-features = false, features = ["std", "batch"] }
 ciphersuite = { path = "../ciphersuite", version = "0.4", default-features = false, features = ["std"] }

 ec-divisors = { path = "./divisors" }
 generalized-bulletproofs = { path = "./generalized-bulletproofs" }
 generalized-bulletproofs-circuit-abstraction = { path = "./circuit-abstraction" }
 generalized-bulletproofs-ec-gadgets = { path = "./ec-gadgets" }
+
+[dev-dependencies]
+generalized-bulletproofs = { path = "./generalized-bulletproofs", features = ["tests"] }
+ec-divisors = { path = "./divisors", features = ["pasta"] }
+pasta_curves = "0.5"

@@ -20,6 +20,7 @@ group = "0.13"

 hex = { version = "0.4", optional = true }
 dalek-ff-group = { path = "../../dalek-ff-group", features = ["std"], optional = true }
+pasta_curves = { version = "0.5", default-features = false, features = ["bits", "alloc"], optional = true }

 [dev-dependencies]
 rand_core = { version = "0.6", features = ["getrandom"] }
@@ -30,3 +31,4 @@ pasta_curves = { version = "0.5", default-features = false, features = ["bits", "alloc"], optional = true }

 [features]
 ed25519 = ["hex", "dalek-ff-group"]
+pasta = ["pasta_curves"]

@@ -180,6 +180,48 @@ pub fn new_divisor<C: DivisorCurve>(points: &[C]) -> Option<Poly<C::FieldElement
   Some(divs.remove(0).1)
 }

+#[cfg(any(test, feature = "pasta"))]
+mod pasta {
+  use group::{ff::Field, Curve};
+  use pasta_curves::{
+    arithmetic::{Coordinates, CurveAffine},
+    Ep, Fp, Eq, Fq,
+  };
+  use crate::DivisorCurve;
+
+  impl DivisorCurve for Ep {
+    type FieldElement = Fp;
+
+    fn a() -> Self::FieldElement {
+      Self::FieldElement::ZERO
+    }
+    fn b() -> Self::FieldElement {
+      Self::FieldElement::from(5u64)
+    }
+
+    fn to_xy(point: Self) -> Option<(Self::FieldElement, Self::FieldElement)> {
+      Option::<Coordinates<_>>::from(point.to_affine().coordinates())
+        .map(|coords| (*coords.x(), *coords.y()))
+    }
+  }
+
+  impl DivisorCurve for Eq {
+    type FieldElement = Fq;
+
+    fn a() -> Self::FieldElement {
+      Self::FieldElement::ZERO
+    }
+    fn b() -> Self::FieldElement {
+      Self::FieldElement::from(5u64)
+    }
+
+    fn to_xy(point: Self) -> Option<(Self::FieldElement, Self::FieldElement)> {
+      Option::<Coordinates<_>>::from(point.to_affine().coordinates())
+        .map(|coords| (*coords.x(), *coords.y()))
+    }
+  }
+}
+
 #[cfg(any(test, feature = "ed25519"))]
 mod ed25519 {
   use group::{

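The two impls above hard-code a = 0, b = 5, i.e. the Pallas/Vesta short-Weierstrass equation y^2 = x^3 + 5. A quick sanity check one could run against them, assuming the new `pasta` feature of `ec_divisors` is enabled (the harness itself is illustrative, not part of the commit):

use group::Group;
use rand_core::OsRng;

use pasta_curves::Ep;
use ec_divisors::DivisorCurve;

fn main() {
  // Sample a random Pallas point and confirm its affine coordinates satisfy y^2 = x^3 + a*x + b.
  let point = Ep::random(&mut OsRng);
  let (x, y) =
    <Ep as DivisorCurve>::to_xy(point).expect("a random point will not be the identity");
  assert_eq!(y * y, (x * x * x) + (<Ep as DivisorCurve>::a() * x) + <Ep as DivisorCurve>::b());
}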
@@ -1,30 +1,11 @@
 use rand_core::OsRng;

-use group::{ff::Field, Group, Curve};
+use group::{ff::Field, Group};
 use dalek_ff_group::EdwardsPoint;
-use pasta_curves::{
-  arithmetic::{Coordinates, CurveAffine},
-  Ep, Fp,
-};
+use pasta_curves::{Ep, Eq};

 use crate::{DivisorCurve, Poly, new_divisor};

-impl DivisorCurve for Ep {
-  type FieldElement = Fp;
-
-  fn a() -> Self::FieldElement {
-    Self::FieldElement::ZERO
-  }
-  fn b() -> Self::FieldElement {
-    Self::FieldElement::from(5u64)
-  }
-
-  fn to_xy(point: Self) -> Option<(Self::FieldElement, Self::FieldElement)> {
-    Option::<Coordinates<_>>::from(point.to_affine().coordinates())
-      .map(|coords| (*coords.x(), *coords.y()))
-  }
-}
-
 // Equation 4 in the security proofs
 fn check_divisor<C: DivisorCurve>(points: Vec<C>) {
   // Create the divisor
@@ -208,6 +189,13 @@ fn test_divisor_pallas() {
   test_subset_sum_to_infinity::<Ep>();
 }

+#[test]
+fn test_divisor_vesta() {
+  test_divisor::<Eq>();
+  test_same_point::<Eq>();
+  test_subset_sum_to_infinity::<Eq>();
+}
+
 #[test]
 fn test_divisor_ed25519() {
   // Since we're implementing Wei25519 ourselves, check the isomorphism works as expected

@@ -14,6 +14,10 @@ const POINT: u8 = 1;
 const CHALLENGE: u8 = 2;

 fn challenge<F: PrimeField>(digest: &mut Blake2b512) -> F {
+  // Panic if this is such a wide field, we won't successfully perform a reduction into an unbiased
+  // scalar
+  debug_assert!((F::NUM_BITS + 128) < 512);
+
   digest.update([CHALLENGE]);
   let chl = digest.clone().finalize();

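The new debug_assert encodes the standard wide-reduction argument: reducing a 512-bit digest into an F::NUM_BITS-bit field leaves a bias on the order of 2^-(512 - NUM_BITS), which is negligible exactly when NUM_BITS + 128 < 512. A standalone sketch of that style of reduction via ff's FromUniformBytes (illustrative only; the crate's own challenge routine continues in the diff, unchanged):

use blake2::{Blake2b512, Digest};
use group::ff::FromUniformBytes;

// Reduce a 64-byte (512-bit) digest into a field element. For fields under ~384 bits the
// result is statistically indistinguishable from uniform.
fn wide_scalar<F: FromUniformBytes<64>>(digest: Blake2b512) -> F {
  F::from_uniform_bytes(&digest.finalize().into())
}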
@@ -78,14 +82,16 @@ impl Transcript {
     Self { digest, transcript: Vec::with_capacity(1024) }
   }

-  pub(crate) fn push_scalar(&mut self, scalar: impl PrimeField) {
+  /// Push a scalar onto the transcript.
+  pub fn push_scalar(&mut self, scalar: impl PrimeField) {
     self.digest.update([SCALAR]);
     let bytes = scalar.to_repr();
     self.digest.update(bytes);
     self.transcript.extend(bytes.as_ref());
   }

-  pub(crate) fn push_point(&mut self, point: impl GroupEncoding) {
+  /// Push a point onto the transcript.
+  pub fn push_point(&mut self, point: impl GroupEncoding) {
     self.digest.update([POINT]);
     let bytes = point.to_bytes();
     self.digest.update(bytes);
@@ -132,7 +138,8 @@ impl<'a> VerifierTranscript<'a> {
     Self { digest, transcript: proof }
   }

-  pub(crate) fn read_scalar<C: Ciphersuite>(&mut self) -> io::Result<C::F> {
+  /// Read a scalar from the transcript.
+  pub fn read_scalar<C: Ciphersuite>(&mut self) -> io::Result<C::F> {
     let scalar = C::read_F(&mut self.transcript)?;
     self.digest.update([SCALAR]);
     let bytes = scalar.to_repr();
@@ -140,7 +147,8 @@ impl<'a> VerifierTranscript<'a> {
     Ok(scalar)
   }

-  pub(crate) fn read_point<C: Ciphersuite>(&mut self) -> io::Result<C::G> {
+  /// Read a point from the transcript.
+  pub fn read_point<C: Ciphersuite>(&mut self) -> io::Result<C::G> {
     let point = C::read_G(&mut self.transcript)?;
     self.digest.update([POINT]);
     let bytes = point.to_bytes();
@@ -172,4 +180,9 @@ impl<'a> VerifierTranscript<'a> {
   pub fn challenge<F: PrimeField>(&mut self) -> F {
     challenge(&mut self.digest)
   }
+
+  /// Complete the transcript, returning the advanced slice.
+  pub fn complete(self) -> &'a [u8] {
+    self.transcript
+  }
 }

@@ -6,6 +6,7 @@ use rand_chacha::ChaCha20Rng;

 use generic_array::{typenum::Unsigned, ArrayLength, GenericArray};

+use blake2::{Digest, Blake2s256};
 use ciphersuite::{
   group::{
     ff::{Field, PrimeField, PrimeFieldBits},
@@ -24,6 +25,9 @@ use generalized_bulletproofs_circuit_abstraction::*;
 use ec_divisors::{DivisorCurve, new_divisor};
 use generalized_bulletproofs_ec_gadgets::*;

+#[cfg(test)]
+mod tests;
+
 /// A curve to perform the eVRF with.
 pub trait EvrfCurve: Ciphersuite {
   type EmbeddedCurve: Ciphersuite;
@@ -39,7 +43,7 @@ pub struct EvrfProveResult<C: Ciphersuite> {
 /// A struct to prove/verify eVRFs with.
 pub struct Evrf;
 impl Evrf {
-  fn seed_to_points<C: Ciphersuite>(seed: [u8; 32], quantity: usize) -> Vec<C::G> {
+  fn transcript_to_points<C: Ciphersuite>(seed: [u8; 32], quantity: usize) -> Vec<C::G> {
     // We need to do two Diffie-Hellman's per point in order to achieve an unbiased result
     let quantity = 2 * quantity;

@@ -91,8 +95,8 @@ impl Evrf {

     let dlog = read_from_tape(generators_to_use, &mut start);

-    let mut res = Vec::with_capacity(quantity);
-    for _ in 0 .. quantity {
+    let mut res = Vec::with_capacity(quantity + 1);
+    let mut read_point_with_dlog = || {
       let zero = read_one_from_tape(generators_to_use, &mut start);
       let x_from_power_of_2 = read_from_tape(generators_to_use, &mut start);
       let yx = read_from_tape(generators_to_use, &mut start);
@@ -105,7 +109,14 @@ impl Evrf {
       );

       res.push(PointWithDlog { dlog: dlog.clone(), divisor, point });
+    };
+
+    for _ in 0 .. quantity {
+      // One for each DH proven
+      read_point_with_dlog();
     }
+    // And one more for the proof this is the discrete log of the public key
+    read_point_with_dlog();
     res
   }

@@ -175,7 +186,7 @@ impl Evrf {
     rng: &mut (impl RngCore + CryptoRng),
     generators: &Generators<C>,
     evrf_private_key: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::F,
-    seed: [u8; 32],
+    invocation: [u8; 32],
     quantity: usize,
   ) -> Result<EvrfProveResult<C>, AcError>
   where
@@ -187,7 +198,19 @@ impl Evrf {
       b: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G::b(),
     };

-    let points = Self::seed_to_points::<C::EmbeddedCurve>(seed, quantity);
+    // Combine the invocation and the public key into a transcript
+    let transcript = Blake2s256::digest(
+      [
+        invocation.as_slice(),
+        (<<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::generator() * evrf_private_key)
+          .to_bytes()
+          .as_ref(),
+      ]
+      .concat(),
+    )
+    .into();
+
+    let points = Self::transcript_to_points::<C::EmbeddedCurve>(transcript, quantity);

     let num_bits: u32 = <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::F::NUM_BITS;

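Both the prover (here) and the verifier (further down in this diff) derive the same 32-byte value, so the DH generators and the proof transcript are bound to this invocation and this key. Reduced to a helper for clarity (illustrative; the commit inlines it on both sides):

use blake2::{Blake2s256, Digest};

// transcript = Blake2s256(invocation || public key), as computed by both prover and verifier.
fn evrf_transcript(invocation: [u8; 32], public_key_bytes: &[u8]) -> [u8; 32] {
  Blake2s256::digest([invocation.as_slice(), public_key_bytes].concat()).into()
}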
@@ -218,6 +241,8 @@ impl Evrf {
       let mut h_value = dlog[h as usize];
       let mut h_prior_value = dlog[(h as usize) - 1];

+      // TODO: Squash the following two loops by iterating from the top bit to the bottom bit
+
       let mut prior_scalar = dlog[(h as usize) - 1];
       for (i, scalar) in dlog.iter().enumerate().skip(h as usize) {
         let is_zero = <C as Ciphersuite>::F::ZERO.ct_eq(scalar);
@@ -367,7 +392,7 @@ impl Evrf {
       commitments.push(PedersenCommitment { value: **scalar, mask: C::F::random(&mut *rng) });
     }

-    let mut transcript = ProverTranscript::new(seed);
+    let mut transcript = ProverTranscript::new(transcript);
     let commited_commitments = transcript.write_commitments(
       vector_commitments
         .iter()
@@ -383,7 +408,7 @@ impl Evrf {
         .collect(),
     );

-    let mut circuit = Circuit::prove(vector_commitments, commitments);
+    let mut circuit = Circuit::prove(vector_commitments, commitments.clone());
     Self::circuit::<C>(
       &curve_spec,
       evrf_public_key,
@@ -402,7 +427,32 @@ impl Evrf {
     else {
       panic!("proving yet wasn't yielded the witness");
     };
-    statement.prove(rng, &mut transcript, witness).unwrap();
+    statement.prove(&mut *rng, &mut transcript, witness).unwrap();

+    // Push the reveal onto the transcript
+    for scalar in &scalars {
+      transcript.push_point(generators.g() * **scalar);
+    }
+
+    // Define a weight to aggregate the commitments with
+    let mut agg_weights = Vec::with_capacity(quantity);
+    agg_weights.push(C::F::ONE);
+    while agg_weights.len() < quantity {
+      agg_weights.push(transcript.challenge::<C::F>());
+    }
+    let mut x = commitments
+      .iter()
+      .zip(&agg_weights)
+      .map(|(commitment, weight)| commitment.mask * *weight)
+      .sum::<C::F>();
+
+    // Do a Schnorr PoK for the randomness of the aggregated Pedersen commitment
+    let mut r = C::F::random(&mut *rng);
+    transcript.push_point(generators.h() * r);
+    let c = transcript.challenge::<C::F>();
+    transcript.push_scalar(r + (c * x));
+    r.zeroize();
+    x.zeroize();
+
     Ok(EvrfProveResult { scalars, proof: transcript.complete() })
   }
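Put algebraically (my summary of the code just added, with $C_i = v_i G + m_i H$ the Pedersen commitments, $P_i = v_i G$ the pushed reveals, and $w_i$ the aggregation weights, where $w_1 = 1$ and the rest are transcript challenges):

$$A = \sum_i w_i C_i - \sum_i w_i P_i = \Big(\sum_i w_i m_i\Big) H, \qquad R = r H, \qquad s = r + c \sum_i w_i m_i,$$

so the proof opens the value components of the commitments and shows the remaining difference is purely a multiple of $H$. The matching check appears in the verification path below.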
@@ -414,7 +464,7 @@ impl Evrf {
     generators: &Generators<C>,
     verifier: &mut BatchVerifier<C>,
     evrf_public_key: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G,
-    seed: [u8; 32],
+    invocation: [u8; 32],
     quantity: usize,
     proof: &[u8],
   ) -> Result<Vec<C::G>, ()>
@@ -427,7 +477,11 @@ impl Evrf {
       b: <<C as EvrfCurve>::EmbeddedCurve as Ciphersuite>::G::b(),
     };

-    let points = Self::seed_to_points::<C::EmbeddedCurve>(seed, quantity);
+    let transcript =
+      Blake2s256::digest([invocation.as_slice(), evrf_public_key.to_bytes().as_ref()].concat())
+        .into();
+
+    let points = Self::transcript_to_points::<C::EmbeddedCurve>(transcript, quantity);
     let mut generator_tables = Vec::with_capacity(1 + (2 * quantity));

     for generator in points {
|
|||||||
|
|
||||||
let (_, generators_to_use) = Self::muls_and_generators_to_use(quantity);
|
let (_, generators_to_use) = Self::muls_and_generators_to_use(quantity);
|
||||||
|
|
||||||
let mut transcript = VerifierTranscript::new(seed, proof);
|
let mut transcript = VerifierTranscript::new(transcript, proof);
|
||||||
|
|
||||||
let divisor_len = 1 + <C::EmbeddedCurveParameters as DiscreteLogParameters>::XCoefficientsMinusOne::USIZE + <C::EmbeddedCurveParameters as DiscreteLogParameters>::YxCoefficients::USIZE + 1;
|
let divisor_len = 1 +
|
||||||
let dlog_len = divisor_len + 2;
|
<C::EmbeddedCurveParameters as DiscreteLogParameters>::XCoefficientsMinusOne::USIZE +
|
||||||
let vcs =
|
<C::EmbeddedCurveParameters as DiscreteLogParameters>::YxCoefficients::USIZE +
|
||||||
(<C::EmbeddedCurveParameters as DiscreteLogParameters>::ScalarBits::USIZE + ((1 + (2 * quantity)) * dlog_len)) / (2 * generators_to_use);
|
1;
|
||||||
|
let dlog_proof_len = divisor_len + 2;
|
||||||
|
let vcs = (<C::EmbeddedCurveParameters as DiscreteLogParameters>::ScalarBits::USIZE +
|
||||||
|
((1 + (2 * quantity)) * dlog_proof_len))
|
||||||
|
.div_ceil(2 * generators_to_use);
|
||||||
|
|
||||||
let commitments = transcript.read_commitments(vcs, quantity).map_err(|_| ())?;
|
let all_commitments = transcript.read_commitments(vcs, quantity).map_err(|_| ())?;
|
||||||
|
let commitments = all_commitments.V().to_vec();
|
||||||
|
|
||||||
let mut circuit = Circuit::verify();
|
let mut circuit = Circuit::verify();
|
||||||
Self::circuit::<C>(
|
Self::circuit::<C>(
|
||||||
@@ -464,14 +523,46 @@ impl Evrf {
     );

     let (statement, None) =
-      circuit.statement(generators.reduce(generators_to_use).ok_or(())?, commitments).unwrap()
+      circuit.statement(generators.reduce(generators_to_use).ok_or(())?, all_commitments).unwrap()
     else {
       panic!("verifying yet was yielded a witness");
     };

     statement.verify(rng, verifier, &mut transcript).map_err(|_| ())?;

-    // TODO: Unblinded PCs
-    Ok(vec![])
+    // Read the unblinded public keys
+    let mut res = Vec::with_capacity(quantity);
+    for _ in 0 .. quantity {
+      res.push(transcript.read_point::<C>().map_err(|_| ())?);
+    }
+
+    let mut agg_weights = Vec::with_capacity(quantity);
+    agg_weights.push(C::F::ONE);
+    while agg_weights.len() < quantity {
+      agg_weights.push(transcript.challenge::<C::F>());
+    }
+
+    let sum_points =
+      res.iter().zip(&agg_weights).map(|(point, weight)| *point * *weight).sum::<C::G>();
+    let sum_commitments =
+      commitments.into_iter().zip(agg_weights).map(|(point, weight)| point * weight).sum::<C::G>();
+    #[allow(non_snake_case)]
+    let A = sum_commitments - sum_points;
+
+    #[allow(non_snake_case)]
+    let R = transcript.read_point::<C>().map_err(|_| ())?;
+    let c = transcript.challenge::<C::F>();
+    let s = transcript.read_scalar::<C>().map_err(|_| ())?;
+
+    // Doesn't batch verify this as we can't access the internals of the GBP batch verifier
+    if (R + (A * c)) != (generators.h() * s) {
+      Err(())?;
+    }
+
+    if !transcript.complete().is_empty() {
+      Err(())?
+    };
+
+    Ok(res)
   }
 }

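The final acceptance condition from the verification code above, restated as a tiny standalone helper over any `group::Group` (illustrative; the commit inlines it because, per its comment, the GBP batch verifier's internals aren't accessible):

use group::Group;

// Accept iff R + c*A == s*H, i.e. (R, s) is a valid Schnorr proof of knowledge of the
// discrete logarithm of A with respect to H, on challenge c.
fn schnorr_opening_check<G: Group>(h: G, big_a: G, big_r: G, c: G::Scalar, s: G::Scalar) -> bool {
  (big_r + (big_a * c)) == (h * s)
}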
crypto/evrf/src/tests.rs (new file, 88 lines)
@@ -0,0 +1,88 @@
+use std::time::Instant;
+
+use rand_core::OsRng;
+
+use zeroize::Zeroize;
+use generic_array::typenum::{Sum, Diff, Quot, U, U1, U2};
+use blake2::{Digest, Blake2b512};
+
+use ciphersuite::{
+  group::ff::{FromUniformBytes, PrimeField},
+  Ciphersuite,
+};
+use pasta_curves::{Ep, Eq, Fp, Fq};
+
+use generalized_bulletproofs::tests::generators;
+
+use crate::*;
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
+struct Pallas;
+impl Ciphersuite for Pallas {
+  type F = Fq;
+  type G = Ep;
+  type H = Blake2b512;
+  const ID: &'static [u8] = b"Pallas";
+  fn generator() -> Ep {
+    Ep::generator()
+  }
+  fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
+    // This naive concat may be insecure in a real world deployment
+    // This is solely test code so it's fine
+    Self::F::from_uniform_bytes(&Self::H::digest([dst, msg].concat()).into())
+  }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
+struct Vesta;
+impl Ciphersuite for Vesta {
+  type F = Fp;
+  type G = Eq;
+  type H = Blake2b512;
+  const ID: &'static [u8] = b"Vesta";
+  fn generator() -> Eq {
+    Eq::generator()
+  }
+  fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
+    // This naive concat may be insecure in a real world deployment
+    // This is solely test code so it's fine
+    Self::F::from_uniform_bytes(&Self::H::digest([dst, msg].concat()).into())
+  }
+}
+
+struct VestaParams;
+impl DiscreteLogParameters for VestaParams {
+  type ScalarBits = U<{ <<Vesta as Ciphersuite>::F as PrimeField>::NUM_BITS as usize }>;
+  type XCoefficients = Quot<Sum<Self::ScalarBits, U1>, U2>;
+  type XCoefficientsMinusOne = Diff<Self::XCoefficients, U1>;
+  type YxCoefficients = Diff<Quot<Sum<Self::ScalarBits, U1>, U2>, U2>;
+}
+
+impl EvrfCurve for Pallas {
+  type EmbeddedCurve = Vesta;
+  type EmbeddedCurveParameters = VestaParams;
+}
+
+#[test]
+fn pasta_test() {
+  let generators = generators(1024);
+  let vesta_private_key = <Vesta as Ciphersuite>::F::random(&mut OsRng);
+  let time = Instant::now();
+  let res = Evrf::prove::<Pallas>(&mut OsRng, &generators, vesta_private_key, [0; 32], 1).unwrap();
+  println!("Proving time: {:?}", Instant::now() - time);
+
+  let time = Instant::now();
+  let mut verifier = generators.batch_verifier();
+  dbg!(Evrf::verify::<Pallas>(
+    &mut OsRng,
+    &generators,
+    &mut verifier,
+    Vesta::generator() * vesta_private_key,
+    [0; 32],
+    1,
+    &res.proof,
+  )
+  .unwrap());
+  assert!(generators.verify(verifier));
+  println!("Verifying time: {:?}", Instant::now() - time);
+}

@@ -99,4 +99,5 @@ allow-git = [
   "https://github.com/serai-dex/substrate-bip39",
   "https://github.com/serai-dex/substrate",
   "https://github.com/orcalabs/dockertest-rs",
+  "https://github.com/kayabaNerve/pasta_curves",
 ]