Mirror of https://github.com/serai-dex/serai.git (synced 2025-12-08 12:19:24 +00:00)
Initial commit
Combines the existing frost-rs, dalek-ff-group, and monero-rs repos into a monorepo, making tweaks as needed. Replaces RedDSA (which was going to be stubbed out into a new folder for now) with an offset system that removes the need for it and allows stealth addresses with CLSAG.
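Illustrative sketch, not part of the commit: the offset system relies on nothing more than the linearity of scalar multiplication, (x + o)·G = x·G + o·G, so a stealth-address/CLSAG tweak o can be applied to an existing FROST key instead of requiring a re-randomizable scheme like RedDSA. The snippet below only demonstrates that identity, using curve25519-dalek 3.x (the dependency dalek-ff-group wraps); the fixed scalars are stand-ins.

// Hypothetical demonstration of the key-offset identity; not code from this commit
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

fn main() {
  let x = Scalar::from(12345u64); // stand-in for the multisig's aggregate private key
  let o = Scalar::from(67890u64); // the offset

  let group_key = &x * &ED25519_BASEPOINT_TABLE;        // X = x·G
  let offset_key = &(x + o) * &ED25519_BASEPOINT_TABLE; // X' = (x + o)·G

  // The same offset can equivalently be applied to the public key alone
  assert_eq!(offset_key, group_key + (&o * &ED25519_BASEPOINT_TABLE));
}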
1 sign/dalek-ff-group/.gitignore vendored Normal file
@@ -0,0 +1 @@
Cargo.lock
17 sign/dalek-ff-group/Cargo.toml Normal file
@@ -0,0 +1,17 @@
[package]
name = "dalek-ff-group"
version = "0.1.0"
description = "ff/group bindings around curve25519-dalek"
license = "MIT"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[dependencies]
rand_core = "0.6"

subtle = "2.4"

ff = "0.10"
group = "0.10"

curve25519-dalek = "3.2"
21 sign/dalek-ff-group/LICENSE Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2022 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
3 sign/dalek-ff-group/README.md Normal file
@@ -0,0 +1,3 @@
# Dalek FF/Group

ff/group bindings around curve25519-dalek with a random function based around a more modern rand_core.
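A minimal usage sketch of the bindings described above (not part of the commit): the wrappers expose dalek's scalar field and Edwards group through the generic ff/group traits, so downstream code never touches dalek directly. OsRng from the rand crate is assumed here purely for the example; any rand_core 0.6 RngCore works.

// Hypothetical example crate depending on dalek-ff-group, ff 0.10, group 0.10, rand 0.8
use rand::rngs::OsRng;

use ff::Field;
use group::Group;

use dalek_ff_group::{Scalar, EdwardsPoint, ED25519_BASEPOINT_POINT};

fn main() {
  // Sampling goes through ff's Field::random, built on the newer rand_core the README mentions
  let x = Scalar::random(&mut OsRng);
  let point = ED25519_BASEPOINT_POINT * x;

  // Group-trait identities hold for the wrappers
  assert_eq!(EdwardsPoint::generator(), ED25519_BASEPOINT_POINT);
  assert_eq!(point + EdwardsPoint::identity(), point);
  assert_eq!(point.double(), point + point);
}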
291 sign/dalek-ff-group/src/lib.rs Normal file
@@ -0,0 +1,291 @@
use core::{
  ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
  borrow::Borrow,
  iter::{Iterator, Sum}
};

use rand_core::RngCore;

use subtle::{Choice, CtOption, ConditionallySelectable};

pub use curve25519_dalek as dalek;

use dalek::{
  constants,
  traits::Identity,
  scalar::Scalar as DScalar,
  edwards::{
    EdwardsPoint as DPoint,
    EdwardsBasepointTable as DTable,
    CompressedEdwardsY as DCompressed
  }
};

use ff::{Field, PrimeField};
use group::Group;

#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub struct Scalar(pub DScalar);

impl Deref for Scalar {
  type Target = DScalar;

  fn deref(&self) -> &Self::Target {
    &self.0
  }
}

impl Borrow<DScalar> for Scalar {
  fn borrow(&self) -> &DScalar {
    &self.0
  }
}

impl Borrow<DScalar> for &Scalar {
  fn borrow(&self) -> &DScalar {
    &self.0
  }
}

impl Add<Scalar> for Scalar {
  type Output = Self;
  fn add(self, other: Scalar) -> Scalar { Self(self.0 + other.0) }
}
impl AddAssign for Scalar {
  fn add_assign(&mut self, other: Scalar) { self.0 += other.0 }
}

impl<'a> Add<&'a Scalar> for Scalar {
  type Output = Self;
  fn add(self, other: &'a Scalar) -> Scalar { Self(self.0 + other.0) }
}
impl<'a> AddAssign<&'a Scalar> for Scalar {
  fn add_assign(&mut self, other: &'a Scalar) { self.0 += other.0 }
}

impl Sub<Scalar> for Scalar {
  type Output = Self;
  fn sub(self, other: Scalar) -> Scalar { Self(self.0 - other.0) }
}
impl SubAssign for Scalar {
  fn sub_assign(&mut self, other: Scalar) { self.0 -= other.0 }
}

impl<'a> Sub<&'a Scalar> for Scalar {
  type Output = Self;
  fn sub(self, other: &'a Scalar) -> Scalar { Self(self.0 - other.0) }
}
impl<'a> SubAssign<&'a Scalar> for Scalar {
  fn sub_assign(&mut self, other: &'a Scalar) { self.0 -= other.0 }
}

impl Neg for Scalar {
  type Output = Self;
  fn neg(self) -> Scalar { Self(-self.0) }
}

impl Mul<Scalar> for Scalar {
  type Output = Self;
  fn mul(self, other: Scalar) -> Scalar { Self(self.0 * other.0) }
}
impl MulAssign for Scalar {
  fn mul_assign(&mut self, other: Scalar) { self.0 *= other.0 }
}

impl<'a> Mul<&'a Scalar> for Scalar {
  type Output = Self;
  fn mul(self, other: &'a Scalar) -> Scalar { Self(self.0 * other.0) }
}
impl<'a> MulAssign<&'a Scalar> for Scalar {
  fn mul_assign(&mut self, other: &'a Scalar) { self.0 *= other.0 }
}

impl ConditionallySelectable for Scalar {
  fn conditional_select(_: &Self, _: &Self, _: Choice) -> Self { unimplemented!() }
}

impl Field for Scalar {
  fn random(mut rng: impl RngCore) -> Self {
    let mut r = [0; 64];
    rng.fill_bytes(&mut r);
    Self(DScalar::from_bytes_mod_order_wide(&r))
  }

  fn zero() -> Self { Self(DScalar::zero()) }
  fn one() -> Self { Self(DScalar::one()) }
  fn square(&self) -> Self { *self * self }
  fn double(&self) -> Self { *self + self }
  fn invert(&self) -> CtOption<Self> { CtOption::new(Self(self.0.invert()), Choice::from(1 as u8)) }
  fn sqrt(&self) -> CtOption<Self> { unimplemented!() }
  fn is_zero(&self) -> bool { self.0 == DScalar::zero() }
  fn cube(&self) -> Self { *self * self * self }
  fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self { unimplemented!() }
}

impl From<u64> for Scalar {
  fn from(a: u64) -> Scalar { Self(DScalar::from(a)) }
}

impl PrimeField for Scalar {
  type Repr = [u8; 32];
  const NUM_BITS: u32 = 253;
  const CAPACITY: u32 = 252;
  fn from_repr(bytes: [u8; 32]) -> Option<Self> { DScalar::from_canonical_bytes(bytes).map(|x| Scalar(x)) }
  fn to_repr(&self) -> [u8; 32] { self.0.to_bytes() }

  const S: u32 = 0;
  fn is_odd(&self) -> bool { unimplemented!() }
  fn multiplicative_generator() -> Self { unimplemented!() }
  fn root_of_unity() -> Self { unimplemented!() }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct EdwardsPoint(pub DPoint);
pub const ED25519_BASEPOINT_POINT: EdwardsPoint = EdwardsPoint(constants::ED25519_BASEPOINT_POINT);

impl Deref for EdwardsPoint {
  type Target = DPoint;

  fn deref(&self) -> &Self::Target {
    &self.0
  }
}

impl Borrow<DPoint> for EdwardsPoint {
  fn borrow(&self) -> &DPoint {
    &self.0
  }
}

impl Borrow<DPoint> for &EdwardsPoint {
  fn borrow(&self) -> &DPoint {
    &self.0
  }
}

impl Add<EdwardsPoint> for EdwardsPoint {
  type Output = Self;
  fn add(self, b: EdwardsPoint) -> EdwardsPoint { Self(self.0 + b.0) }
}
impl AddAssign<EdwardsPoint> for EdwardsPoint {
  fn add_assign(&mut self, other: EdwardsPoint) { self.0 += other.0 }
}
impl Sum<EdwardsPoint> for EdwardsPoint {
  fn sum<I: Iterator<Item = EdwardsPoint>>(iter: I) -> EdwardsPoint { Self(DPoint::sum(iter)) }
}

impl<'a> Add<&'a EdwardsPoint> for EdwardsPoint {
  type Output = Self;
  fn add(self, b: &'a EdwardsPoint) -> EdwardsPoint { Self(self.0 + b.0) }
}
impl<'a> AddAssign<&'a EdwardsPoint> for EdwardsPoint {
  fn add_assign(&mut self, other: &'a EdwardsPoint) { self.0 += other.0 }
}
impl<'a> Sum<&'a EdwardsPoint> for EdwardsPoint {
  fn sum<I: Iterator<Item = &'a EdwardsPoint>>(iter: I) -> EdwardsPoint { Self(DPoint::sum(iter)) }
}

impl Sub<EdwardsPoint> for EdwardsPoint {
  type Output = Self;
  fn sub(self, b: EdwardsPoint) -> EdwardsPoint { Self(self.0 - b.0) }
}
impl SubAssign<EdwardsPoint> for EdwardsPoint {
  fn sub_assign(&mut self, other: EdwardsPoint) { self.0 -= other.0 }
}

impl<'a> Sub<&'a EdwardsPoint> for EdwardsPoint {
  type Output = Self;
  fn sub(self, b: &'a EdwardsPoint) -> EdwardsPoint { Self(self.0 - b.0) }
}
impl<'a> SubAssign<&'a EdwardsPoint> for EdwardsPoint {
  fn sub_assign(&mut self, other: &'a EdwardsPoint) { self.0 -= other.0 }
}

impl Neg for EdwardsPoint {
  type Output = Self;
  fn neg(self) -> EdwardsPoint { Self(-self.0) }
}

impl Mul<Scalar> for EdwardsPoint {
  type Output = Self;
  fn mul(self, b: Scalar) -> EdwardsPoint { Self(b.0 * self.0) }
}
impl MulAssign<Scalar> for EdwardsPoint {
  fn mul_assign(&mut self, other: Scalar) { self.0 *= other.0 }
}

impl<'a> Mul<&'a Scalar> for EdwardsPoint {
  type Output = Self;
  fn mul(self, b: &'a Scalar) -> EdwardsPoint { Self(b.0 * self.0) }
}
impl<'a> MulAssign<&'a Scalar> for EdwardsPoint {
  fn mul_assign(&mut self, other: &'a Scalar) { self.0 *= other.0 }
}

impl Group for EdwardsPoint {
  type Scalar = Scalar;
  fn random(mut _rng: impl RngCore) -> Self { unimplemented!() }
  fn identity() -> Self { Self(DPoint::identity()) }
  fn generator() -> Self { ED25519_BASEPOINT_POINT }
  fn is_identity(&self) -> Choice { unimplemented!() }
  fn double(&self) -> Self { *self + self }
}

impl Scalar {
  pub fn from_canonical_bytes(bytes: [u8; 32]) -> Option<Scalar> {
    DScalar::from_canonical_bytes(bytes).map(|x| Self(x))
  }
  pub fn from_bytes_mod_order(bytes: [u8; 32]) -> Scalar {
    Self(DScalar::from_bytes_mod_order(bytes))
  }
  pub fn from_bytes_mod_order_wide(bytes: &[u8; 64]) -> Scalar {
    Self(DScalar::from_bytes_mod_order_wide(bytes))
  }
}

pub struct CompressedEdwardsY(pub DCompressed);
impl CompressedEdwardsY {
  pub fn new(y: [u8; 32]) -> CompressedEdwardsY {
    Self(DCompressed(y))
  }

  pub fn decompress(&self) -> Option<EdwardsPoint> {
    self.0.decompress().map(|x| EdwardsPoint(x))
  }

  pub fn to_bytes(&self) -> [u8; 32] {
    self.0.to_bytes()
  }
}

impl EdwardsPoint {
  pub fn is_torsion_free(&self) -> bool {
    self.0.is_torsion_free()
  }

  pub fn compress(&self) -> CompressedEdwardsY {
    CompressedEdwardsY(self.0.compress())
  }
}

pub struct EdwardsBasepointTable(pub DTable);
pub const ED25519_BASEPOINT_TABLE: EdwardsBasepointTable = EdwardsBasepointTable(constants::ED25519_BASEPOINT_TABLE);

impl Deref for EdwardsBasepointTable {
  type Target = DTable;

  fn deref(&self) -> &Self::Target {
    &self.0
  }
}

impl Borrow<DTable> for &EdwardsBasepointTable {
  fn borrow(&self) -> &DTable {
    &self.0
  }
}

impl Mul<Scalar> for &EdwardsBasepointTable {
  type Output = EdwardsPoint;
  fn mul(self, b: Scalar) -> EdwardsPoint { EdwardsPoint(&b.0 * &self.0) }
}
1 sign/frost/.gitignore vendored Normal file
@@ -0,0 +1 @@
Cargo.lock
23 sign/frost/Cargo.toml Normal file
@@ -0,0 +1,23 @@
[package]
name = "frost"
version = "0.1.0"
description = "Implementation of FROST over ff/group"
license = "MIT"
authors = ["kayabaNerve (Luke Parker) <lukeparker5132@gmail.com>"]
edition = "2021"

[dependencies]
digest = "0.9"
blake2 = "0.9"

rand_core = "0.6"

ff = "0.10"
group = "0.10"

thiserror = "1"

[dev-dependencies]
hex = "0.4"
rand = "0.8"
jubjub = "0.7"
21 sign/frost/LICENSE Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2021-2022 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
3 sign/frost/README.md Normal file
@@ -0,0 +1,3 @@
# FROST

Implementation of FROST for any curve with a ff/group API.
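For orientation (not part of the commit): a completed FROST signing session produces an ordinary Schnorr signature, so verification reduces to the identity s·G == R + c·A that algorithm::Schnorr::verify checks below. This toy sketch shows the algebra concretely over the dalek-ff-group types from the same monorepo, with a fixed stand-in challenge where a real verifier would recompute c = Hram(R, A, m).

// Hypothetical example; dalek-ff-group is assumed as a dependency
use dalek_ff_group::{Scalar, EdwardsPoint, ED25519_BASEPOINT_POINT};

fn verify(public_key: EdwardsPoint, r: EdwardsPoint, c: Scalar, s: Scalar) -> bool {
  // s·G == R + c·A
  (ED25519_BASEPOINT_POINT * s) == (r + (public_key * c))
}

fn main() {
  let x = Scalar::from(42u64);         // group private key (never held by any one party in FROST)
  let a = ED25519_BASEPOINT_POINT * x; // group public key A = x·G
  let k = Scalar::from(7u64);          // aggregated nonce
  let r = ED25519_BASEPOINT_POINT * k; // nonce commitment R = k·G
  let c = Scalar::from(99u64);         // stand-in challenge
  let s = k + (x * c);                 // aggregated response
  assert!(verify(a, r, c, s));
}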
166 sign/frost/src/algorithm.rs Normal file
@@ -0,0 +1,166 @@
use core::{marker::PhantomData, fmt::Debug};

use rand_core::{RngCore, CryptoRng};
use digest::Digest;

use group::Group;

use crate::{Curve, FrostError, sign};

pub trait Algorithm<C: Curve>: Clone + Debug {
  /// The resulting type of the signatures this algorithm will produce
  type Signature: Clone + Debug;

  /// Context for this algorithm to be hashed into b, and therefore committed to
  fn context(&self) -> Vec<u8>;

  /// The amount of bytes from each participant's addendum to commit to
  fn addendum_commit_len() -> usize;

  /// Generate an addendum to FROST's preprocessing stage
  fn preprocess_addendum<R: RngCore + CryptoRng>(
    rng: &mut R,
    params: &sign::ParamsView<C>,
    nonces: &[C::F; 2],
  ) -> Vec<u8>;

  /// Process the addendum for the specified participant. Guaranteed to be ordered
  fn process_addendum(
    &mut self,
    params: &sign::ParamsView<C>,
    l: usize,
    commitments: &[C::G; 2],
    p: &C::F,
    serialized: &[u8],
  ) -> Result<(), FrostError>;

  /// Sign a share with the given secret/nonce
  /// The secret will already have had its lagrange coefficient applied, so it is the necessary
  /// key share
  /// The nonce will already have been processed into the combined form d + (e * p)
  fn sign_share(
    &mut self,
    params: &sign::ParamsView<C>,
    nonce_sum: C::G,
    nonce: C::F,
    msg: &[u8],
  ) -> C::F;

  /// Verify a signature
  fn verify(&self, group_key: C::G, nonce: C::G, sum: C::F) -> Option<Self::Signature>;

  /// Verify a specific share given as a response. Used to determine blame if signature
  /// verification fails
  fn verify_share(
    &self,
    verification_share: C::G,
    nonce: C::G,
    share: C::F,
  ) -> bool;
}

pub trait Hram: PartialEq + Eq + Copy + Clone + Debug {
  #[allow(non_snake_case)]
  fn hram<C: Curve>(R: &C::G, A: &C::G, m: &[u8]) -> C::F;
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Blake2bHram {}
impl Hram for Blake2bHram {
  #[allow(non_snake_case)]
  fn hram<C: Curve>(R: &C::G, A: &C::G, m: &[u8]) -> C::F {
    C::F_from_bytes_wide(
      blake2::Blake2b::new()
        .chain(C::G_to_bytes(R))
        .chain(C::G_to_bytes(A))
        .chain(m)
        .finalize()
        .as_slice()
        .try_into()
        .expect("couldn't convert a 64-byte hash to a 64-byte array")
    )
  }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Schnorr<C: Curve, H: Hram> {
  c: Option<C::F>,
  hram: PhantomData<H>,
}

impl<C: Curve, H: Hram> Schnorr<C, H> {
  pub fn new() -> Schnorr<C, H> {
    Schnorr {
      c: None,
      hram: PhantomData
    }
  }
}

#[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct SchnorrSignature<C: Curve> {
  pub R: C::G,
  pub s: C::F,
}

impl<C: Curve, H: Hram> Algorithm<C> for Schnorr<C, H> {
  type Signature = SchnorrSignature<C>;

  fn context(&self) -> Vec<u8> {
    vec![]
  }

  fn addendum_commit_len() -> usize {
    0
  }

  fn preprocess_addendum<R: RngCore + CryptoRng>(
    _: &mut R,
    _: &sign::ParamsView<C>,
    _: &[C::F; 2],
  ) -> Vec<u8> {
    vec![]
  }

  fn process_addendum(
    &mut self,
    _: &sign::ParamsView<C>,
    _: usize,
    _: &[C::G; 2],
    _: &C::F,
    _: &[u8],
  ) -> Result<(), FrostError> {
    Ok(())
  }

  fn sign_share(
    &mut self,
    params: &sign::ParamsView<C>,
    nonce_sum: C::G,
    nonce: C::F,
    msg: &[u8],
  ) -> C::F {
    let c = H::hram::<C>(&nonce_sum, &params.group_key(), msg);
    self.c = Some(c);

    nonce + (params.secret_share() * c)
  }

  fn verify(&self, group_key: C::G, nonce: C::G, sum: C::F) -> Option<Self::Signature> {
    if (C::generator_table() * sum) + (C::G::identity() - (group_key * self.c.unwrap())) == nonce {
      Some(SchnorrSignature { R: nonce, s: sum })
    } else {
      None
    }
  }

  fn verify_share(
    &self,
    verification_share: C::G,
    nonce: C::G,
    share: C::F,
  ) -> bool {
    (C::generator_table() * share) == (nonce + (verification_share * self.c.unwrap()))
  }
}
501 sign/frost/src/key_gen.rs Normal file
@@ -0,0 +1,501 @@
use core::{convert::{TryFrom, TryInto}, cmp::min, fmt};

use rand_core::{RngCore, CryptoRng};
use blake2::{Digest, Blake2b};

use ff::{Field, PrimeField};
use group::Group;

use crate::{Curve, MultisigParams, MultisigKeys, FrostError};

// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
// the serialized commitments to be broadcasted over an authenticated channel to all parties
// TODO: This potentially could return a much more robust serialized message, including a signature
// of its entirety. The issue is it can't use its own key as it has no chain of custody behind it.
// While we could ask for a key to be passed in, explicitly declaring the need for authenticated
// communications in the API itself, systems will likely already provide an authenticated
// communication method making this redundant. It also doesn't guarantee the system which passed
// the key is correctly using it, meaning we can only minimize risk so much
// One notable improvement would be to include the index in the message. While the system must
// still track this to determine if it's ready for the next step, and to remove duplicates, it
// would ensure no counterparties presume the same index and this system didn't mislabel a
// counterparty
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
  rng: &mut R,
  params: &MultisigParams,
  context: &str,
) -> (Vec<C::F>, Vec<C::G>, Vec<u8>) {
  let mut coefficients = Vec::with_capacity(params.t);
  let mut commitments = Vec::with_capacity(params.t);
  let mut serialized = Vec::with_capacity((C::G_len() * params.t) + C::G_len() + C::F_len());
  for j in 0 .. params.t {
    // Step 1: Generate t random values to form a polynomial with
    coefficients.push(C::F::random(&mut *rng));
    // Step 3: Generate public commitments
    commitments.push(C::generator_table() * coefficients[j]);
    // Serialize them for publication
    serialized.extend(&C::G_to_bytes(&commitments[j]));
  }

  // Step 2: Provide a proof of knowledge
  // This can be deterministic as the PoK is a singleton never opened up to cooperative discussion
  // There's also no reason to spend the time and effort to make this deterministic besides a
  // general obsession with canonicity and determinism
  let k = C::F::random(rng);
  #[allow(non_snake_case)]
  let R = C::generator_table() * k;
  let c = C::F_from_bytes_wide(
    Blake2b::new()
      .chain(&u64::try_from(params.i).unwrap().to_le_bytes())
      .chain(context.as_bytes())
      .chain(&C::G_to_bytes(&R)) // R
      .chain(&serialized) // A of the first commitment, which is what we're proving we have
                          // the private key for
                          // m of the rest of the commitments, authenticating them
      .finalize()
      .as_slice()
      .try_into()
      .expect("couldn't convert a 64-byte hash to a 64-byte array")
  );
  let s = k + (coefficients[0] * c);

  serialized.extend(&C::G_to_bytes(&R));
  serialized.extend(&C::F_to_le_bytes(&s));

  // Step 4: Broadcast
  (coefficients, commitments, serialized)
}

// Verify the received data from the first round of key generation
fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
  rng: &mut R,
  params: &MultisigParams,
  context: &str,
  our_commitments: Vec<C::G>,
  serialized: &[Vec<u8>],
) -> Result<Vec<Vec<C::G>>, FrostError> {
  // Deserialize all of the commitments, validating the input buffers as needed
  if serialized.len() != (params.n + 1) {
    Err(
      // Prevents a panic if serialized.len() == 0
      FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
    )?;
  }

  // Expect a null set of commitments for index 0 so the vector is guaranteed to line up with
  // actual indexes. Even if we did the offset internally, the system would need to write the vec
  // with the same offset in mind. Therefore, this trick which is probably slightly less efficient
  // yet keeps everything simple is preferred
  if serialized[0] != vec![] {
    Err(FrostError::NonEmptyParticipantZero)?;
  }

  let commitments_len = params.t * C::G_len();
  let mut commitments = Vec::with_capacity(params.n + 1);
  commitments.push(vec![]);

  let signature_len = C::G_len() + C::F_len();
  let mut first = true;
  let mut scalars = Vec::with_capacity((params.n - 1) * 3);
  let mut points = Vec::with_capacity((params.n - 1) * 3);
  for l in 1 ..= params.n {
    if l == params.i {
      if serialized[l].len() != 0 {
        Err(FrostError::DuplicatedIndex(l))?;
      }
      commitments.push(vec![]);
      continue;
    }

    if serialized[l].len() != (commitments_len + signature_len) {
      // Return an error with an approximation for how many commitments were included
      // Prevents errors if not even the signature was included
      if serialized[l].len() < signature_len {
        Err(FrostError::InvalidCommitmentQuantity(l, params.t, 0))?;
      }

      Err(
        FrostError::InvalidCommitmentQuantity(
          l,
          params.t,
          // Could technically be x.y despite this returning x, yet any y is negligible
          // It could help with debugging to know a partial piece of data was read but this error
          // alone should be enough
          (serialized[l].len() - signature_len) / C::G_len()
        )
      )?;
    }

    commitments.push(Vec::with_capacity(params.t));
    for o in 0 .. params.t {
      commitments[l].push(
        C::G_from_slice(
          &serialized[l][(o * C::G_len()) .. ((o + 1) * C::G_len())]
        ).map_err(|_| FrostError::InvalidCommitment(l))?
      );
    }

    // Step 5: Validate each proof of knowledge (prep)
    let mut u = C::F::one();
    if !first {
      u = C::F::random(&mut *rng);
    }

    scalars.push(u);
    points.push(
      C::G_from_slice(
        &serialized[l][commitments_len .. commitments_len + C::G_len()]
      ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?
    );

    scalars.push(
      -C::F_from_le_slice(
        &serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
      ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))? * u
    );
    points.push(C::generator());

    let c = C::F_from_bytes_wide(
      Blake2b::new()
        // Bounded by n which is already checked to be within the u64 range
        .chain(&u64::try_from(l).unwrap().to_le_bytes())
        .chain(context.as_bytes())
        .chain(&serialized[l][commitments_len .. commitments_len + C::G_len()])
        .chain(&serialized[l][0 .. commitments_len])
        .finalize()
        .as_slice()
        .try_into()
        .expect("couldn't convert a 64-byte hash to a 64-byte array")
    );

    if first {
      scalars.push(c);
      first = false;
    } else {
      scalars.push(c * u);
    }
    points.push(commitments[l][0]);
  }

  // Step 5: Implementation
  // Uses batch verification to optimize the success case dramatically
  // On failure, the cost is now this + blame, yet that should happen infrequently
  if C::multiexp_vartime(&scalars, &points) != C::G::identity() {
    for l in 1 ..= params.n {
      if l == params.i {
        continue;
      }

      #[allow(non_snake_case)]
      let R = C::G_from_slice(
        &serialized[l][commitments_len .. commitments_len + C::G_len()]
      ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;

      let s = C::F_from_le_slice(
        &serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
      ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;

      let c = C::F_from_bytes_wide(
        Blake2b::new()
          // Bounded by n which is already checked to be within the u64 range
          .chain(&u64::try_from(l).unwrap().to_le_bytes())
          .chain(context.as_bytes())
          .chain(&serialized[l][commitments_len .. commitments_len + C::G_len()])
          .chain(&serialized[l][0 .. commitments_len])
          .finalize()
          .as_slice()
          .try_into()
          .expect("couldn't convert a 64-byte hash to a 64-byte array")
      );

      if R != ((C::generator_table() * s) + (commitments[l][0] * (C::F::zero() - &c))) {
        Err(FrostError::InvalidProofOfKnowledge(l))?;
      }
    }

    Err(FrostError::InternalError("batch validation is broken".to_string()))?;
  }

  // Write in our own commitments
  commitments[params.i] = our_commitments;

  Ok(commitments)
}

fn polynomial<F: PrimeField>(
  coefficients: &[F],
  i: usize
) -> F {
  let i = F::from(u64::try_from(i).unwrap());
  let mut share = F::zero();
  for (idx, coefficient) in coefficients.iter().rev().enumerate() {
    share += coefficient;
    if idx != (coefficients.len() - 1) {
      share *= i;
    }
  }
  share
}

// Implements round 1, step 5 and round 2, step 1 of FROST key generation
// Returns our secret share part, commitments for the next step, and a vector for each
// counterparty to receive
fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
  rng: &mut R,
  params: &MultisigParams,
  context: &str,
  coefficients: Vec<C::F>,
  our_commitments: Vec<C::G>,
  commitments: &[Vec<u8>],
) -> Result<(C::F, Vec<Vec<C::G>>, Vec<Vec<u8>>), FrostError> {
  let commitments = verify_r1::<R, C>(rng, params, context, our_commitments, commitments)?;

  // Step 1: Generate secret shares for all other parties
  let mut res = Vec::with_capacity(params.n + 1);
  res.push(vec![]);
  for i in 1 ..= params.n {
    // Don't push our own to the byte buffer which is meant to be sent around
    // An app developer could accidentally send it. Best to keep this black boxed
    if i == params.i {
      res.push(vec![]);
      continue
    }

    res.push(C::F_to_le_bytes(&polynomial(&coefficients, i)));
  }

  // Calculate our own share
  let share = polynomial(&coefficients, params.i);

  // The secret shares are discarded here, not cleared. While any system which leaves its memory
  // accessible is likely totally lost already, making the distinction meaningless when the key gen
  // system acts as the signer system and therefore actively holds the signing key anyways, it
  // should be overwritten with /dev/urandom in the name of security (which still doesn't meet
  // requirements for secure data deletion yet those requirements expect hardware access which is
  // far past what this library can reasonably counter)
  // TODO: Zero out the coefficients

  Ok((share, commitments, res))
}

/// Finishes round 2 and returns both the secret share and the serialized public key.
/// This key is not usable until all parties confirm they have completed the protocol without
/// issue, yet simply confirming protocol completion without issue is enough to confirm the same
/// key was generated as long as a lack of duplicated commitments was also confirmed when they were
/// broadcasted initially
fn complete_r2<C: Curve>(
  params: MultisigParams,
  share: C::F,
  commitments: &[Vec<C::G>],
  // Vec to preserve ownership
  serialized: Vec<Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
  // Step 2. Verify each share
  if serialized.len() != (params.n + 1) {
    Err(
      FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
    )?;
  }

  if (commitments[0].len() != 0) || (serialized[0].len() != 0) {
    Err(FrostError::NonEmptyParticipantZero)?;
  }

  // Deserialize them
  let mut shares: Vec<C::F> = vec![C::F::zero()];
  for i in 1 .. serialized.len() {
    if i == params.i {
      if serialized[i].len() != 0 {
        Err(FrostError::DuplicatedIndex(i))?;
      }
      shares.push(C::F::zero());
      continue;
    }
    shares.push(C::F_from_le_slice(&serialized[i]).map_err(|_| FrostError::InvalidShare(i))?);
  }

  for l in 1 ..= params.n {
    if l == params.i {
      continue;
    }

    let i_scalar = C::F::from(u64::try_from(params.i).unwrap());
    let mut exp = C::F::one();
    let mut exps = Vec::with_capacity(params.t);
    for _ in 0 .. params.t {
      exps.push(exp);
      exp *= i_scalar;
    }

    // Doesn't use multiexp_vartime with -shares[l] due to not being able to push to commitments
    if C::multiexp_vartime(&exps, &commitments[l]) != (C::generator_table() * shares[l]) {
      Err(FrostError::InvalidCommitment(l))?;
    }
  }

  // TODO: Clear the original share

  let mut secret_share = share;
  for remote_share in shares {
    secret_share += remote_share;
  }

  let mut verification_shares = vec![C::G::identity()];
  for i in 1 ..= params.n {
    let mut exps = vec![];
    let mut cs = vec![];
    for j in 1 ..= params.n {
      for k in 0 .. params.t {
        let mut exp = C::F::one();
        for _ in 0 .. k {
          exp *= C::F::from(u64::try_from(i).unwrap());
        }
        exps.push(exp);
        cs.push(commitments[j][k]);
      }
    }
    verification_shares.push(C::multiexp_vartime(&exps, &cs));
  }

  debug_assert_eq!(
    C::generator_table() * secret_share,
    verification_shares[params.i]
  );

  let mut group_key = C::G::identity();
  for j in 1 ..= params.n {
    group_key += commitments[j][0];
  }

  // TODO: Clear serialized and shares

  Ok(MultisigKeys { params, secret_share, group_key, verification_shares, offset: None })
}

/// State of a Key Generation machine
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum State {
  Fresh,
  GeneratedCoefficients,
  GeneratedSecretShares,
  Complete,
}

impl fmt::Display for State {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "{:?}", self)
  }
}

/// State machine which manages key generation
pub struct StateMachine<C: Curve> {
  params: MultisigParams,
  context: String,
  state: State,
  coefficients: Option<Vec<C::F>>,
  our_commitments: Option<Vec<C::G>>,
  secret: Option<C::F>,
  commitments: Option<Vec<Vec<C::G>>>,
}

impl<C: Curve> StateMachine<C> {
  /// Creates a new machine to generate a key for the specified curve in the specified multisig
  // The context string must be unique among multisigs
  pub fn new(params: MultisigParams, context: String) -> StateMachine<C> {
    StateMachine {
      params,
      context,
      state: State::Fresh,
      coefficients: None,
      our_commitments: None,
      secret: None,
      commitments: None,
    }
  }

  /// Start generating a key according to the FROST DKG spec
  /// Returns a serialized list of commitments to be sent to all parties over an authenticated
  /// channel. If any party submits multiple sets of commitments, they MUST be treated as malicious
  pub fn generate_coefficients<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R
  ) -> Result<Vec<u8>, FrostError> {
    if self.state != State::Fresh {
      Err(FrostError::InvalidKeyGenTransition(State::Fresh, self.state))?;
    }

    let (coefficients, commitments, serialized) = generate_key_r1::<R, C>(
      rng,
      &self.params,
      &self.context,
    );

    self.coefficients = Some(coefficients);
    self.our_commitments = Some(commitments);
    self.state = State::GeneratedCoefficients;
    Ok(serialized)
  }

  /// Continue generating a key
  /// Takes in everyone else's commitments, which are expected to be in a Vec where participant
  /// index = Vec index. An empty vector is expected at index 0 to allow for this. An empty vector
  /// is also expected at index i which is locally handled. Returns a byte vector representing a
  /// secret share for each other participant which should be encrypted before sending
  pub fn generate_secret_shares<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    commitments: Vec<Vec<u8>>,
  ) -> Result<Vec<Vec<u8>>, FrostError> {
    if self.state != State::GeneratedCoefficients {
      Err(FrostError::InvalidKeyGenTransition(State::GeneratedCoefficients, self.state))?;
    }

    let (secret, commitments, shares) = generate_key_r2::<R, C>(
      rng,
      &self.params,
      &self.context,
      self.coefficients.take().unwrap(),
      self.our_commitments.take().unwrap(),
      &commitments,
    )?;

    self.secret = Some(secret);
    self.commitments = Some(commitments);
    self.state = State::GeneratedSecretShares;
    Ok(shares)
  }

  /// Complete key generation
  /// Takes in everyone else's shares submitted to us as a Vec, expecting participant index =
  /// Vec index with an empty vector at index 0 and index i. Returns a byte vector representing the
  /// group's public key, while setting a valid secret share inside the machine. > t participants
  /// must report completion without issue before this key can be considered usable, yet you should
  /// wait for all participants to report as such
  pub fn complete(
    &mut self,
    shares: Vec<Vec<u8>>,
  ) -> Result<MultisigKeys<C>, FrostError> {
    if self.state != State::GeneratedSecretShares {
      Err(FrostError::InvalidKeyGenTransition(State::GeneratedSecretShares, self.state))?;
    }

    let keys = complete_r2(
      self.params,
      self.secret.take().unwrap(),
      &self.commitments.take().unwrap(),
      shares,
    )?;

    self.state = State::Complete;
    Ok(keys)
  }

  pub fn params(&self) -> MultisigParams {
    self.params.clone()
  }

  pub fn state(&self) -> State {
    self.state
  }
}
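A self-contained illustration (not part of the commit) of the check complete_r2 performs on each received share: evaluating a peer's secret polynomial at our index i must agree with the same evaluation done "in the exponent" over their public commitments, i.e. f(i)·G == Σ_k i^k·A_k. The dalek-ff-group types from this monorepo and toy coefficients stand in for the generic C::F/C::G.

// Hypothetical example; dalek-ff-group, ff 0.10 and group 0.10 are assumed as dependencies
use ff::Field;
use group::Group;

use dalek_ff_group::{Scalar, EdwardsPoint, ED25519_BASEPOINT_POINT};

// Horner evaluation of the secret polynomial at x = i, mirroring key_gen::polynomial
fn evaluate(coefficients: &[Scalar], i: u64) -> Scalar {
  let x = Scalar::from(i);
  let mut res = Scalar::zero();
  for coefficient in coefficients.iter().rev() {
    res *= x;
    res += *coefficient;
  }
  res
}

fn main() {
  // A toy polynomial f(x) = a0 + a1·x, with public commitments A_k = a_k·G
  let coefficients = [Scalar::from(11u64), Scalar::from(22u64)];
  let commitments: Vec<EdwardsPoint> =
    coefficients.iter().map(|c| ED25519_BASEPOINT_POINT * *c).collect();

  let i = 3u64;
  let share = evaluate(&coefficients, i);

  // Recompute the share in the exponent from the commitments alone: Σ_k i^k·A_k
  let mut rhs = EdwardsPoint::identity();
  let mut exp = Scalar::one();
  for commitment in &commitments {
    rhs += *commitment * exp;
    exp *= Scalar::from(i);
  }

  assert_eq!(ED25519_BASEPOINT_POINT * share, rhs);
}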
380 sign/frost/src/lib.rs Normal file
@@ -0,0 +1,380 @@
use core::{ops::Mul, fmt::Debug};

use ff::PrimeField;
use group::{Group, GroupOps, ScalarMul};

use thiserror::Error;

pub mod key_gen;
pub mod algorithm;
pub mod sign;

/// Set of errors for curve-related operations, namely encoding and decoding
#[derive(Error, Debug)]
pub enum CurveError {
  #[error("invalid length for data (expected {0}, got {1})")]
  InvalidLength(usize, usize),
  // Push towards hex encoding in error messages
  #[error("invalid scalar ({0})")]
  InvalidScalar(String),
  #[error("invalid point ({0})")]
  InvalidPoint(String),
}

/// Unified trait to manage a field/group
// This should be moved into its own crate if the need for generic cryptography over ff/group
// continues, which is the exact reason ff/group exists (to provide a generic interface)
// elliptic-curve exists, yet it doesn't really serve the same role, nor does it use &[u8]/Vec<u8>
// It uses GenericArray which will hopefully be deprecated as Rust evolves and doesn't offer enough
// advantages in the modern day to be worth the hassle -- Kayaba
pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
  /// Field element type
  // This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses
  type F: PrimeField;
  /// Group element type
  type G: Group + GroupOps + ScalarMul<Self::F>;
  /// Precomputed table type
  type T: Mul<Self::F, Output = Self::G>;

  /// ID for this curve
  fn id() -> String;
  /// Byte length of the curve ID
  // While curve.id().len() is trivial, this bounds it to u8 and lets us ignore the possibility it
  // contains Unicode, therefore having a String length which is different from its byte length
  fn id_len() -> u8;

  /// Generator for the group
  // While group does provide this in its API, Jubjub users will want to use a custom basepoint
  fn generator() -> Self::G;

  /// Table for the generator for the group
  /// If there isn't a precomputed table available, the generator itself should be used
  fn generator_table() -> Self::T;

  /// Multiexponentiation function, presumably Straus or Pippenger
  /// This library does provide an implementation of Straus which should increase key generation
  /// performance by around 4x, also named multiexp_vartime, with the same API. However, if a more
  /// performant implementation is available, that should be used instead
  // This could also be written as -> Option<C::G> with None for not implemented
  fn multiexp_vartime(scalars: &[Self::F], points: &[Self::G]) -> Self::G;

  // The following methods would optimally be F:: and G:: yet developers can't control F/G
  // They can control a trait they pass into this library

  /// Constant size of a serialized field element
  // The alternative way to grab this would be either serializing a junk element and getting its
  // length or doing a naive division of its BITS property by 8 and assuming a lack of padding
  #[allow(non_snake_case)]
  fn F_len() -> usize;

  /// Constant size of a serialized group element
  // We could grab the serialization as described above yet a naive developer may use a
  // non-constant size encoding, proving yet another reason to force this to be a provided constant
  // A naive developer could still provide a constant for a variable length encoding, yet at least
  // that is on them
  #[allow(non_snake_case)]
  fn G_len() -> usize;

  /// Field element from slice. Should be canonical
  // Required due to the lack of standardized encoding functions provided by ff/group
  // While they do technically exist, their usage of Self::Repr breaks all potential library usage
  // without helper functions like this
  #[allow(non_snake_case)]
  fn F_from_le_slice(slice: &[u8]) -> Result<Self::F, CurveError>;

  /// Field element from slice. Must support reducing the input into a valid field element
  #[allow(non_snake_case)]
  fn F_from_le_slice_unreduced(slice: &[u8]) -> Self::F;

  /// Group element from slice. Should be canonical
  #[allow(non_snake_case)]
  fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError>;

  /// Obtain a vector of the byte encoding of F
  #[allow(non_snake_case)]
  fn F_to_le_bytes(f: &Self::F) -> Vec<u8>;

  /// Obtain a vector of the byte encoding of G
  #[allow(non_snake_case)]
  fn G_to_bytes(g: &Self::G) -> Vec<u8>;

  /// Takes 64 bytes and returns a scalar reduced mod n
  #[allow(non_snake_case)]
  fn F_from_bytes_wide(bytes: [u8; 64]) -> Self::F;
}

/// Parameters for a multisig
// These fields can not be made public as they should be static
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct MultisigParams {
  /// Participants needed to sign on behalf of the group
  t: usize,
  /// Amount of participants
  n: usize,
  /// Index of the participant being acted for
  i: usize,
}

impl MultisigParams {
  pub fn new(
    t: usize,
    n: usize,
    i: usize
  ) -> Result<MultisigParams, FrostError> {
    if (t == 0) || (n == 0) {
      Err(FrostError::ZeroParameter(t, n))?;
    }

    if u64::try_from(n).is_err() {
      Err(FrostError::TooManyParticipants(n, u64::MAX))?;
    }

    // When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason),
    // but it's not invalid to do so
    if t > n {
      Err(FrostError::InvalidRequiredQuantity(t, n))?;
    }
    if (i == 0) || (i > n) {
      Err(FrostError::InvalidParticipantIndex(n, i))?;
    }

    Ok(MultisigParams{ t, n, i })
  }

  pub fn t(&self) -> usize { self.t }
  pub fn n(&self) -> usize { self.n }
  pub fn i(&self) -> usize { self.i }
}

#[derive(Error, Debug)]
pub enum FrostError {
  #[error("a parameter was 0 (required {0}, participants {1})")]
  ZeroParameter(usize, usize),
  #[error("too many participants (max {1}, got {0})")]
  TooManyParticipants(usize, u64),
  #[error("invalid amount of required participants (max {1}, got {0})")]
  InvalidRequiredQuantity(usize, usize),
  #[error("invalid participant index (0 < index <= {0}, yet index is {1})")]
  InvalidParticipantIndex(usize, usize),

  #[error("invalid signing set ({0})")]
  InvalidSigningSet(String),
  #[error("invalid participant quantity (expected {0}, got {1})")]
  InvalidParticipantQuantity(usize, usize),
  #[error("duplicated participant index ({0})")]
  DuplicatedIndex(usize),
  #[error("participant 0 provided data despite not existing")]
  NonEmptyParticipantZero,
  #[error("invalid commitment quantity (participant {0}, expected {1}, got {2})")]
  InvalidCommitmentQuantity(usize, usize, usize),
  #[error("invalid commitment (participant {0})")]
  InvalidCommitment(usize),
  #[error("invalid proof of knowledge (participant {0})")]
  InvalidProofOfKnowledge(usize),
  #[error("invalid share (participant {0})")]
  InvalidShare(usize),
  #[error("invalid key generation state machine transition (expected {0}, was {1})")]
  InvalidKeyGenTransition(key_gen::State, key_gen::State),

  #[error("invalid sign state machine transition (expected {0}, was {1})")]
  InvalidSignTransition(sign::State, sign::State),

  #[error("internal error ({0})")]
  InternalError(String),
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MultisigKeys<C: Curve> {
  /// Multisig Parameters
  params: MultisigParams,

  /// Secret share key
  secret_share: C::F,
  /// Group key
  group_key: C::G,
  /// Verification shares
  verification_shares: Vec<C::G>,

  /// Offset applied to these keys
  offset: Option<C::F>,
}

impl<C: Curve> MultisigKeys<C> {
  pub fn offset(&self, offset: C::F) -> MultisigKeys<C> {
    let mut res = self.clone();
    res.offset = Some(offset);
    res
  }

  pub fn params(&self) -> MultisigParams {
    self.params
  }

  pub fn secret_share(&self) -> C::F {
    self.secret_share
  }

  pub fn group_key(&self) -> C::G {
    self.group_key
  }

  pub fn verification_shares(&self) -> Vec<C::G> {
    self.verification_shares.clone()
  }

  pub fn serialized_len(n: usize) -> usize {
    1 + usize::from(C::id_len()) + (3 * 8) + C::F_len() + C::G_len() + (n * C::G_len())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(
      1 + usize::from(C::id_len()) + MultisigKeys::<C>::serialized_len(self.params.n)
    );
    serialized.push(C::id_len());
    serialized.extend(C::id().as_bytes());
    serialized.extend(&(self.params.n as u64).to_le_bytes());
    serialized.extend(&(self.params.t as u64).to_le_bytes());
    serialized.extend(&(self.params.i as u64).to_le_bytes());
    serialized.extend(&C::F_to_le_bytes(&self.secret_share));
    serialized.extend(&C::G_to_bytes(&self.group_key));
    for i in 1 ..= self.params.n {
      serialized.extend(&C::G_to_bytes(&self.verification_shares[i]));
    }

    serialized
  }

  pub fn deserialize(serialized: &[u8]) -> Result<MultisigKeys<C>, FrostError> {
    if serialized.len() < 1 {
      Err(FrostError::InternalError("MultisigKeys serialization is empty".to_string()))?;
    }

    let id_len: usize = serialized[0].into();
    let mut cursor = 1;

    if serialized.len() < (cursor + id_len) {
      Err(FrostError::InternalError("ID wasn't included".to_string()))?;
    }

    let id = &serialized[cursor .. (cursor + id_len)];
    if C::id().as_bytes() != id {
      Err(
        FrostError::InternalError(
          "curve is distinct between serialization and deserialization".to_string()
        )
      )?;
    }
    cursor += id_len;

    if serialized.len() < (cursor + 8) {
      Err(FrostError::InternalError("participant quantity wasn't included".to_string()))?;
    }

    let n = u64::from_le_bytes(serialized[cursor .. (cursor + 8)].try_into().unwrap()).try_into()
      .map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
    cursor += 8;
    if serialized.len() != MultisigKeys::<C>::serialized_len(n) {
      Err(FrostError::InternalError("incorrect serialization length".to_string()))?;
    }

    let t = u64::from_le_bytes(serialized[cursor .. (cursor + 8)].try_into().unwrap()).try_into()
      .map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
    cursor += 8;
    let i = u64::from_le_bytes(serialized[cursor .. (cursor + 8)].try_into().unwrap()).try_into()
      .map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
    cursor += 8;

    let secret_share = C::F_from_le_slice(&serialized[cursor .. (cursor + C::F_len())])
      .map_err(|_| FrostError::InternalError("invalid secret share".to_string()))?;
    cursor += C::F_len();
    let group_key = C::G_from_slice(&serialized[cursor .. (cursor + C::G_len())])
      .map_err(|_| FrostError::InternalError("invalid group key".to_string()))?;
    cursor += C::G_len();

    let mut verification_shares = vec![C::G::identity()];
    verification_shares.reserve_exact(n + 1);
    for _ in 0 .. n {
      verification_shares.push(
        C::G_from_slice(&serialized[cursor .. (cursor + C::G_len())])
          .map_err(|_| FrostError::InternalError("invalid verification share".to_string()))?
      );
      cursor += C::G_len();
    }

    Ok(
      MultisigKeys {
        params: MultisigParams::new(t, n, i)
          .map_err(|_| FrostError::InternalError("invalid parameters".to_string()))?,
        secret_share,
        group_key,
        verification_shares,
        offset: None
      }
    )
  }
}

/*
An implementation of Straus, which should be more efficient than Pippenger for the expected amount
of points

Completing key generation from the round 2 messages takes:
- Naive
  Completed 33-of-50 in 2.66s
  Completed 5-of-8 in 11.05ms

- crate Straus
  Completed 33-of-50 in 730-833ms (extremely notable effects from taking variable time)
  Completed 5-of-8 in 2.8ms

- dalek VartimeMultiscalarMul
  Completed 33-of-50 in 266ms
  Completed 5-of-8 in 1.6ms

This does show this algorithm isn't appropriately tuned (and potentially isn't even the right
choice), at least with that quantity. Unfortunately, we can't use dalek's multiexp implementation
everywhere, and this does work
*/
pub fn multiexp_vartime<C: Curve>(scalars: &[C::F], points: &[C::G]) -> C::G {
  let mut tables = vec![];
  // dalek uses 8 in their impl, along with a carry scheme where values are [-8, 8)
  // Moving to a similar system here did save a marginal amount, yet not one significant enough for
  // its pain (as some fields do have scalars which can have their top bit set, a scenario dalek
  // assumes is never true)
  tables.resize(points.len(), Vec::with_capacity(15));
  for p in 0 .. points.len() {
    let mut accum = C::G::identity();
    tables[p].push(accum);
    for _ in 0 .. 15 {
      accum += points[p];
      tables[p].push(accum);
    }
  }

  let mut nibbles = vec![];
  nibbles.resize(scalars.len(), vec![]);
  for s in 0 .. scalars.len() {
    let bytes = C::F_to_le_bytes(&scalars[s]);
    nibbles[s].resize(C::F_len() * 2, 0);
    for i in 0 .. bytes.len() {
      nibbles[s][i * 2] = bytes[i] & 0b1111;
      nibbles[s][(i * 2) + 1] = (bytes[i] >> 4) & 0b1111;
    }
  }

  let mut res = C::G::identity();
  for b in (0 .. (C::F_len() * 2)).rev() {
    for _ in 0 .. 4 {
      res = res.double();
    }

    for s in 0 .. scalars.len() {
      // This creates a 250% performance increase on key gen, which uses a bunch of very low
      // scalars. This is why this function is now committed to being vartime
      if nibbles[s][b] != 0 {
        res += tables[s][nibbles[s][b] as usize];
      }
    }
  }
  res
}
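A standalone, simplified rendition (not part of the commit) of the 4-bit windowed Straus approach multiexp_vartime uses: precompute 0·P through 15·P for each point, then walk the scalars' nibbles from the most significant end, doubling four times per nibble. Concrete dalek-ff-group types stand in for the generic C::F/C::G, and the result is checked against the naive sum.

// Hypothetical example; dalek-ff-group, ff 0.10 and group 0.10 are assumed as dependencies
use ff::PrimeField;
use group::Group;

use dalek_ff_group::{Scalar, EdwardsPoint, ED25519_BASEPOINT_POINT};

fn straus_vartime(scalars: &[Scalar], points: &[EdwardsPoint]) -> EdwardsPoint {
  // Per-point tables of 0·P ..= 15·P
  let tables: Vec<Vec<EdwardsPoint>> = points.iter().map(|p| {
    let mut table = vec![EdwardsPoint::identity()];
    for i in 1 .. 16 {
      table.push(table[i - 1] + p);
    }
    table
  }).collect();

  // Little-endian nibbles of each 32-byte scalar (64 nibbles per scalar)
  let nibbles: Vec<Vec<u8>> = scalars.iter().map(|s| {
    s.to_repr().iter().flat_map(|b| [b & 0b1111, b >> 4]).collect()
  }).collect();

  let mut res = EdwardsPoint::identity();
  for b in (0 .. 64).rev() {
    // Shift the accumulator up by one nibble
    for _ in 0 .. 4 {
      res = res.double();
    }
    for s in 0 .. scalars.len() {
      res += tables[s][nibbles[s][b] as usize];
    }
  }
  res
}

fn main() {
  let scalars = [Scalar::from(123u64), Scalar::from(456u64)];
  let points = [ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_POINT.double()];
  let naive = (points[0] * scalars[0]) + (points[1] * scalars[1]);
  assert_eq!(straus_vartime(&scalars, &points), naive);
}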
478 sign/frost/src/sign.rs Normal file
@@ -0,0 +1,478 @@
use core::{convert::{TryFrom, TryInto}, cmp::min, fmt};
use std::rc::Rc;

use rand_core::{RngCore, CryptoRng};
use blake2::{Digest, Blake2b};

use ff::{Field, PrimeField};
use group::Group;

use crate::{Curve, MultisigParams, MultisigKeys, FrostError, algorithm::Algorithm};

// Matches ZCash's FROST Jubjub implementation
const BINDING_DST: &'static [u8; 9] = b"FROST_rho";
// Doesn't match ZCash except for their desire for messages to be hashed in advance before used
// here and domain separated
const BINDING_MESSAGE_DST: &'static [u8; 17] = b"FROST_rho_message";

/// Calculate the lagrange coefficient
pub fn lagrange<F: PrimeField>(
  i: usize,
  included: &[usize],
) -> F {
  let mut num = F::one();
  let mut denom = F::one();
  for l in included {
    if i == *l {
      continue;
    }

    let share = F::from(u64::try_from(*l).unwrap());
    num *= share;
    denom *= share - F::from(u64::try_from(i).unwrap());
  }

  // Safe as this will only be 0 if we're part of the above loop
  // (which we have an if case to avoid)
  num * denom.invert().unwrap()
}

// View of params passable to algorithm implementations
#[derive(Clone)]
pub struct ParamsView<C: Curve> {
  group_key: C::G,
  included: Vec<usize>,
  secret_share: C::F,
  verification_shares: Vec<C::G>,
}

impl<C: Curve> ParamsView<C> {
  pub fn group_key(&self) -> C::G {
    self.group_key
  }

  pub fn included(&self) -> Vec<usize> {
    self.included.clone()
  }

  pub fn secret_share(&self) -> C::F {
    self.secret_share
  }

  pub fn verification_share(&self, l: usize) -> C::G {
    self.verification_shares[l]
  }
}

/// Pairing of an Algorithm with a MultisigKeys instance and this specific signing set
#[derive(Clone)]
pub struct Params<C: Curve, A: Algorithm<C>> {
  algorithm: A,
  keys: Rc<MultisigKeys<C>>,
  view: ParamsView<C>,
}

impl<C: Curve, A: Algorithm<C>> Params<C, A> {
  pub fn new(
    algorithm: A,
    keys: Rc<MultisigKeys<C>>,
    included: &[usize],
  ) -> Result<Params<C, A>, FrostError> {
    let mut included = included.to_vec();
    (&mut included).sort_unstable();

    // included < threshold
    if included.len() < keys.params.t {
      Err(FrostError::InvalidSigningSet("not enough signers".to_string()))?;
    }
    // Invalid index
    if included[0] == 0 {
      Err(FrostError::InvalidParticipantIndex(included[0], keys.params.n))?;
    }
    // OOB index
    if included[included.len() - 1] > keys.params.n {
      Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], keys.params.n))?;
    }
    // Same signer included multiple times
    for i in 0 .. included.len() - 1 {
      if included[i] == included[i + 1] {
        Err(FrostError::DuplicatedIndex(included[i]))?;
      }
    }
    // Not included
    if !included.contains(&keys.params.i) {
      Err(FrostError::InvalidSigningSet("signing despite not being included".to_string()))?;
    }

    let secret_share = keys.secret_share * lagrange::<C::F>(keys.params.i, &included);
    let (offset, offset_share) = if keys.offset.is_some() {
      let offset = keys.offset.unwrap();
      (offset, offset * C::F::from(included.len().try_into().unwrap()).invert().unwrap())
    } else {
      (C::F::zero(), C::F::zero())
    };

    Ok(
      Params {
        algorithm,
        // Out of order arguments to prevent additional cloning
        view: ParamsView {
          group_key: keys.group_key + (C::generator_table() * offset),
          secret_share: secret_share + offset_share,
          verification_shares: keys.verification_shares.clone().iter().enumerate().map(
            |(l, share)| (*share * lagrange::<C::F>(l, &included)) +
              (C::generator_table() * offset_share)
          ).collect(),
          included: included,
        },
        keys
      }
    )
  }

  pub fn multisig_params(&self) -> MultisigParams {
    self.keys.params
  }
}

struct PreprocessPackage<C: Curve> {
  nonces: [C::F; 2],
  commitments: [C::G; 2],
  serialized: Vec<u8>,
}

// This library unifies the preprocessing step with signing due to security concerns and to provide
// a simpler UX
fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
  rng: &mut R,
  params: &Params<C, A>,
) -> PreprocessPackage<C> {
  let nonces = [C::F::random(&mut *rng), C::F::random(&mut *rng)];
  let commitments = [C::generator_table() * nonces[0], C::generator_table() * nonces[1]];
  let mut serialized = C::G_to_bytes(&commitments[0]);
  serialized.extend(&C::G_to_bytes(&commitments[1]));

  serialized.extend(
    &A::preprocess_addendum(
      rng,
      &params.view,
      &nonces
    )
  );

  PreprocessPackage { nonces, commitments, serialized }
}

#[allow(non_snake_case)]
struct Package<C: Curve> {
  Ris: Vec<C::G>,
  R: C::G,
  share: C::F
}

// Has every signer perform the role of the signature aggregator
// Step 1 was already deprecated by performing nonce generation as needed
// Step 2 is simply the broadcast round from step 1
fn sign_with_share<C: Curve, A: Algorithm<C>>(
  params: &mut Params<C, A>,
  our_preprocess: PreprocessPackage<C>,
  commitments: &[Option<Vec<u8>>],
  msg: &[u8],
) -> Result<(Package<C>, Vec<u8>), FrostError> {
  let multisig_params = params.multisig_params();
  if commitments.len() != (multisig_params.n + 1) {
    Err(
      FrostError::InvalidParticipantQuantity(
        multisig_params.n,
        commitments.len() - min(1, commitments.len())
      )
    )?;
  }

  if commitments[0].is_some() {
    Err(FrostError::NonEmptyParticipantZero)?;
  }

  let commitments_len = C::G_len() * 2;
  let commit_len = commitments_len + A::addendum_commit_len();
  #[allow(non_snake_case)]
  let mut B = Vec::with_capacity(multisig_params.n + 1);
  B.push(None);
  let mut b: Vec<u8> = vec![];
  for l in 1 ..= multisig_params.n {
|
||||
if l == multisig_params.i {
|
||||
if commitments[l].is_some() {
|
||||
Err(FrostError::DuplicatedIndex(l))?;
|
||||
}
|
||||
|
||||
B.push(Some(our_preprocess.commitments));
|
||||
// Including the participant index makes the binding transcript slightly more robust
|
||||
b.extend(&u64::try_from(l).unwrap().to_le_bytes());
|
||||
b.extend(&our_preprocess.serialized[0 .. commit_len]);
|
||||
continue;
|
||||
}
|
||||
|
||||
let included = params.view.included.contains(&l);
|
||||
if commitments[l].is_some() && (!included) {
|
||||
Err(FrostError::InvalidCommitmentQuantity(l, 0, commitments.len() / C::G_len()))?;
|
||||
}
|
||||
|
||||
if commitments[l].is_none() {
|
||||
if included {
|
||||
Err(FrostError::InvalidCommitmentQuantity(l, 2, 0))?;
|
||||
}
|
||||
B.push(None);
|
||||
continue;
|
||||
}
|
||||
|
||||
let commitments = commitments[l].as_ref().unwrap();
|
||||
if commitments.len() < commitments_len {
|
||||
Err(FrostError::InvalidCommitmentQuantity(l, 2, commitments.len() / C::G_len()))?;
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
let D = C::G_from_slice(&commitments[0 .. C::G_len()])
|
||||
.map_err(|_| FrostError::InvalidCommitment(l))?;
|
||||
#[allow(non_snake_case)]
|
||||
let E = C::G_from_slice(&commitments[C::G_len() .. commitments_len])
|
||||
.map_err(|_| FrostError::InvalidCommitment(l))?;
|
||||
B.push(Some([D, E]));
|
||||
b.extend(&u64::try_from(l).unwrap().to_le_bytes());
|
||||
b.extend(&commitments[0 .. commit_len]);
|
||||
}
|
||||
|
||||
let context = params.algorithm.context();
|
||||
let mut p = Vec::with_capacity(multisig_params.t);
|
||||
let mut pi = C::F::zero();
|
||||
for l in ¶ms.view.included {
|
||||
p.push(
|
||||
C::F_from_bytes_wide(
|
||||
Blake2b::new()
|
||||
.chain(BINDING_DST)
|
||||
.chain(u64::try_from(*l).unwrap().to_le_bytes())
|
||||
.chain(Blake2b::new().chain(BINDING_MESSAGE_DST).chain(msg).finalize())
|
||||
.chain(&context)
|
||||
.chain(&b)
|
||||
.finalize()
|
||||
.as_slice()
|
||||
.try_into()
|
||||
.expect("couldn't convert a 64-byte hash to a 64-byte array")
|
||||
)
|
||||
);
|
||||
|
||||
let view = ¶ms.view;
|
||||
params.algorithm.process_addendum(
|
||||
view,
|
||||
*l,
|
||||
B[*l].as_ref().unwrap(),
|
||||
&p[p.len() - 1],
|
||||
if *l == multisig_params.i {
|
||||
pi = p[p.len() - 1];
|
||||
&our_preprocess.serialized[commitments_len .. our_preprocess.serialized.len()]
|
||||
} else {
|
||||
&commitments[*l].as_ref().unwrap()[commitments_len .. commitments[*l].as_ref().unwrap().len()]
|
||||
}
|
||||
)?;
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
let mut Ris = vec![];
|
||||
#[allow(non_snake_case)]
|
||||
let mut R = C::G::identity();
|
||||
for i in 0 .. params.view.included.len() {
|
||||
let commitments = B[params.view.included[i]].unwrap();
|
||||
#[allow(non_snake_case)]
|
||||
let this_R = commitments[0] + (commitments[1] * p[i]);
|
||||
Ris.push(this_R);
|
||||
R += this_R;
|
||||
}
|
||||
|
||||
let view = ¶ms.view;
|
||||
let share = params.algorithm.sign_share(
|
||||
view,
|
||||
R,
|
||||
our_preprocess.nonces[0] + (our_preprocess.nonces[1] * pi),
|
||||
msg
|
||||
);
|
||||
Ok((Package { Ris, R, share }, C::F_to_le_bytes(&share)))
|
||||
}
|
||||
|
||||
// This doesn't check the signing set is as expected; unexpected changes can cause false blames
// if legitimate participants are still using the original, expected, signing set. This library
// could be made more robust in that regard
|
||||
fn complete<C: Curve, A: Algorithm<C>>(
|
||||
sign_params: &Params<C, A>,
|
||||
sign: Package<C>,
|
||||
serialized: &[Option<Vec<u8>>],
|
||||
) -> Result<A::Signature, FrostError> {
|
||||
let params = sign_params.multisig_params();
|
||||
if serialized.len() != (params.n + 1) {
|
||||
Err(
|
||||
FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
|
||||
)?;
|
||||
}
|
||||
|
||||
if serialized[0].is_some() {
|
||||
Err(FrostError::NonEmptyParticipantZero)?;
|
||||
}
|
||||
|
||||
let mut responses = Vec::with_capacity(params.t);
|
||||
let mut sum = sign.share;
|
||||
for i in 0 .. sign_params.view.included.len() {
|
||||
let l = sign_params.view.included[i];
|
||||
if l == params.i {
|
||||
responses.push(None);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Make sure they actually provided a share
|
||||
if serialized[l].is_none() {
|
||||
Err(FrostError::InvalidShare(l))?;
|
||||
}
|
||||
|
||||
let part = C::F_from_le_slice(serialized[l].as_ref().unwrap())
|
||||
.map_err(|_| FrostError::InvalidShare(l))?;
|
||||
sum += part;
|
||||
responses.push(Some(part));
|
||||
}
|
||||
|
||||
// Perform signature validation instead of individual share validation
|
||||
// For the success route, which should be much more frequent, this should be faster
|
||||
// It also acts as an integrity check of this library's signing function
|
||||
let res = sign_params.algorithm.verify(sign_params.view.group_key, sign.R, sum);
|
||||
if res.is_some() {
|
||||
return Ok(res.unwrap());
|
||||
}
|
||||
|
||||
// Find out who misbehaved
|
||||
for i in 0 .. sign_params.view.included.len() {
|
||||
match responses[i] {
|
||||
Some(part) => {
|
||||
let l = sign_params.view.included[i];
|
||||
if !sign_params.algorithm.verify_share(
|
||||
sign_params.view.verification_share(l),
|
||||
sign.Ris[i],
|
||||
part
|
||||
) {
|
||||
Err(FrostError::InvalidShare(l))?;
|
||||
}
|
||||
},
|
||||
|
||||
// Happens when l == i
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
// If everyone has a valid share and there were enough participants, this should've worked
|
||||
Err(
|
||||
FrostError::InternalError(
|
||||
"everyone had a valid share yet the signature was still invalid".to_string()
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/// State of a Sign machine
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub enum State {
|
||||
Fresh,
|
||||
Preprocessed,
|
||||
Signed,
|
||||
Complete,
|
||||
}
|
||||
|
||||
impl fmt::Display for State {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
||||
/// State machine which manages signing
|
||||
#[allow(non_snake_case)]
|
||||
pub struct StateMachine<C: Curve, A: Algorithm<C>> {
|
||||
params: Params<C, A>,
|
||||
state: State,
|
||||
preprocess: Option<PreprocessPackage<C>>,
|
||||
sign: Option<Package<C>>,
|
||||
}
|
||||
|
||||
impl<C: Curve, A: Algorithm<C>> StateMachine<C, A> {
|
||||
/// Creates a new machine to sign with the specified algorithm and keys
|
||||
pub fn new(params: Params<C, A>) -> StateMachine<C, A> {
|
||||
StateMachine {
|
||||
params,
|
||||
state: State::Fresh,
|
||||
preprocess: None,
|
||||
sign: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Perform the preprocessing round required in order to sign
|
||||
/// Returns a byte vector which must be transmitted to all parties selected for this signing
|
||||
/// process, over an authenticated channel
|
||||
pub fn preprocess<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R
|
||||
) -> Result<Vec<u8>, FrostError> {
|
||||
if self.state != State::Fresh {
|
||||
Err(FrostError::InvalidSignTransition(State::Fresh, self.state))?;
|
||||
}
|
||||
let preprocess = preprocess::<R, C, A>(rng, &self.params);
|
||||
let serialized = preprocess.serialized.clone();
|
||||
self.preprocess = Some(preprocess);
|
||||
self.state = State::Preprocessed;
|
||||
Ok(serialized)
|
||||
}
|
||||
|
||||
/// Sign a message
|
||||
/// Takes in the participants' commitments, which are expected to be in a Vec where participant
|
||||
/// index = Vec index. None is expected at index 0 to allow for this. None is also expected at
|
||||
/// index i which is locally handled. Returns a byte vector representing a share of the signature
|
||||
/// for every other participant to receive, over an authenticated channel
|
||||
pub fn sign(
|
||||
&mut self,
|
||||
commitments: &[Option<Vec<u8>>],
|
||||
msg: &[u8],
|
||||
) -> Result<Vec<u8>, FrostError> {
|
||||
if self.state != State::Preprocessed {
|
||||
Err(FrostError::InvalidSignTransition(State::Preprocessed, self.state))?;
|
||||
}
|
||||
|
||||
let (sign, serialized) = sign_with_share(
|
||||
&mut self.params,
|
||||
self.preprocess.take().unwrap(),
|
||||
commitments,
|
||||
msg,
|
||||
)?;
|
||||
|
||||
self.sign = Some(sign);
|
||||
self.state = State::Signed;
|
||||
Ok(serialized)
|
||||
}
|
||||
|
||||
/// Complete signing
|
||||
/// Takes in everyone else's shares submitted to us as a Vec, expecting participant index =
|
||||
/// Vec index with None at index 0 and index i. Returns a byte vector representing the serialized
|
||||
/// signature
|
||||
pub fn complete(&mut self, shares: &[Option<Vec<u8>>]) -> Result<A::Signature, FrostError> {
|
||||
if self.state != State::Signed {
|
||||
Err(FrostError::InvalidSignTransition(State::Signed, self.state))?;
|
||||
}
|
||||
|
||||
let signature = complete(
|
||||
&self.params,
|
||||
self.sign.take().unwrap(),
|
||||
shares,
|
||||
)?;
|
||||
|
||||
self.state = State::Complete;
|
||||
Ok(signature)
|
||||
}
|
||||
|
||||
pub fn multisig_params(&self) -> MultisigParams {
|
||||
self.params.multisig_params().clone()
|
||||
}
|
||||
|
||||
pub fn state(&self) -> State {
|
||||
self.state
|
||||
}
|
||||
}
|
||||
82
sign/frost/tests/common.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use core::convert::TryInto;
|
||||
|
||||
use group::{Group, GroupEncoding};
|
||||
|
||||
use jubjub::{Fr, SubgroupPoint};
|
||||
use frost::{CurveError, Curve, multiexp_vartime};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub struct Jubjub;
|
||||
impl Curve for Jubjub {
|
||||
type F = Fr;
|
||||
type G = SubgroupPoint;
|
||||
type T = SubgroupPoint;
|
||||
|
||||
fn id() -> String {
|
||||
"Jubjub".to_string()
|
||||
}
|
||||
|
||||
fn id_len() -> u8 {
|
||||
Self::id().len() as u8
|
||||
}
|
||||
|
||||
fn generator() -> Self::G {
|
||||
Self::G::generator()
|
||||
}
|
||||
|
||||
fn generator_table() -> Self::T {
|
||||
Self::G::generator()
|
||||
}
|
||||
|
||||
fn multiexp_vartime(scalars: &[Self::F], points: &[Self::G]) -> Self::G {
|
||||
multiexp_vartime::<Jubjub>(scalars, points)
|
||||
}
|
||||
|
||||
fn F_len() -> usize {
|
||||
32
|
||||
}
|
||||
|
||||
fn G_len() -> usize {
|
||||
32
|
||||
}
|
||||
|
||||
fn F_from_le_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
|
||||
let scalar = Self::F::from_bytes(
|
||||
&slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
|
||||
);
|
||||
if scalar.is_some().into() {
|
||||
Ok(scalar.unwrap())
|
||||
} else {
|
||||
Err(CurveError::InvalidScalar(hex::encode(slice)))
|
||||
}
|
||||
}
|
||||
|
||||
fn F_from_le_slice_unreduced(slice: &[u8]) -> Self::F {
|
||||
let mut wide: [u8; 64] = [0; 64];
|
||||
wide[..slice.len()].copy_from_slice(slice);
|
||||
Self::F::from_bytes_wide(&wide)
|
||||
}
|
||||
|
||||
fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError> {
|
||||
let point = Self::G::from_bytes(
|
||||
&slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
|
||||
);
|
||||
if point.is_some().into() {
|
||||
Ok(point.unwrap())
|
||||
} else {
|
||||
Err(CurveError::InvalidPoint(hex::encode(slice)))?
|
||||
}
|
||||
}
|
||||
|
||||
fn F_to_le_bytes(f: &Self::F) -> Vec<u8> {
|
||||
f.to_bytes().to_vec()
|
||||
}
|
||||
|
||||
fn G_to_bytes(g: &Self::G) -> Vec<u8> {
|
||||
g.to_bytes().to_vec()
|
||||
}
|
||||
|
||||
fn F_from_bytes_wide(bytes: [u8; 64]) -> Self::F {
|
||||
Self::F::from_bytes_wide(&bytes)
|
||||
}
|
||||
}
|
||||
143
sign/frost/tests/key_gen_and_sign.rs
Normal file
@@ -0,0 +1,143 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use rand::{RngCore, rngs::OsRng};
|
||||
|
||||
use frost::{
|
||||
Curve,
|
||||
MultisigParams, MultisigKeys,
|
||||
key_gen,
|
||||
algorithm::{Algorithm, Schnorr, Blake2bHram, SchnorrSignature},
|
||||
sign
|
||||
};
|
||||
|
||||
mod common;
|
||||
use common::Jubjub;
|
||||
|
||||
const PARTICIPANTS: usize = 8;
|
||||
|
||||
fn sign<C: Curve, A: Algorithm<C, Signature = SchnorrSignature<C>>>(
|
||||
algorithm: A,
|
||||
keys: Vec<Rc<MultisigKeys<C>>>
|
||||
) {
|
||||
let t = keys[0].params().t();
|
||||
let mut machines = vec![];
|
||||
let mut commitments = Vec::with_capacity(PARTICIPANTS + 1);
|
||||
commitments.resize(PARTICIPANTS + 1, None);
|
||||
for i in 1 ..= t {
|
||||
machines.push(
|
||||
sign::StateMachine::new(
|
||||
sign::Params::new(
|
||||
algorithm.clone(),
|
||||
keys[i - 1].clone(),
|
||||
&(1 ..= t).collect::<Vec<usize>>()
|
||||
).unwrap()
|
||||
)
|
||||
);
|
||||
commitments[i] = Some(machines[i - 1].preprocess(&mut OsRng).unwrap());
|
||||
}
|
||||
|
||||
let mut shares = Vec::with_capacity(PARTICIPANTS + 1);
|
||||
shares.resize(PARTICIPANTS + 1, None);
|
||||
for i in 1 ..= t {
|
||||
shares[i] = Some(
|
||||
machines[i - 1].sign(
|
||||
&commitments
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
|
||||
.collect::<Vec<Option<Vec<u8>>>>(),
|
||||
b"Hello World"
|
||||
).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
let mut signature = None;
|
||||
for i in 1 ..= t {
|
||||
let sig = machines[i - 1].complete(
|
||||
&shares
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
|
||||
.collect::<Vec<Option<Vec<u8>>>>()
|
||||
).unwrap();
|
||||
if signature.is_none() {
|
||||
signature = Some(sig);
|
||||
}
|
||||
assert_eq!(sig, signature.unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn key_gen_and_sign() {
|
||||
let mut params = vec![];
|
||||
let mut machines = vec![];
|
||||
let mut commitments = vec![vec![]];
|
||||
for i in 1 ..= PARTICIPANTS {
|
||||
params.push(
|
||||
MultisigParams::new(
|
||||
((PARTICIPANTS / 3) * 2) + 1,
|
||||
PARTICIPANTS,
|
||||
i
|
||||
).unwrap()
|
||||
);
|
||||
machines.push(
|
||||
key_gen::StateMachine::<Jubjub>::new(
|
||||
params[i - 1],
|
||||
"FF/Group Rust key_gen test".to_string()
|
||||
)
|
||||
);
|
||||
commitments.push(machines[i - 1].generate_coefficients(&mut OsRng).unwrap());
|
||||
}
|
||||
|
||||
let mut secret_shares = vec![];
|
||||
for i in 1 ..= PARTICIPANTS {
|
||||
secret_shares.push(
|
||||
machines[i - 1].generate_secret_shares(
|
||||
&mut OsRng,
|
||||
commitments
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, commitments)| if idx == i { vec![] } else { commitments.to_vec() })
|
||||
.collect()
|
||||
).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
let mut verification_shares = vec![];
|
||||
let mut group_key = None;
|
||||
let mut keys = vec![];
|
||||
for i in 1 ..= PARTICIPANTS {
|
||||
let mut our_secret_shares = vec![vec![]];
|
||||
our_secret_shares.extend(
|
||||
secret_shares.iter().map(|shares| shares[i].clone()).collect::<Vec<Vec<u8>>>()
|
||||
);
|
||||
|
||||
let these_keys = machines[i - 1].complete(our_secret_shares).unwrap();
|
||||
assert_eq!(
|
||||
MultisigKeys::<Jubjub>::deserialize(&these_keys.serialize()).unwrap(),
|
||||
these_keys
|
||||
);
|
||||
keys.push(Rc::new(these_keys.clone()));
|
||||
|
||||
if verification_shares.len() == 0 {
|
||||
verification_shares = these_keys.verification_shares();
|
||||
}
|
||||
assert_eq!(verification_shares, these_keys.verification_shares());
|
||||
|
||||
if group_key.is_none() {
|
||||
group_key = Some(these_keys.group_key());
|
||||
}
|
||||
assert_eq!(group_key.unwrap(), these_keys.group_key());
|
||||
}
|
||||
|
||||
sign(Schnorr::<Jubjub, Blake2bHram>::new(), keys.clone());
|
||||
|
||||
let mut randomization = [0; 64];
|
||||
(&mut OsRng).fill_bytes(&mut randomization);
|
||||
sign(
|
||||
Schnorr::<Jubjub, Blake2bHram>::new(),
|
||||
keys.iter().map(
|
||||
|keys| Rc::new(keys.offset(Jubjub::F_from_bytes_wide(randomization)))
|
||||
).collect()
|
||||
);
|
||||
}
|
||||
3
sign/monero/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
Cargo.lock

.build
34
sign/monero/Cargo.toml
Normal file
@@ -0,0 +1,34 @@
|
||||
[package]
|
||||
name = "monero-sign"
|
||||
version = "0.1.0"
|
||||
description = "Implementation of Monero transaction signing in Rust"
|
||||
license = "MIT"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
lazy_static = "1"
|
||||
thiserror = "1"
|
||||
|
||||
rand_core = "0.6"
|
||||
|
||||
hex = "0.4"
|
||||
|
||||
digest = "0.9"
|
||||
tiny-keccak = { version = "2.0", features = ["keccak"] }
|
||||
blake2 = "0.9"
|
||||
|
||||
curve25519-dalek = { version = "3.2", features = ["std", "simd_backend"] }
|
||||
|
||||
ff = { version = "0.10", optional = true }
|
||||
group = { version = "0.10", optional = true }
|
||||
dalek-ff-group = { path = "../dalek-ff-group", optional = true }
|
||||
frost = { path = "../frost", optional = true }
|
||||
|
||||
monero = "0.16.0" # Locked to this specific patch version due to a bug we compensate for
|
||||
|
||||
[features]
|
||||
multisig = ["ff", "group", "dalek-ff-group", "frost"]
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.8"
|
||||
21
sign/monero/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2022 Luke Parker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
104
sign/monero/build.rs
Normal file
@@ -0,0 +1,104 @@
|
||||
use std::process::Command;
|
||||
use std::env;
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
|
||||
panic!("git failed to init submodules");
|
||||
}
|
||||
|
||||
if !Command ::new("mkdir").args(&["-p", ".build"])
|
||||
.current_dir(&Path::new("c")).status().unwrap().success() {
|
||||
panic!("failed to create a directory to track build progress");
|
||||
}
|
||||
|
||||
let out_dir = &env::var("OUT_DIR").unwrap();
|
||||
|
||||
// Use a file to signal if Monero was already built, as it should never need rebuilding
// If the signaling file is deleted, this script will rebuild Monero on the next run
|
||||
// TODO: Move this signaling file into OUT_DIR once Monero is built statically successfully
|
||||
println!("cargo:rerun-if-changed=c/.build/monero");
|
||||
if !Path::new("c/.build/monero").exists() {
|
||||
if !Command::new("cmake").args(&["cmake", "-DCMAKE_BUILD_TYPE=Release", "-DBUILD_SHARED_LIBS=1", "."])
|
||||
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
|
||||
panic!("cmake failed to generate Monero's build scripts");
|
||||
}
|
||||
|
||||
if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
|
||||
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
|
||||
panic!("make failed to build Monero. Please check your dependencies");
|
||||
}
|
||||
|
||||
if !Command::new("cp").args(&[
|
||||
&format!(
|
||||
"c/monero/src/crypto/{}cncrypto.{}",
|
||||
&env::consts::DLL_PREFIX,
|
||||
&env::consts::DLL_EXTENSION
|
||||
),
|
||||
out_dir
|
||||
]).status().unwrap().success() {
|
||||
panic!("Failed to cp cncrypto");
|
||||
}
|
||||
|
||||
if !Command::new("cp").args(&[
|
||||
&format!(
|
||||
"c/monero/src/device/{}device.{}",
|
||||
&env::consts::DLL_PREFIX,
|
||||
&env::consts::DLL_EXTENSION
|
||||
),
|
||||
out_dir
|
||||
]).status().unwrap().success() {
|
||||
panic!("Failed to cp device");
|
||||
}
|
||||
|
||||
if !Command::new("cp").args(&[
|
||||
&format!(
|
||||
"c/monero/src/ringct/{}ringct_basic.{}",
|
||||
&env::consts::DLL_PREFIX,
|
||||
&env::consts::DLL_EXTENSION
|
||||
),
|
||||
out_dir
|
||||
]).status().unwrap().success() {
|
||||
panic!("Failed to cp ringct_basic");
|
||||
}
|
||||
|
||||
if !Command::new("cp").args(&[
|
||||
&format!(
|
||||
"c/monero/src/ringct/{}ringct.{}",
|
||||
&env::consts::DLL_PREFIX,
|
||||
&env::consts::DLL_EXTENSION
|
||||
),
|
||||
out_dir
|
||||
]).status().unwrap().success() {
|
||||
panic!("Failed to cp ringct");
|
||||
}
|
||||
|
||||
if !Command::new("touch").arg("monero")
|
||||
.current_dir(&Path::new("c/.build")).status().unwrap().success() {
|
||||
panic!("failed to create a file to label Monero as built");
|
||||
}
|
||||
}
|
||||
|
||||
println!("cargo:rerun-if-changed=c/wrapper.c");
|
||||
if !Command::new("g++").args(&[
|
||||
"-O3", "-Wall", "-shared", "-std=c++14", "-fPIC",
|
||||
"-Imonero/contrib/epee/include", "-Imonero/src",
|
||||
"wrapper.c", "-o", &format!(
|
||||
"{}/{}wrapper.{}",
|
||||
out_dir,
|
||||
&env::consts::DLL_PREFIX,
|
||||
&env::consts::DLL_EXTENSION
|
||||
),
|
||||
&format!("-L{}", out_dir),
|
||||
"-ldevice", "-lringct_basic", "-lringct"
|
||||
]).current_dir(&Path::new("c")).status().unwrap().success() {
|
||||
panic!("g++ failed to build the wrapper");
|
||||
}
|
||||
|
||||
println!("cargo:rustc-link-search={}", out_dir);
|
||||
println!("cargo:rustc-link-lib=cncrypto");
|
||||
println!("cargo:rustc-link-lib=device");
|
||||
println!("cargo:rustc-link-lib=ringct_basic");
|
||||
println!("cargo:rustc-link-lib=ringct");
|
||||
println!("cargo:rustc-link-lib=wrapper");
|
||||
}
|
||||
1
sign/monero/c/monero
Submodule
Submodule sign/monero/c/monero added at ab18fea350
42
sign/monero/c/wrapper.c
Normal file
@@ -0,0 +1,42 @@
|
||||
#include "device/device_default.hpp"
|
||||
|
||||
#include "ringct/rctSigs.h"
|
||||
|
||||
extern "C" {
|
||||
void c_hash_to_point(uint8_t* point) {
|
||||
rct::key key_point;
|
||||
ge_p3 e_p3;
|
||||
memcpy(key_point.bytes, point, 32);
|
||||
rct::hash_to_p3(e_p3, key_point);
|
||||
ge_p3_tobytes(point, &e_p3);
|
||||
}
|
||||
|
||||
bool c_verify_clsag(uint s_len, uint8_t* s, uint8_t* I, uint8_t* m, uint8_t k_len, uint8_t* k, uint8_t* p) {
|
||||
rct::clsag clsag;
|
||||
std::stringstream ss;
|
||||
std::string str;
|
||||
str.assign((char*) s, (size_t) s_len);
|
||||
ss << str;
|
||||
binary_archive<false> ba(ss);
|
||||
::serialization::serialize(ba, clsag);
|
||||
if (!ss.good()) {
|
||||
return false;
|
||||
}
|
||||
memcpy(clsag.I.bytes, I, 32);
|
||||
|
||||
rct::key msg;
|
||||
memcpy(msg.bytes, m, 32);
|
||||
|
||||
rct::ctkeyV keys;
|
||||
keys.resize(k_len);
|
||||
for (uint8_t i = 0; i < k_len; i++) {
|
||||
memcpy(keys[i].dest.bytes, &k[(i * 2) * 32], 32);
|
||||
memcpy(keys[i].mask.bytes, &k[((i * 2) + 1) * 32], 32);
|
||||
}
|
||||
|
||||
rct::key pseudo_out;
|
||||
memcpy(pseudo_out.bytes, p, 32);
|
||||
|
||||
return verRctCLSAGSimple(msg, clsag, keys, pseudo_out);
|
||||
}
|
||||
}
|
||||
241
sign/monero/src/clsag/mod.rs
Normal file
241
sign/monero/src/clsag/mod.rs
Normal file
@@ -0,0 +1,241 @@
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use digest::Digest;
|
||||
use blake2::Blake2b;
|
||||
|
||||
use curve25519_dalek::{
|
||||
constants::ED25519_BASEPOINT_TABLE,
|
||||
scalar::Scalar,
|
||||
traits::VartimePrecomputedMultiscalarMul,
|
||||
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
|
||||
};
|
||||
|
||||
use monero::{
|
||||
consensus::Encodable,
|
||||
util::ringct::{Key, Clsag}
|
||||
};
|
||||
|
||||
use crate::{SignError, c_verify_clsag, random_scalar, commitment, hash_to_scalar, hash_to_point};
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
mod multisig;
|
||||
#[cfg(feature = "multisig")]
|
||||
pub use multisig::Multisig;
|
||||
|
||||
// Ring with both the index we're signing for and the data needed to rebuild its commitment
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub(crate) struct SemiSignableRing {
|
||||
ring: Vec<[EdwardsPoint; 2]>,
|
||||
i: usize,
|
||||
randomness: Scalar,
|
||||
amount: u64
|
||||
}
|
||||
|
||||
pub(crate) fn validate_sign_args(
|
||||
ring: Vec<[EdwardsPoint; 2]>,
|
||||
i: u8,
|
||||
private_key: Option<&Scalar>, // Option as multisig won't have access to this
|
||||
randomness: &Scalar,
|
||||
amount: u64
|
||||
) -> Result<SemiSignableRing, SignError> {
|
||||
let n = ring.len();
|
||||
if n > u8::MAX.into() {
|
||||
Err(SignError::InternalError("max ring size in this library is u8 max".to_string()))?;
|
||||
}
|
||||
if i >= (n as u8) {
|
||||
Err(SignError::InvalidRingMember(i, n as u8))?;
|
||||
}
|
||||
let i: usize = i.into();
|
||||
|
||||
// Validate the secrets match these ring members
|
||||
if private_key.is_some() && (ring[i][0] != (private_key.unwrap() * &ED25519_BASEPOINT_TABLE)) {
|
||||
Err(SignError::InvalidSecret(0))?;
|
||||
}
|
||||
if ring[i][1] != commitment(&randomness, amount) {
|
||||
Err(SignError::InvalidSecret(1))?;
|
||||
}
|
||||
|
||||
Ok(SemiSignableRing { ring, i, randomness: *randomness, amount })
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub(crate) fn sign_core(
|
||||
rand_source: [u8; 64],
|
||||
image: EdwardsPoint,
|
||||
msg: &[u8; 32],
|
||||
ssr: &SemiSignableRing,
|
||||
A: EdwardsPoint,
|
||||
AH: EdwardsPoint
|
||||
) -> (Clsag, Scalar, Scalar, Scalar, Scalar, EdwardsPoint) {
|
||||
let n = ssr.ring.len();
|
||||
let i: usize = ssr.i.into();
|
||||
|
||||
let C_out;
|
||||
|
||||
let mut P = vec![];
|
||||
P.reserve_exact(n);
|
||||
let mut C = vec![];
|
||||
C.reserve_exact(n);
|
||||
let mut C_non_zero = vec![];
|
||||
C_non_zero.reserve_exact(n);
|
||||
|
||||
let z;
|
||||
|
||||
let mut next_rand = rand_source;
|
||||
next_rand = Blake2b::digest(&next_rand).as_slice().try_into().unwrap();
|
||||
{
|
||||
let a = Scalar::from_bytes_mod_order_wide(&next_rand);
|
||||
next_rand = Blake2b::digest(&next_rand).as_slice().try_into().unwrap();
|
||||
C_out = commitment(&a, ssr.amount);
|
||||
|
||||
for member in &ssr.ring {
|
||||
P.push(member[0]);
|
||||
C_non_zero.push(member[1]);
|
||||
C.push(C_non_zero[C_non_zero.len() - 1] - C_out);
|
||||
}
|
||||
|
||||
z = ssr.randomness - a;
|
||||
}
|
||||
|
||||
let H = hash_to_point(&P[i]);
|
||||
let mut D = H * z;
|
||||
|
||||
// Doesn't use a constant time table as dalek takes longer to generate those than they save
|
||||
let images_precomp = VartimeEdwardsPrecomputation::new(&[image, D]);
|
||||
D = Scalar::from(8 as u8).invert() * D;
|
||||
|
||||
let mut to_hash = vec![];
|
||||
to_hash.reserve_exact(((2 * n) + 4) * 32);
|
||||
const PREFIX: &str = "CLSAG_";
|
||||
const AGG_0: &str = "CLSAG_agg_0";
|
||||
const ROUND: &str = "round";
|
||||
to_hash.extend(AGG_0.bytes());
|
||||
to_hash.extend([0; 32 - AGG_0.len()]);
|
||||
|
||||
for j in 0 .. n {
|
||||
to_hash.extend(P[j].compress().to_bytes());
|
||||
}
|
||||
|
||||
for j in 0 .. n {
|
||||
to_hash.extend(C_non_zero[j].compress().to_bytes());
|
||||
}
|
||||
|
||||
to_hash.extend(image.compress().to_bytes());
|
||||
let D_bytes = D.compress().to_bytes();
|
||||
to_hash.extend(D_bytes);
|
||||
to_hash.extend(C_out.compress().to_bytes());
|
||||
let mu_P = hash_to_scalar(&to_hash);
|
||||
to_hash[AGG_0.len() - 1] = '1' as u8;
|
||||
let mu_C = hash_to_scalar(&to_hash);
|
||||
|
||||
to_hash.truncate(((2 * n) + 1) * 32);
|
||||
to_hash.reserve_exact(((2 * n) + 5) * 32);
|
||||
for j in 0 .. ROUND.len() {
|
||||
to_hash[PREFIX.len() + j] = ROUND.as_bytes()[j] as u8;
|
||||
}
|
||||
to_hash.extend(C_out.compress().to_bytes());
|
||||
to_hash.extend(msg);
|
||||
to_hash.extend(A.compress().to_bytes());
|
||||
to_hash.extend(AH.compress().to_bytes());
|
||||
let mut c = hash_to_scalar(&to_hash);
|
||||
|
||||
let mut c1 = Scalar::zero();
|
||||
let mut j = (i + 1) % n;
|
||||
if j == 0 {
|
||||
c1 = c;
|
||||
}
|
||||
|
||||
let mut s = vec![];
|
||||
s.resize(n, Scalar::zero());
|
||||
while j != i {
|
||||
s[j] = Scalar::from_bytes_mod_order_wide(&next_rand);
|
||||
next_rand = Blake2b::digest(&next_rand).as_slice().try_into().unwrap();
|
||||
let c_p = mu_P * c;
|
||||
let c_c = mu_C * c;
|
||||
|
||||
let L = (&s[j] * &ED25519_BASEPOINT_TABLE) + (c_p * P[j]) + (c_c * C[j]);
|
||||
let PH = hash_to_point(&P[j]);
|
||||
// Shouldn't be an issue as all of the variables in this vartime statement are public
|
||||
let R = (s[j] * PH) + images_precomp.vartime_multiscalar_mul(&[c_p, c_c]);
|
||||
|
||||
to_hash.truncate(((2 * n) + 3) * 32);
|
||||
to_hash.extend(L.compress().to_bytes());
|
||||
to_hash.extend(R.compress().to_bytes());
|
||||
c = hash_to_scalar(&to_hash);
|
||||
|
||||
j = (j + 1) % n;
|
||||
if j == 0 {
|
||||
c1 = c;
|
||||
}
|
||||
}
|
||||
|
||||
(
|
||||
Clsag {
|
||||
s: s.iter().map(|s| Key { key: s.to_bytes() }).collect(),
|
||||
c1: Key { key: c1.to_bytes() },
|
||||
D: Key { key: D_bytes }
|
||||
},
|
||||
c, mu_C, z, mu_P,
|
||||
C_out
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub fn sign<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
image: EdwardsPoint,
|
||||
msg: [u8; 32],
|
||||
ring: Vec<[EdwardsPoint; 2]>,
|
||||
i: u8,
|
||||
private_key: &Scalar,
|
||||
randomness: &Scalar,
|
||||
amount: u64
|
||||
) -> Result<(Clsag, EdwardsPoint), SignError> {
|
||||
let ssr = validate_sign_args(ring, i, Some(private_key), randomness, amount)?;
|
||||
let a = random_scalar(rng);
|
||||
let mut rand_source = [0; 64];
|
||||
rng.fill_bytes(&mut rand_source);
|
||||
let (mut clsag, c, mu_C, z, mu_P, C_out) = sign_core(
|
||||
rand_source,
|
||||
image,
|
||||
&msg, &ssr,
|
||||
&a * &ED25519_BASEPOINT_TABLE, a * hash_to_point(&ssr.ring[ssr.i][0])
|
||||
);
|
||||
clsag.s[i as usize] = Key { key: (a - (c * ((mu_C * z) + (mu_P * private_key)))).to_bytes() };
|
||||
|
||||
Ok((clsag, C_out))
|
||||
}
|
||||
|
||||
// Uses Monero's C verification function to ensure compatibility with Monero
|
||||
pub fn verify(
|
||||
clsag: &Clsag,
|
||||
image: EdwardsPoint,
|
||||
msg: &[u8; 32],
|
||||
ring: &[[EdwardsPoint; 2]],
|
||||
pseudo_out: EdwardsPoint
|
||||
) -> Result<(), SignError> {
|
||||
// Workaround for the fact monero-rs doesn't include the length of clsag.s in its CLSAG encoding,
// despite it being part of the CLSAG encoding. This is the reason for the patch version pin
|
||||
let mut serialized = vec![clsag.s.len() as u8];
|
||||
clsag.consensus_encode(&mut serialized).unwrap();
|
||||
|
||||
let image_bytes = image.compress().to_bytes();
|
||||
|
||||
let mut ring_bytes = vec![];
|
||||
for member in ring {
|
||||
ring_bytes.extend(&member[0].compress().to_bytes());
|
||||
ring_bytes.extend(&member[1].compress().to_bytes());
|
||||
}
|
||||
|
||||
let pseudo_out_bytes = pseudo_out.compress().to_bytes();
|
||||
|
||||
let success;
|
||||
unsafe {
|
||||
success = c_verify_clsag(
|
||||
serialized.len(), serialized.as_ptr(), image_bytes.as_ptr(),
|
||||
msg.as_ptr(), ring.len() as u8, ring_bytes.as_ptr(), pseudo_out_bytes.as_ptr()
|
||||
);
|
||||
}
|
||||
|
||||
if success { Ok(()) } else { Err(SignError::InvalidSignature) }
|
||||
}
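For reference, each iteration of the ring loop in sign_core enforces the usual CLSAG round relation; this is the editor's reading of the code above, not text from the library:

$$L_j = s_j G + c_j \mu_P P_j + c_j \mu_C C_j, \qquad R_j = s_j \mathcal{H}_p(P_j) + c_j \mu_P I + c_j \mu_C D, \qquad c_{j+1} = \mathcal{H}(\ldots \parallel L_j \parallel R_j)$$

with $D = z\,\mathcal{H}_p(P_i)$, and the real signer's $s_i$ set afterwards (in sign, or by the multisig verifier) to $a - c_i(\mu_C z + \mu_P x)$ so the ring of challenges closes.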
|
||||
198
sign/monero/src/clsag/multisig.rs
Normal file
@@ -0,0 +1,198 @@
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use digest::Digest;
|
||||
use blake2::Blake2b;
|
||||
|
||||
use curve25519_dalek::{
|
||||
constants::ED25519_BASEPOINT_TABLE,
|
||||
scalar::Scalar,
|
||||
edwards::EdwardsPoint
|
||||
};
|
||||
|
||||
use dalek_ff_group as dfg;
|
||||
use group::Group;
|
||||
use frost::{Curve, FrostError, algorithm::Algorithm};
|
||||
|
||||
use monero::util::ringct::{Key, Clsag};
|
||||
|
||||
use crate::{
|
||||
SignError,
|
||||
hash_to_point,
|
||||
frost::{Ed25519, DLEqProof},
|
||||
clsag::{SemiSignableRing, validate_sign_args, sign_core, verify}
|
||||
};
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
#[derive(Clone, Debug)]
|
||||
struct ClsagSignInterim {
|
||||
c: Scalar,
|
||||
mu_C: Scalar,
|
||||
z: Scalar,
|
||||
mu_P: Scalar,
|
||||
|
||||
clsag: Clsag,
|
||||
C_out: EdwardsPoint
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Multisig {
|
||||
b: Vec<u8>,
|
||||
AH: dfg::EdwardsPoint,
|
||||
|
||||
image: EdwardsPoint,
|
||||
msg: [u8; 32],
|
||||
ssr: SemiSignableRing,
|
||||
|
||||
interim: Option<ClsagSignInterim>
|
||||
}
|
||||
|
||||
impl Multisig {
|
||||
pub fn new(
|
||||
image: EdwardsPoint,
|
||||
msg: [u8; 32],
|
||||
ring: Vec<[EdwardsPoint; 2]>,
|
||||
i: u8,
|
||||
randomness: &Scalar,
|
||||
amount: u64
|
||||
) -> Result<Multisig, SignError> {
|
||||
let ssr = validate_sign_args(ring, i, None, randomness, amount)?;
|
||||
Ok(
|
||||
Multisig {
|
||||
b: vec![],
|
||||
AH: dfg::EdwardsPoint::identity(),
|
||||
|
||||
image,
|
||||
msg,
|
||||
ssr,
|
||||
|
||||
interim: None
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Algorithm<Ed25519> for Multisig {
|
||||
type Signature = (Clsag, EdwardsPoint);
|
||||
|
||||
fn context(&self) -> Vec<u8> {
|
||||
let mut context = self.image.compress().to_bytes().to_vec();
|
||||
for pair in &self.ssr.ring {
|
||||
context.extend(&pair[0].compress().to_bytes());
|
||||
}
|
||||
context.extend(&u8::try_from(self.ssr.i).unwrap().to_le_bytes());
|
||||
context.extend(&self.ssr.randomness.to_bytes());
|
||||
context.extend(&self.ssr.amount.to_le_bytes());
|
||||
context
|
||||
}
|
||||
|
||||
// We arguably don't have to commit to these at all, thanks to xG and yG being committed to and
// both being proven to have the same scalar as xH and yH, yet it doesn't hurt
|
||||
fn addendum_commit_len() -> usize {
|
||||
64
|
||||
}
|
||||
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
group_key: &dfg::EdwardsPoint,
|
||||
nonces: &[dfg::Scalar; 2]
|
||||
) -> Vec<u8> {
|
||||
#[allow(non_snake_case)]
|
||||
let H = hash_to_point(&group_key.0);
|
||||
let h0 = nonces[0].0 * H;
|
||||
let h1 = nonces[1].0 * H;
|
||||
// 32 + 32 + 64 + 64
|
||||
let mut serialized = Vec::with_capacity(192);
|
||||
serialized.extend(h0.compress().to_bytes());
|
||||
serialized.extend(h1.compress().to_bytes());
|
||||
serialized.extend(&DLEqProof::prove(rng, &nonces[0].0, &H, &h0).serialize());
|
||||
serialized.extend(&DLEqProof::prove(rng, &nonces[1].0, &H, &h1).serialize());
|
||||
serialized
|
||||
}
|
||||
|
||||
fn process_addendum(
|
||||
&mut self,
|
||||
l: usize,
|
||||
commitments: &[dfg::EdwardsPoint; 2],
|
||||
p: &dfg::Scalar,
|
||||
serialized: &[u8]
|
||||
) -> Result<(), FrostError> {
|
||||
if serialized.len() != 192 {
|
||||
// Not an optimal error but...
|
||||
Err(FrostError::InvalidCommitmentQuantity(l, 6, serialized.len() / 32))?;
|
||||
}
|
||||
|
||||
let alt = &hash_to_point(&self.ssr.ring[self.ssr.i][0]);
|
||||
|
||||
let h0 = <Ed25519 as Curve>::G_from_slice(&serialized[0 .. 32]).map_err(|_| FrostError::InvalidCommitment(l))?;
|
||||
DLEqProof::deserialize(&serialized[64 .. 128]).ok_or(FrostError::InvalidCommitment(l))?.verify(
|
||||
&alt,
|
||||
&commitments[0],
|
||||
&h0
|
||||
).map_err(|_| FrostError::InvalidCommitment(l))?;
|
||||
|
||||
let h1 = <Ed25519 as Curve>::G_from_slice(&serialized[32 .. 64]).map_err(|_| FrostError::InvalidCommitment(l))?;
|
||||
DLEqProof::deserialize(&serialized[128 .. 192]).ok_or(FrostError::InvalidCommitment(l))?.verify(
|
||||
&alt,
|
||||
&commitments[1],
|
||||
&h1
|
||||
).map_err(|_| FrostError::InvalidCommitment(l))?;
|
||||
|
||||
self.b.extend(&l.to_le_bytes());
|
||||
self.b.extend(&serialized[0 .. 64]);
|
||||
self.AH += h0 + (h1 * p);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn sign_share(
|
||||
&mut self,
|
||||
_: dfg::EdwardsPoint,
|
||||
secret: dfg::Scalar,
|
||||
nonce: dfg::Scalar,
|
||||
nonce_sum: dfg::EdwardsPoint,
|
||||
_: &[u8],
|
||||
) -> dfg::Scalar {
|
||||
// Use everyone's commitments to derive a random source all signers can agree upon
// It cannot be manipulated to any effect, and all signers must, and will, know it
|
||||
let rand_source = Blake2b::new().chain("Clsag_randomness").chain(&self.b).finalize().as_slice().try_into().unwrap();
|
||||
#[allow(non_snake_case)]
|
||||
let (clsag, c, mu_C, z, mu_P, C_out) = sign_core(rand_source, self.image, &self.msg, &self.ssr, nonce_sum.0, self.AH.0);
|
||||
|
||||
let share = dfg::Scalar(nonce.0 - (c * (mu_P * secret.0)));
|
||||
|
||||
self.interim = Some(ClsagSignInterim { c, mu_C, z, mu_P, clsag, C_out });
|
||||
share
|
||||
}
|
||||
|
||||
fn verify(
|
||||
&self,
|
||||
_: dfg::EdwardsPoint,
|
||||
_: dfg::EdwardsPoint,
|
||||
sum: dfg::Scalar
|
||||
) -> Option<Self::Signature> {
|
||||
let interim = self.interim.as_ref().unwrap();
|
||||
|
||||
// Subtract the randomness's presence, which is done once and not fractionalized among shares
|
||||
let s = sum.0 - (interim.c * (interim.mu_C * interim.z));
|
||||
|
||||
let mut clsag = interim.clsag.clone();
|
||||
clsag.s[self.ssr.i] = Key { key: s.to_bytes() };
|
||||
if verify(&clsag, self.image, &self.msg, &self.ssr.ring, interim.C_out).is_ok() {
|
||||
return Some((clsag, interim.C_out));
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
fn verify_share(
|
||||
&self,
|
||||
verification_share: dfg::EdwardsPoint,
|
||||
nonce: dfg::EdwardsPoint,
|
||||
share: dfg::Scalar,
|
||||
) -> bool {
|
||||
let interim = self.interim.as_ref().unwrap();
|
||||
return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
|
||||
nonce.0 - (interim.c * (interim.mu_P * verification_share.0))
|
||||
);
|
||||
}
|
||||
}
|
||||
189
sign/monero/src/frost.rs
Normal file
@@ -0,0 +1,189 @@
|
||||
use core::convert::TryInto;
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use digest::Digest;
|
||||
use blake2::Blake2b;
|
||||
|
||||
use curve25519_dalek::{
|
||||
constants::ED25519_BASEPOINT_TABLE as DTable,
|
||||
traits::VartimeMultiscalarMul,
|
||||
scalar::Scalar as DScalar,
|
||||
edwards::EdwardsPoint as DPoint
|
||||
};
|
||||
|
||||
use dalek_ff_group::EdwardsPoint;
|
||||
|
||||
use ff::PrimeField;
|
||||
use group::Group;
|
||||
|
||||
use dalek_ff_group as dfg;
|
||||
use frost::{CurveError, Curve};
|
||||
|
||||
use crate::{SignError, random_scalar};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub struct Ed25519;
|
||||
impl Curve for Ed25519 {
|
||||
type F = dfg::Scalar;
|
||||
type G = dfg::EdwardsPoint;
|
||||
type T = &'static dfg::EdwardsBasepointTable;
|
||||
|
||||
fn id() -> String {
|
||||
"Ed25519".to_string()
|
||||
}
|
||||
|
||||
fn id_len() -> u8 {
|
||||
Self::id().len() as u8
|
||||
}
|
||||
|
||||
fn generator() -> Self::G {
|
||||
Self::G::generator()
|
||||
}
|
||||
|
||||
fn generator_table() -> Self::T {
|
||||
&dfg::ED25519_BASEPOINT_TABLE
|
||||
}
|
||||
|
||||
fn multiexp_vartime(scalars: &[Self::F], points: &[Self::G]) -> Self::G {
|
||||
EdwardsPoint(DPoint::vartime_multiscalar_mul(scalars, points))
|
||||
}
|
||||
|
||||
fn F_len() -> usize {
|
||||
32
|
||||
}
|
||||
|
||||
fn G_len() -> usize {
|
||||
32
|
||||
}
|
||||
|
||||
fn F_from_le_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
|
||||
let scalar = Self::F::from_repr(
|
||||
slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
|
||||
);
|
||||
if scalar.is_some() {
|
||||
Ok(scalar.unwrap())
|
||||
} else {
|
||||
Err(CurveError::InvalidScalar(hex::encode(slice)))
|
||||
}
|
||||
}
|
||||
|
||||
fn F_from_le_slice_unreduced(slice: &[u8]) -> Self::F {
|
||||
let mut wide: [u8; 64] = [0; 64];
|
||||
wide[..slice.len()].copy_from_slice(slice);
|
||||
dfg::Scalar::from_bytes_mod_order_wide(&wide)
|
||||
}
|
||||
|
||||
fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError> {
|
||||
let point = dfg::CompressedEdwardsY::new(
|
||||
slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
|
||||
).decompress();
|
||||
|
||||
if point.is_some() {
|
||||
let point = point.unwrap();
|
||||
// Ban torsioned points
|
||||
if !point.is_torsion_free() {
|
||||
Err(CurveError::InvalidPoint(hex::encode(slice)))?
|
||||
}
|
||||
Ok(point)
|
||||
} else {
|
||||
Err(CurveError::InvalidPoint(hex::encode(slice)))?
|
||||
}
|
||||
}
|
||||
|
||||
fn F_to_le_bytes(f: &Self::F) -> Vec<u8> {
|
||||
f.to_repr().to_vec()
|
||||
}
|
||||
|
||||
fn G_to_bytes(g: &Self::G) -> Vec<u8> {
|
||||
g.compress().to_bytes().to_vec()
|
||||
}
|
||||
|
||||
fn F_from_bytes_wide(bytes: [u8; 64]) -> Self::F {
|
||||
dfg::Scalar::from_bytes_mod_order_wide(&bytes)
|
||||
}
|
||||
}
|
||||
|
||||
// Discrete log equality proof, used to prove legitimacy in several locations
|
||||
#[derive(Clone)]
|
||||
pub struct DLEqProof {
|
||||
s: DScalar,
|
||||
c: DScalar
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
impl DLEqProof {
|
||||
pub fn prove<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
secret: &DScalar,
|
||||
H: &DPoint,
|
||||
alt: &DPoint
|
||||
) -> DLEqProof {
|
||||
let r = random_scalar(rng);
|
||||
let R1 = &DTable * &r;
|
||||
let R2 = r * H;
|
||||
|
||||
let c = DScalar::from_hash(
|
||||
Blake2b::new()
|
||||
.chain(R1.compress().to_bytes())
|
||||
.chain(R2.compress().to_bytes())
|
||||
.chain((secret * &DTable).compress().to_bytes())
|
||||
.chain(alt.compress().to_bytes())
|
||||
);
|
||||
let s = r + (c * secret);
|
||||
|
||||
DLEqProof { s, c }
|
||||
}
|
||||
|
||||
pub fn verify(
|
||||
&self,
|
||||
H: &DPoint,
|
||||
primary: &DPoint,
|
||||
alt: &DPoint
|
||||
) -> Result<(), SignError> {
|
||||
let s = self.s;
|
||||
let c = self.c;
|
||||
|
||||
let R1 = (&s * &DTable) - (c * primary);
|
||||
let R2 = (s * H) - (c * alt);
|
||||
|
||||
let expected_c = DScalar::from_hash(
|
||||
Blake2b::new()
|
||||
.chain(R1.compress().to_bytes())
|
||||
.chain(R2.compress().to_bytes())
|
||||
.chain(primary.compress().to_bytes())
|
||||
.chain(alt.compress().to_bytes())
|
||||
);
|
||||
|
||||
// Take the opportunity to ensure a lack of torsion in key images/randomness commitments
|
||||
if (!primary.is_torsion_free()) || (!alt.is_torsion_free()) || (c != expected_c) {
|
||||
Err(SignError::InvalidDLEqProof)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn serialize(
|
||||
&self
|
||||
) -> Vec<u8> {
|
||||
let mut res = Vec::with_capacity(64);
|
||||
res.extend(self.s.to_bytes());
|
||||
res.extend(self.c.to_bytes());
|
||||
res
|
||||
}
|
||||
|
||||
pub fn deserialize(
|
||||
serialized: &[u8]
|
||||
) -> Option<DLEqProof> {
|
||||
if serialized.len() != 64 {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
DLEqProof {
|
||||
s: DScalar::from_bytes_mod_order(serialized[0 .. 32].try_into().unwrap()),
|
||||
c: DScalar::from_bytes_mod_order(serialized[32 .. 64].try_into().unwrap())
|
||||
}
|
||||
)
|
||||
}
|
||||
}
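A hedged usage sketch of the DLEqProof above: proving that a public key and its key-image-style counterpart share the same discrete logarithm across the basepoint and an alternate generator. It assumes the crate layout in this commit with the multisig feature enabled; it is illustrative, not a test from the repository.

```rust
use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use monero_sign::{random_scalar, hash_to_point, frost::DLEqProof};

fn main() {
  let x = random_scalar(&mut OsRng);
  let primary = &x * &ED25519_BASEPOINT_TABLE;
  // Alternate generator: the hash-to-point of the public key, as used for key images
  let h = hash_to_point(&primary);
  let alt = x * h;

  // Prove both points use the scalar x, then verify against the two generators
  let proof = DLEqProof::prove(&mut OsRng, &x, &h, &alt);
  assert!(proof.verify(&h, &primary, &alt).is_ok());
}
```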
|
||||
16
sign/monero/src/key_image/mod.rs
Normal file
@@ -0,0 +1,16 @@
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};

use crate::hash_to_point;

#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use crate::key_image::multisig::{Package, multisig};

pub fn single(secret: &Scalar) -> EdwardsPoint {
secret * hash_to_point(&(secret * &ED25519_BASEPOINT_TABLE))
}
75
sign/monero/src/key_image/multisig.rs
Normal file
@@ -0,0 +1,75 @@
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use curve25519_dalek::edwards::EdwardsPoint;
|
||||
use dalek_ff_group::Scalar;
|
||||
use frost::{MultisigKeys, sign::lagrange};
|
||||
|
||||
use crate::{SignError, hash_to_point, frost::{Ed25519, DLEqProof}};
|
||||
|
||||
#[derive(Clone)]
|
||||
#[allow(non_snake_case)]
|
||||
pub struct Package {
|
||||
// Don't serialize
|
||||
H: EdwardsPoint,
|
||||
i: usize,
|
||||
|
||||
// Serialize
|
||||
image: EdwardsPoint,
|
||||
proof: DLEqProof
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub fn multisig<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
keys: &MultisigKeys<Ed25519>,
|
||||
included: &[usize]
|
||||
) -> Package {
|
||||
let i = keys.params().i();
|
||||
let secret = (keys.secret_share() * lagrange::<Scalar>(i, included)).0;
|
||||
|
||||
let H = hash_to_point(&keys.group_key().0);
|
||||
let image = secret * H;
|
||||
// Includes a proof. Since:
|
||||
// sum(lagranged_secrets) = group_private
|
||||
// group_private * G = output_key
|
||||
// group_private * H = key_image
|
||||
// Then sum(lagranged_secrets * H) = key_image
|
||||
// lagranged_secret * G is known. lagranged_secret * H is being sent
|
||||
// Any discrete log equality proof confirms the same secret was used,
|
||||
// forming a valid key_image share
|
||||
Package { H, i, image, proof: DLEqProof::prove(rng, &secret, &H, &image) }
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
impl Package {
|
||||
pub fn resolve(
|
||||
self,
|
||||
shares: Vec<Option<(EdwardsPoint, Package)>>
|
||||
) -> Result<EdwardsPoint, SignError> {
|
||||
let mut included = vec![self.i];
|
||||
for i in 1 .. shares.len() {
|
||||
if shares[i].is_some() {
|
||||
included.push(i);
|
||||
}
|
||||
}
|
||||
|
||||
let mut image = self.image;
|
||||
for i in 0 .. shares.len() {
|
||||
if shares[i].is_none() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let (other, shares) = shares[i].as_ref().unwrap();
|
||||
let other = other * lagrange::<Scalar>(i, &included).0;
|
||||
|
||||
// Verify their proof
|
||||
let share = shares.image;
|
||||
shares.proof.verify(&self.H, &other, &share).map_err(|_| SignError::InvalidKeyImage(i))?;
|
||||
|
||||
// Add their share to the image
|
||||
image += share;
|
||||
}
|
||||
|
||||
Ok(image)
|
||||
}
|
||||
}
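Condensed, the reconstruction resolve performs is (editor's summary, not library text):

$$I = x\,\mathcal{H}_p(xG) = \Big(\sum_{i} \lambda_i x_i\Big) H = \sum_{i} \lambda_i x_i H,$$

where $H = \mathcal{H}_p(xG)$, each participant publishes their share $\lambda_i x_i H$, and the accompanying DLEq proof ties it to the publicly known $\lambda_i x_i G$, so a malformed share is caught before the image is used.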
|
||||
82
sign/monero/src/lib.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use lazy_static::lazy_static;
|
||||
use thiserror::Error;
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use tiny_keccak::{Hasher, Keccak};
|
||||
|
||||
use curve25519_dalek::{
|
||||
constants::ED25519_BASEPOINT_TABLE,
|
||||
scalar::Scalar,
|
||||
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
|
||||
};
|
||||
|
||||
use monero::util::key;
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
pub mod frost;
|
||||
|
||||
pub mod key_image;
|
||||
pub mod clsag;
|
||||
|
||||
#[link(name = "wrapper")]
|
||||
extern "C" {
|
||||
fn c_hash_to_point(point: *const u8);
|
||||
pub(crate) fn c_verify_clsag(
|
||||
serialized_len: usize, serialized: *const u8, I: *const u8,
|
||||
msg: *const u8, ring_size: u8, ring: *const u8, pseudo_out: *const u8
|
||||
) -> bool;
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum SignError {
|
||||
#[error("internal error ({0})")]
|
||||
InternalError(String),
|
||||
#[error("invalid discrete log equality proof")]
|
||||
InvalidDLEqProof,
|
||||
#[error("invalid key image {0}")]
|
||||
InvalidKeyImage(usize),
|
||||
#[error("invalid ring member (member {0}, ring size {1})")]
|
||||
InvalidRingMember(u8, u8),
|
||||
#[error("invalid secret for ring (index {0})")]
|
||||
InvalidSecret(u8),
|
||||
#[error("invalid commitment {0}")]
|
||||
InvalidCommitment(usize),
|
||||
#[error("invalid share {0}")]
|
||||
InvalidShare(usize),
|
||||
#[error("invalid signature")]
|
||||
InvalidSignature
|
||||
}
|
||||
|
||||
// Allows using a modern rand as dalek's is notoriously dated
|
||||
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
|
||||
let mut r = [0; 64];
|
||||
rng.fill_bytes(&mut r);
|
||||
Scalar::from_bytes_mod_order_wide(&r)
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&key::H.point.decompress().unwrap());
|
||||
}
|
||||
|
||||
// aG + bH
|
||||
pub fn commitment(randomness: &Scalar, amount: u64) -> EdwardsPoint {
|
||||
(randomness * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(amount) * &*H_TABLE)
|
||||
}
|
||||
|
||||
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||
let mut keccak = Keccak::v256();
|
||||
keccak.update(data);
|
||||
|
||||
let mut res = [0; 32];
|
||||
keccak.finalize(&mut res);
|
||||
Scalar::from_bytes_mod_order(res)
|
||||
}
|
||||
|
||||
pub fn hash_to_point(point: &EdwardsPoint) -> EdwardsPoint {
|
||||
let mut bytes = point.compress().to_bytes();
|
||||
unsafe {
|
||||
c_hash_to_point(bytes.as_mut_ptr());
|
||||
}
|
||||
CompressedEdwardsY::from_slice(&bytes).decompress().unwrap()
|
||||
}
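A hedged usage sketch of the helpers in this file: building a Pedersen commitment aG + bH to an amount, and hashing data to a scalar. It assumes the crate as laid out in this commit; the values are arbitrary.

```rust
use rand::rngs::OsRng;
use monero_sign::{random_scalar, commitment, hash_to_scalar};

fn main() {
  // C = aG + bH, hiding the amount b under the random mask a
  let mask = random_scalar(&mut OsRng);
  let c = commitment(&mask, 1337);

  // Keccak-256 of the compressed commitment, reduced into a scalar
  let challenge = hash_to_scalar(&c.compress().to_bytes());
  let _ = challenge;
}
```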
|
||||
155
sign/monero/tests/clsag.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use rand::{RngCore, rngs::OsRng};
|
||||
|
||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
|
||||
|
||||
use monero_sign::{SignError, random_scalar, commitment, key_image, clsag};
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
use ::frost::sign;
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
mod frost;
|
||||
#[cfg(feature = "multisig")]
|
||||
use crate::frost::generate_keys;
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
const THRESHOLD: usize = 5;
|
||||
#[cfg(feature = "multisig")]
|
||||
const PARTICIPANTS: usize = 8;
|
||||
|
||||
const RING_INDEX: u8 = 3;
|
||||
const RING_LEN: u64 = 11;
|
||||
const AMOUNT: u64 = 1337;
|
||||
|
||||
#[test]
|
||||
fn test_single() -> Result<(), SignError> {
|
||||
let msg = [1; 32];
|
||||
|
||||
let mut secrets = [Scalar::zero(), Scalar::zero()];
|
||||
let mut ring = vec![];
|
||||
for i in 0 .. RING_LEN {
|
||||
let dest = random_scalar(&mut OsRng);
|
||||
let a = random_scalar(&mut OsRng);
|
||||
let amount;
|
||||
if i == RING_INDEX.into() {
|
||||
secrets = [dest, a];
|
||||
amount = AMOUNT;
|
||||
} else {
|
||||
amount = OsRng.next_u64();
|
||||
}
|
||||
let mask = commitment(&a, amount);
|
||||
ring.push([&dest * &ED25519_BASEPOINT_TABLE, mask]);
|
||||
}
|
||||
|
||||
let image = key_image::single(&secrets[0]);
|
||||
let (clsag, pseudo_out) = clsag::sign(
|
||||
&mut OsRng,
|
||||
image,
|
||||
msg,
|
||||
ring.clone(),
|
||||
RING_INDEX,
|
||||
&secrets[0],
|
||||
&secrets[1],
|
||||
AMOUNT
|
||||
)?;
|
||||
clsag::verify(&clsag, image, &msg, &ring, pseudo_out)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
#[test]
|
||||
fn test_multisig() -> Result<(), SignError> {
|
||||
let (keys, group_private) = generate_keys(THRESHOLD, PARTICIPANTS);
|
||||
let t = keys[0].params().t();
|
||||
|
||||
let mut images = vec![];
|
||||
images.resize(PARTICIPANTS + 1, None);
|
||||
let included = (1 ..= THRESHOLD).collect::<Vec<usize>>();
|
||||
for i in &included {
|
||||
let i = *i;
|
||||
images[i] = Some(
|
||||
(
|
||||
keys[0].verification_shares()[i].0,
|
||||
key_image::multisig(&mut OsRng, &keys[i - 1], &included)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
let msg = [1; 32];
|
||||
|
||||
images.push(None);
|
||||
let ki_used = images.swap_remove(1).unwrap().1;
|
||||
let image = ki_used.resolve(images).unwrap();
|
||||
|
||||
let randomness = random_scalar(&mut OsRng);
|
||||
let mut ring = vec![];
|
||||
for i in 0 .. RING_LEN {
|
||||
let dest;
|
||||
let a;
|
||||
let amount;
|
||||
if i != RING_INDEX.into() {
|
||||
dest = random_scalar(&mut OsRng);
|
||||
a = random_scalar(&mut OsRng);
|
||||
amount = OsRng.next_u64();
|
||||
} else {
|
||||
dest = group_private.0;
|
||||
a = randomness;
|
||||
amount = AMOUNT;
|
||||
}
|
||||
let mask = commitment(&a, amount);
|
||||
ring.push([&dest * &ED25519_BASEPOINT_TABLE, mask]);
|
||||
}
|
||||
|
||||
let mut machines = vec![];
|
||||
let mut commitments = Vec::with_capacity(PARTICIPANTS + 1);
|
||||
commitments.resize(PARTICIPANTS + 1, None);
|
||||
for i in 1 ..= t {
|
||||
machines.push(
|
||||
sign::StateMachine::new(
|
||||
sign::Params::new(
|
||||
clsag::Multisig::new(image, msg, ring.clone(), RING_INDEX, &randomness, AMOUNT).unwrap(),
|
||||
keys[i - 1].clone(),
|
||||
&(1 ..= t).collect::<Vec<usize>>()
|
||||
).unwrap()
|
||||
)
|
||||
);
|
||||
commitments[i] = Some(machines[i - 1].preprocess(&mut OsRng).unwrap());
|
||||
}
|
||||
|
||||
let mut shares = Vec::with_capacity(PARTICIPANTS + 1);
|
||||
shares.resize(PARTICIPANTS + 1, None);
|
||||
for i in 1 ..= t {
|
||||
shares[i] = Some(
|
||||
machines[i - 1].sign(
|
||||
&commitments
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
|
||||
.collect::<Vec<Option<Vec<u8>>>>(),
|
||||
b"Hello World"
|
||||
).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
  let mut signature = None;
  for i in 1 ..= t {
    // Multisig calls verify on completion to ensure integrity, before checking the individual key
    // shares. For FROST Schnorr that's the cheaper order; for CLSAG it may be more expensive, yet
    // it ensures we end up with usable signatures, not merely signatures we think are usable
    let sig = machines[i - 1].complete(
      &shares
        .iter()
        .enumerate()
        .map(|(idx, value)| if idx == i { None } else { value.to_owned() })
        .collect::<Vec<Option<Vec<u8>>>>()
    ).unwrap();
    if signature.is_none() {
      signature = Some(sig.clone());
    }
    // Check the pseudo-out commitment and the non-decoy s scalar are identical across every signature
    assert_eq!(sig.1, signature.as_ref().unwrap().1);
    assert_eq!(sig.0.s[RING_INDEX as usize], signature.as_ref().unwrap().0.s[RING_INDEX as usize]);
  }

  Ok(())
}
65
sign/monero/tests/frost.rs
Normal file
@@ -0,0 +1,65 @@
#![cfg(feature = "multisig")]
|
||||
|
||||
use std::rc::Rc;
|
||||
|
||||
use rand::rngs::OsRng;
|
||||
|
||||
use ff::Field;
|
||||
use dalek_ff_group::{ED25519_BASEPOINT_TABLE, Scalar};
|
||||
|
||||
use frost::{
|
||||
MultisigParams, MultisigKeys,
|
||||
key_gen,
|
||||
sign::lagrange
|
||||
};
|
||||
|
||||
use monero_sign::frost::Ed25519;
|
||||
|
||||
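// Test helper: run FROST key generation for n participants with threshold t, returning each
// participant's keys and the interpolated group private key (only recoverable here because this
// is a test)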
pub fn generate_keys(t: usize, n: usize) -> (Vec<Rc<MultisigKeys<Ed25519>>>, Scalar) {
  let mut params = vec![];
  let mut machines = vec![];
  let mut commitments = vec![vec![]];
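  // Round one: each participant generates their coefficients and broadcasts commitments to them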
  for i in 1 ..= n {
    params.push(
      MultisigParams::new(t, n, i).unwrap()
    );
    machines.push(
      key_gen::StateMachine::<Ed25519>::new(
        params[i - 1],
        "monero-sign-rs test suite".to_string()
      )
    );
    commitments.push(machines[i - 1].generate_coefficients(&mut OsRng).unwrap());
  }

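  // Round two: each participant generates secret shares from everyone else's commitments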
  let mut secret_shares = vec![];
  for i in 1 ..= n {
    secret_shares.push(
      machines[i - 1].generate_secret_shares(
        &mut OsRng,
        commitments
          .iter()
          .enumerate()
          .map(|(idx, commitments)| if idx == i { vec![] } else { commitments.to_vec() })
          .collect()
      ).unwrap()
    );
  }

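  // Complete key generation: each participant consumes the shares addressed to them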
  let mut keys = vec![];
  for i in 1 ..= n {
    let mut our_secret_shares = vec![vec![]];
    our_secret_shares.extend(
      secret_shares.iter().map(|shares| shares[i].clone()).collect::<Vec<Vec<u8>>>()
    );
    keys.push(Rc::new(machines[i - 1].complete(our_secret_shares).unwrap().clone()));
  }

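  // Reconstruct the group private key via Lagrange interpolation over the first t shares and
  // confirm it corresponds to the group key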
  let mut group_private = Scalar::zero();
  for i in 0 .. t {
    group_private += keys[i].secret_share() * lagrange::<Scalar>(i + 1, &(1 ..= t).collect::<Vec<usize>>());
  }
  assert_eq!(&ED25519_BASEPOINT_TABLE * group_private, keys[0].group_key());

  (keys, group_private)
}
36
sign/monero/tests/key_image.rs
Normal file
@@ -0,0 +1,36 @@
#![cfg(feature = "multisig")]
|
||||
|
||||
use rand::rngs::OsRng;
|
||||
|
||||
use monero_sign::{SignError, key_image};
|
||||
|
||||
mod frost;
|
||||
use crate::frost::generate_keys;
|
||||
|
||||
#[test]
|
||||
fn test() -> Result<(), SignError> {
|
||||
let (keys, group_private) = generate_keys(3, 5);
|
||||
let image = key_image::single(&group_private);
|
||||
|
||||
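  // Three of the five participants (1, 3, and 4) each provide a key image share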
  let mut packages = vec![];
  packages.resize(5 + 1, None);
  let included = vec![1, 3, 4];
  for i in &included {
    let i = *i;
    packages[i] = Some(
      (
        keys[0].verification_shares()[i].0,
        key_image::multisig(&mut OsRng, &keys[i - 1], &included)
      )
    );
  }

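  // Every included signer should resolve the other shares to the same key image as the
  // single-signer derivation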
  for i in included {
    let mut packages = packages.clone();
    packages.push(None);
    let package = packages.swap_remove(i).unwrap().1;
    assert_eq!(image, package.resolve(packages).unwrap());
  }

  Ok(())
}