Merge branch 'develop' into HEAD

Also updates monero-generators per comments in
https://github.com/serai-dex/serai/pull/308.
This commit is contained in:
Luke Parker
2023-07-03 09:02:27 -04:00
67 changed files with 2471 additions and 1799 deletions

View File

@@ -21,7 +21,7 @@ runs:
using: "composite"
steps:
- name: Install Protobuf
uses: arduino/setup-protoc@master
uses: arduino/setup-protoc@v2.0.0
with:
repo-token: ${{ inputs.github-token }}
@@ -37,7 +37,7 @@ runs:
with:
toolchain: ${{ inputs.rust-toolchain }}
components: ${{ inputs.rust-components }}
targets: wasm32-unknown-unknown
targets: wasm32-unknown-unknown, riscv32imac-unknown-none-elf
- name: Cache Rust
uses: Swatinem/rust-cache@v2

View File

@@ -1 +1 @@
nightly-2023-05-01
nightly-2023-07-01

View File

@@ -18,4 +18,4 @@ jobs:
github-token: ${{ inputs.github-token }}
- name: Verify no-std builds
run: cd tests/no-std && cargo build --target wasm32-unknown-unknown
run: cd tests/no-std && cargo build --target riscv32imac-unknown-none-elf

2652
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -23,6 +23,8 @@ members = [
"coins/monero/generators",
"coins/monero",
"message-queue",
"processor/messages",
"processor",

View File

@@ -12,48 +12,53 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
futures = "0.3"
std-shims = { path = "../../common/std-shims", version = "0.1", default-features = false }
lazy_static = "1"
async-trait = "0.1"
thiserror = "1"
async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", optional = true }
rand_core = "0.6"
rand_chacha = "0.3"
rand = "0.8"
rand_distr = "0.4"
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
zeroize = { version = "^1.5", features = ["zeroize_derive"] }
subtle = "^2.4"
rand_core = { version = "0.6", default-features = false }
# Used to send transactions
rand = { version = "0.8", default-features = false }
rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }
crc = "3"
sha3 = "0.10"
crc = { version = "3", default-features = false }
sha3 = { version = "0.10", default-features = false }
curve25519-dalek = { version = "^3.2", features = ["std"] }
curve25519-dalek = { version = "^3.2", default-features = false }
group = "0.13"
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3" }
multiexp = { path = "../../crypto/multiexp", version = "0.3", features = ["batch"] }
# Used for the hash to curve, along with the more complicated proofs
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.3", default-features = false, features = ["batch"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }
# Needed for multisig
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.3", features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }
monero-generators = { path = "generators", version = "0.3" }
monero-generators = { path = "generators", version = "0.3", default-features = false }
hex = "0.4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }
base58-monero = "1"
monero-epee-bin-serde = "1"
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
digest_auth = "0.3"
reqwest = { version = "0.11", features = ["json"] }
base58-monero = { version = "1", git = "https://github.com/monero-rs/base58-monero", rev = "5045e8d2b817b3b6c1190661f504e879bc769c29", default-features = false, features = ["check"] }
# Used for the provided RPC
digest_auth = { version = "0.3", optional = true }
reqwest = { version = "0.11", features = ["json"], optional = true }
[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3" }
monero-generators = { path = "generators", version = "0.3" }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
monero-generators = { path = "generators", version = "0.3", default-features = false }
[dev-dependencies]
hex-literal = "0.4"
@@ -64,4 +69,33 @@ monero-rpc = "0.3"
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
[features]
multisig = ["transcript", "frost", "dleq"]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"subtle/std",
"rand_core/std",
"rand_chacha/std",
"rand/std",
"rand_distr/std",
"sha3/std",
"curve25519-dalek/std",
"multiexp/std",
"monero-generators/std",
"futures/std",
"hex/std",
"serde/std",
"serde_json/std",
]
http_rpc = ["digest_auth", "reqwest"]
multisig = ["transcript", "frost", "dleq", "std"]
default = ["std", "http_rpc"]

View File

@@ -41,15 +41,16 @@ fn generators(prefix: &'static str, path: &str) {
.write_all(
format!(
"
lazy_static! {{
pub static ref GENERATORS: Generators = Generators {{
pub static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub fn GENERATORS() -> &'static Generators {{
GENERATORS_CELL.get_or_init(|| Generators {{
G: [
{G_str}
],
H: [
{H_str}
],
}};
}})
}}
",
)

View File

@@ -14,8 +14,6 @@ rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }
lazy_static = "1"
subtle = { version = "^2.4", default-features = false }
sha3 = { version = "0.10", default-features = false }
@@ -26,6 +24,5 @@ group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.3" }
[features]
alloc = ["lazy_static/spin_no_std"]
std = ["std-shims/std"]
default = ["std"]

View File

@@ -1,10 +1,12 @@
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//!
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.
#![cfg_attr(not(feature = "std"), no_std)]
use lazy_static::lazy_static;
use core::cell::OnceCell;
use std_shims::sync::Mutex;
use sha3::{Digest, Keccak256};
@@ -25,29 +27,33 @@ fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
}
lazy_static! {
/// Monero alternate generator `H`, used for amounts in Pedersen commitments.
pub static ref H: DalekPoint =
static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
#[allow(non_snake_case)]
pub fn H() -> DalekPoint {
*H_CELL.get_or_init(|| {
CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
.decompress()
.unwrap()
.mul_by_cofactor();
.mul_by_cofactor()
})
}
/// Monero's `H` generator multiplied 2^i for each index, i.e. H, 2H, 4H, 8H, ...
/// used in old range proofs.
/// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/src/
/// ringct/rctTypes.h#L628
pub static ref H2: [DalekPoint; 64] = generate_H2();
static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
/// Monero's alternate generator `H`, multiplied by 2**i for i in 0 .. 64 (H, 2H, 4H, 8H, ...).
#[allow(non_snake_case)]
pub fn H_pow_2() -> &[DalekPoint; 64] {
H_POW_2_CELL.get_or_init(|| {
let mut res = [H(); 64];
for i in 1 .. 64 {
res[i] = res[i - 1].double();
}
res
})
}
#[allow(non_snake_case)]
fn generate_H2() -> [DalekPoint; 64] {
let mut temp = Vec::with_capacity(64);
for i in 0 .. 64 {
temp.push(Scalar::from(2_u128.pow(i)) * *H)
}
temp.try_into().unwrap()
}
const MAX_M: usize = 16;
const N: usize = 64;
@@ -67,7 +73,7 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
for i in 0 .. MAX_MN {
let i = 2 * i;
let mut even = H.compress().to_bytes().to_vec();
let mut even = H().compress().to_bytes().to_vec();
even.extend(dst);
let mut odd = even.clone();

View File

@@ -1,4 +1,7 @@
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use crate::{
hash,

View File

@@ -1,20 +1,20 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
use std::io;
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
use std_shims::{sync::OnceLock, io};
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use sha3::{Digest, Keccak256};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, EdwardsBasepointTable},
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
pub use monero_generators::H;
@@ -37,6 +37,12 @@ pub mod wallet;
#[cfg(test)]
mod tests;
// Lazily-initialized cache for the scalar inverse of eight.
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
/// The inverse of eight over the scalar field, computed once on first use and cached.
#[allow(non_snake_case)]
pub(crate) fn INV_EIGHT() -> Scalar {
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
}
/// Monero protocol version. v15 is omitted as v15 was simply v14 and v16 being active at the same
/// time, with regards to the transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
@@ -107,10 +113,6 @@ impl Protocol {
}
}
lazy_static! {
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&H);
}
/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
@@ -131,7 +133,7 @@ impl Commitment {
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
pub fn calculate(&self) -> EdwardsPoint {
(&self.mask * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(self.amount) * &*H_TABLE)
(&self.mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
}
}

View File

@@ -1,7 +1,5 @@
// Required to be for this entire file, which isn't an issue, as it wouldn't bind to the static
#![allow(non_upper_case_globals)]
use std_shims::{vec::Vec, sync::OnceLock};
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use subtle::{Choice, ConditionallySelectable};
@@ -15,13 +13,17 @@ use multiexp::multiexp as multiexp_const;
pub(crate) use monero_generators::Generators;
use crate::{H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
use crate::{INV_EIGHT as DALEK_INV_EIGHT, H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;
// Bring things into ff/group
lazy_static! {
pub(crate) static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert().unwrap();
pub(crate) static ref H: EdwardsPoint = EdwardsPoint(*DALEK_H);
#[inline]
pub(crate) fn INV_EIGHT() -> Scalar {
Scalar(DALEK_INV_EIGHT())
}
#[inline]
pub(crate) fn H() -> EdwardsPoint {
EdwardsPoint(DALEK_H())
}
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
@@ -34,7 +36,7 @@ pub(crate) const LOG_N: usize = 6; // 2 << 6 == N
pub(crate) const N: usize = 64;
pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
multiexp_const(pairs) * *INV_EIGHT
multiexp_const(pairs) * INV_EIGHT()
}
pub(crate) fn vector_exponent(
@@ -91,7 +93,7 @@ pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, Scalar
pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
let V = commitments.into_iter().map(|c| EdwardsPoint(c) * *INV_EIGHT).collect::<Vec<_>>();
let V = commitments.into_iter().map(|c| EdwardsPoint(c) * INV_EIGHT()).collect::<Vec<_>>();
(hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}
@@ -102,7 +104,7 @@ pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
let ar = Scalar::random(rng);
(ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * *INV_EIGHT)
(ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * INV_EIGHT())
}
pub(crate) fn LR_statements(
@@ -124,8 +126,9 @@ pub(crate) fn LR_statements(
res
}
lazy_static! {
pub(crate) static ref TWO_N: ScalarVector = ScalarVector::powers(Scalar::from(2u8), N);
static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
pub(crate) fn TWO_N() -> &'static ScalarVector {
TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), N))
}
pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {

View File

@@ -1,6 +1,9 @@
#![allow(non_snake_case)]
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use rand_core::{RngCore, CryptoRng};

View File

@@ -1,4 +1,5 @@
use lazy_static::lazy_static;
use std_shims::{vec::Vec, sync::OnceLock};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
@@ -14,9 +15,9 @@ use crate::{Commitment, ringct::bulletproofs::core::*};
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
lazy_static! {
static ref ONE_N: ScalarVector = ScalarVector(vec![Scalar::ONE; N]);
static ref IP12: Scalar = inner_product(&ONE_N, &TWO_N);
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
pub(crate) fn IP12() -> Scalar {
*IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
}
#[derive(Clone, PartialEq, Eq, Debug)]
@@ -48,8 +49,9 @@ impl OriginalStruct {
let (sL, sR) =
ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();
let (mut alpha, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);
let (mut rho, S) = alpha_rho(&mut *rng, &GENERATORS, &sL, &sR);
let generators = GENERATORS();
let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);
let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
let mut cache = hash_to_scalar(&y.to_bytes());
@@ -62,7 +64,7 @@ impl OriginalStruct {
let zpow = ScalarVector::powers(z, M + 2);
for j in 0 .. M {
for i in 0 .. N {
zero_twos.push(zpow[j + 2] * TWO_N[i]);
zero_twos.push(zpow[j + 2] * TWO_N()[i]);
}
}
@@ -77,8 +79,8 @@ impl OriginalStruct {
let mut tau1 = Scalar::random(&mut *rng);
let mut tau2 = Scalar::random(&mut *rng);
let T1 = prove_multiexp(&[(t1, *H), (tau1, EdwardsPoint::generator())]);
let T2 = prove_multiexp(&[(t2, *H), (tau2, EdwardsPoint::generator())]);
let T1 = prove_multiexp(&[(t1, H()), (tau1, EdwardsPoint::generator())]);
let T2 = prove_multiexp(&[(t2, H()), (tau2, EdwardsPoint::generator())]);
let x =
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
@@ -112,10 +114,10 @@ impl OriginalStruct {
let yinv = y.invert().unwrap();
let yinvpow = ScalarVector::powers(yinv, MN);
let mut G_proof = GENERATORS.G[.. a.len()].to_vec();
let mut H_proof = GENERATORS.H[.. a.len()].to_vec();
let mut G_proof = generators.G[.. a.len()].to_vec();
let mut H_proof = generators.H[.. a.len()].to_vec();
H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
let U = *H * x_ip;
let U = H() * x_ip;
let mut L = Vec::with_capacity(logMN);
let mut R = Vec::with_capacity(logMN);
@@ -230,10 +232,10 @@ impl OriginalStruct {
let ip1y = ScalarVector::powers(y, M * N).sum();
let mut k = -(zpow[2] * ip1y);
for j in 1 ..= M {
k -= zpow[j + 2] * *IP12;
k -= zpow[j + 2] * IP12();
}
let y1 = Scalar(self.t) - ((z * ip1y) + k);
proof.push((-y1, *H));
proof.push((-y1, H()));
proof.push((-Scalar(self.taux), G));
@@ -247,7 +249,7 @@ impl OriginalStruct {
proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
proof.push((z3, *H));
proof.push((z3, H()));
proof.push((-Scalar(self.mu), G));
proof.push((Scalar::ONE, A));
@@ -260,13 +262,14 @@ impl OriginalStruct {
let w_cache = challenge_products(&w, &winv);
let generators = GENERATORS();
for i in 0 .. MN {
let g = (Scalar(self.a) * w_cache[i]) + z;
proof.push((-g, GENERATORS.G[i]));
proof.push((-g, generators.G[i]));
let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
h -= ((zpow[(i / N) + 2] * TWO_N[i % N]) + (z * ypow[i])) * yinvpow[i];
proof.push((-h, GENERATORS.H[i]));
h -= ((zpow[(i / N) + 2] * TWO_N()[i % N]) + (z * ypow[i])) * yinvpow[i];
proof.push((-h, generators.H[i]));
}
}

View File

@@ -1,4 +1,5 @@
use lazy_static::lazy_static;
use std_shims::{vec::Vec, sync::OnceLock};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
@@ -17,15 +18,17 @@ use crate::{
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
lazy_static! {
static ref TRANSCRIPT: [u8; 32] =
EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes();
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
*TRANSCRIPT_CELL.get_or_init(|| {
EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes()
})
}
// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
let (cache, commitments) = hash_commitments(commitments);
(hash_to_scalar(&[&*TRANSCRIPT as &[u8], &cache.to_bytes()].concat()), commitments)
(hash_to_scalar(&[TRANSCRIPT().as_ref(), &cache.to_bytes()].concat()), commitments)
}
// d[j*N+i] = z**(2*(j+1)) * 2**i
@@ -34,7 +37,7 @@ fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
let mut d = vec![Scalar::ZERO; MN];
for j in 0 .. M {
for i in 0 .. N {
d[(j * N) + i] = zpow[j] * TWO_N[i];
d[(j * N) + i] = zpow[j] * TWO_N()[i];
}
}
(zpow, ScalarVector(d))
@@ -57,12 +60,14 @@ impl PlusStruct {
rng: &mut R,
commitments: &[Commitment],
) -> PlusStruct {
let generators = GENERATORS();
let (logMN, M, MN) = MN(commitments.len());
let (aL, aR) = bit_decompose(commitments);
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
let (mut cache, _) = hash_plus(commitments_points.clone());
let (mut alpha1, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);
let (mut alpha1, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
let mut cache = hash_to_scalar(&y.to_bytes());
@@ -87,8 +92,8 @@ impl PlusStruct {
let yinv = y.invert().unwrap();
let yinvpow = ScalarVector::powers(yinv, MN);
let mut G_proof = GENERATORS.G[.. a.len()].to_vec();
let mut H_proof = GENERATORS.H[.. a.len()].to_vec();
let mut G_proof = generators.G[.. a.len()].to_vec();
let mut H_proof = generators.H[.. a.len()].to_vec();
let mut L = Vec::with_capacity(logMN);
let mut R = Vec::with_capacity(logMN);
@@ -105,12 +110,12 @@ impl PlusStruct {
let (G_L, G_R) = G_proof.split_at(aL.len());
let (H_L, H_R) = H_proof.split_at(aL.len());
let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, *H);
let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, H());
L_i.push((dL, G));
let L_i = prove_multiexp(&L_i);
L.push(L_i);
let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, *H);
let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, H());
R_i.push((dR, G));
let R_i = prove_multiexp(&R_i);
R.push(R_i);
@@ -139,9 +144,9 @@ impl PlusStruct {
(r, G_proof[0]),
(s, H_proof[0]),
(d, G),
((r * y * b[0]) + (s * y * a[0]), *H),
((r * y * b[0]) + (s * y * a[0]), H()),
]);
let B = prove_multiexp(&[(r * y * s, *H), (eta, G)]);
let B = prove_multiexp(&[(r * y * s, H()), (eta, G)]);
let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);
let r1 = (a[0] * e) + r;
@@ -248,7 +253,7 @@ impl PlusStruct {
let y_sum = weighted_powers(y, MN).sum();
proof.push((
Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
*H,
H(),
));
let w_cache = challenge_products(&w, &winv);
@@ -259,11 +264,12 @@ impl PlusStruct {
let minus_esq_z = -esq_z;
let mut minus_esq_y = minus_esq * yMN;
let generators = GENERATORS();
for i in 0 .. MN {
proof.push((e_r1_y * w_cache[i] + esq_z, GENERATORS.G[i]));
proof.push((e_r1_y * w_cache[i] + esq_z, generators.G[i]));
proof.push((
(e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
GENERATORS.H[i],
generators.H[i],
));
e_r1_y *= yinv;

View File

@@ -1,4 +1,5 @@
use core::ops::{Add, Sub, Mul, Index};
use std_shims::vec::Vec;
use zeroize::{Zeroize, ZeroizeOnDrop};

View File

@@ -1,10 +1,11 @@
#![allow(non_snake_case)]
use core::ops::Deref;
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use lazy_static::lazy_static;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
@@ -18,8 +19,8 @@ use curve25519_dalek::{
};
use crate::{
Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys, ringct::hash_to_point,
serialize::*,
INV_EIGHT, Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys,
ringct::hash_to_point, serialize::*,
};
#[cfg(feature = "multisig")]
@@ -29,28 +30,25 @@ pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
#[cfg(feature = "multisig")]
pub(crate) use multisig::add_key_image_share;
lazy_static! {
static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert();
}
/// Errors returned when CLSAG signing fails.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum ClsagError {
#[error("internal error ({0})")]
#[cfg_attr(feature = "std", error("internal error ({0})"))]
InternalError(&'static str),
#[error("invalid ring")]
#[cfg_attr(feature = "std", error("invalid ring"))]
InvalidRing,
#[error("invalid ring member (member {0}, ring size {1})")]
#[cfg_attr(feature = "std", error("invalid ring member (member {0}, ring size {1})"))]
InvalidRingMember(u8, u8),
#[error("invalid commitment")]
#[cfg_attr(feature = "std", error("invalid commitment"))]
InvalidCommitment,
#[error("invalid key image")]
#[cfg_attr(feature = "std", error("invalid key image"))]
InvalidImage,
#[error("invalid D")]
#[cfg_attr(feature = "std", error("invalid D"))]
InvalidD,
#[error("invalid s")]
#[cfg_attr(feature = "std", error("invalid s"))]
InvalidS,
#[error("invalid c1")]
#[cfg_attr(feature = "std", error("invalid c1"))]
InvalidC1,
}
@@ -103,7 +101,7 @@ fn core(
let n = ring.len();
let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
let D = D * *INV_EIGHT;
let D = D * INV_EIGHT();
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed

View File

@@ -1,8 +1,6 @@
use core::{ops::Deref, fmt::Debug};
use std::{
io::{self, Read, Write},
sync::{Arc, RwLock},
};
use std_shims::io::{self, Read, Write};
use std::sync::{Arc, RwLock};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

View File

@@ -1,5 +1,8 @@
use core::ops::Deref;
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::Zeroizing;

View File

@@ -0,0 +1,91 @@
use async_trait::async_trait;
use digest_auth::AuthContext;
use reqwest::Client;
use crate::rpc::{RpcError, RpcConnection, Rpc};
/// An HTTP(S) transport for the Monero RPC, usable as the connection backing `Rpc`.
#[derive(Clone, Debug)]
pub struct HttpRpc {
// reqwest client, reused across requests
client: Client,
// `(username, password)` parsed out of the URL, if the URL contained credentials
userpass: Option<(String, String)>,
// Daemon URL with any credentials stripped back out of it
url: String,
}
impl HttpRpc {
/// Create a new HTTP(S) RPC connection.
///
/// A daemon requiring authentication can be used via including the username and password in the
/// URL.
///
/// NOTE(review): every split below requires exactly two parts, so usernames/passwords which
/// themselves contain '@', ':', or "://" are rejected with `InvalidNode`.
pub fn new(mut url: String) -> Result<Rpc<HttpRpc>, RpcError> {
// Parse out the username and password
let userpass = if url.contains('@') {
let url_clone = url;
let split_url = url_clone.split('@').collect::<Vec<_>>();
if split_url.len() != 2 {
// `Err(..)?` always returns here; the `if` body doesn't fall through
Err(RpcError::InvalidNode)?;
}
let mut userpass = split_url[0];
url = split_url[1].to_string();
// If there was additionally a protocol string, restore that to the daemon URL
// (e.g. "https://user:pass@host" -> userpass "user:pass", url "https://host")
if userpass.contains("://") {
let split_userpass = userpass.split("://").collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
url = split_userpass[0].to_string() + "://" + &url;
userpass = split_userpass[1];
}
// userpass is now "user:pass"; split it into its two components
let split_userpass = userpass.split(':').collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
} else {
None
};
Ok(Rpc(HttpRpc { client: Client::new(), userpass, url }))
}
}
#[async_trait]
impl RpcConnection for HttpRpc {
/// POST `body` to `{url}/{route}` on the daemon, returning the raw response bytes.
///
/// Network failures map to `ConnectionError`; malformed authentication data maps to
/// `InvalidNode`.
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);
if let Some((user, pass)) = &self.userpass {
// Probe the daemon with a bare POST to the base URL to see if it demands digest auth.
// NOTE(review): this probe runs on every call when credentials are set, adding a full
// extra round trip per request.
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
// Only provide authentication if this daemon actually expects it
if let Some(header) = req.headers().get("www-authenticate") {
builder = builder.header(
"Authorization",
// Answer the daemon's digest challenge for this specific route
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
.map_err(|_| RpcError::InvalidNode)?
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
user,
pass,
"/".to_string() + route,
None,
))
.map_err(|_| RpcError::InvalidNode)?
.to_header_string(),
);
}
}
Ok(
builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?
.bytes()
.await
.map_err(|_| RpcError::ConnectionError)?
.slice(..)
.to_vec(),
)
}
}

View File

@@ -1,23 +1,32 @@
use std::fmt::Debug;
use core::fmt::Debug;
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
use std_shims::{
vec::Vec,
io,
string::{String, ToString},
};
use async_trait::async_trait;
use thiserror::Error;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::{Value, json};
use digest_auth::AuthContext;
use reqwest::Client;
use crate::{
Protocol,
serialize::*,
transaction::{Input, Timelock, Transaction},
block::Block,
wallet::Fee,
};
#[cfg(feature = "http_rpc")]
mod http;
#[cfg(feature = "http_rpc")]
pub use http::*;
#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
@@ -38,23 +47,24 @@ struct TransactionsResponse {
txs: Vec<TransactionResponse>,
}
#[derive(Clone, PartialEq, Eq, Debug, Error)]
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum RpcError {
#[error("internal error ({0})")]
#[cfg_attr(feature = "std", error("internal error ({0})"))]
InternalError(&'static str),
#[error("connection error")]
#[cfg_attr(feature = "std", error("connection error"))]
ConnectionError,
#[error("invalid node")]
#[cfg_attr(feature = "std", error("invalid node"))]
InvalidNode,
#[error("unsupported protocol version ({0})")]
#[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
UnsupportedProtocol(usize),
#[error("transactions not found")]
#[cfg_attr(feature = "std", error("transactions not found"))]
TransactionsNotFound(Vec<[u8; 32]>),
#[error("invalid point ({0})")]
#[cfg_attr(feature = "std", error("invalid point ({0})"))]
InvalidPoint(String),
#[error("pruned transaction")]
#[cfg_attr(feature = "std", error("pruned transaction"))]
PrunedTransaction,
#[error("invalid transaction ({0:?})")]
#[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
InvalidTransaction([u8; 32]),
}
@@ -74,6 +84,23 @@ fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
}
// Read an EPEE VarInt, distinct from the VarInts used throughout the rest of the protocol
fn read_epee_vi<R: io::Read>(reader: &mut R) -> io::Result<u64> {
  // The two low bits of the first byte encode the total encoded length: 1, 2, 4, or 8 bytes
  let first = read_byte(reader)?;
  let total_len = 1usize << (first & 0b11);
  // The remaining six bits of the first byte are the value's lowest bits
  let mut value = u64::from(first >> 2);
  // Each subsequent byte contributes eight further bits, little-endian, offset by the six
  // bits already taken from the first byte
  for byte_i in 1 .. total_len {
    value |= u64::from(read_byte(reader)?) << (((byte_i - 1) * 8) + 6);
  }
  Ok(value)
}
#[async_trait]
pub trait RpcConnection: Clone + Debug {
/// Perform a POST request to the specified route with the specified body.
@@ -82,91 +109,7 @@ pub trait RpcConnection: Clone + Debug {
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError>;
}
#[derive(Clone, Debug)]
pub struct HttpRpc {
client: Client,
userpass: Option<(String, String)>,
url: String,
}
impl HttpRpc {
/// Create a new HTTP(S) RPC connection.
///
/// A daemon requiring authentication can be used via including the username and password in the
/// URL.
pub fn new(mut url: String) -> Result<Rpc<HttpRpc>, RpcError> {
// Parse out the username and password
let userpass = if url.contains('@') {
let url_clone = url;
let split_url = url_clone.split('@').collect::<Vec<_>>();
if split_url.len() != 2 {
Err(RpcError::InvalidNode)?;
}
let mut userpass = split_url[0];
url = split_url[1].to_string();
// If there was additionally a protocol string, restore that to the daemon URL
if userpass.contains("://") {
let split_userpass = userpass.split("://").collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
url = split_userpass[0].to_string() + "://" + &url;
userpass = split_userpass[1];
}
let split_userpass = userpass.split(':').collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
} else {
None
};
Ok(Rpc(HttpRpc { client: Client::new(), userpass, url }))
}
}
#[async_trait]
impl RpcConnection for HttpRpc {
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);
if let Some((user, pass)) = &self.userpass {
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
// Only provide authentication if this daemon actually expects it
if let Some(header) = req.headers().get("www-authenticate") {
builder = builder.header(
"Authorization",
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
.map_err(|_| RpcError::InvalidNode)?
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
user,
pass,
"/".to_string() + route,
None,
))
.map_err(|_| RpcError::InvalidNode)?
.to_header_string(),
);
}
}
Ok(
builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?
.bytes()
.await
.map_err(|_| RpcError::ConnectionError)?
.slice(..)
.to_vec(),
)
}
}
// TODO: Make this provided methods for RpcConnection?
#[derive(Clone, Debug)]
pub struct Rpc<R: RpcConnection>(R);
impl<R: RpcConnection> Rpc<R> {
@@ -179,10 +122,9 @@ impl<R: RpcConnection> Rpc<R> {
route: &str,
params: Option<Params>,
) -> Result<Response, RpcError> {
self
.call_tail(
route,
self
serde_json::from_str(
std_shims::str::from_utf8(
&self
.0
.post(
route,
@@ -194,7 +136,9 @@ impl<R: RpcConnection> Rpc<R> {
)
.await?,
)
.await
.map_err(|_| RpcError::InvalidNode)?,
)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response"))
}
/// Perform a JSON-RPC call with the specified method with the provided parameters
@@ -211,26 +155,8 @@ impl<R: RpcConnection> Rpc<R> {
}
/// Perform a binary call to the specified route with the provided parameters.
pub async fn bin_call<Response: DeserializeOwned + Debug>(
&self,
route: &str,
params: Vec<u8>,
) -> Result<Response, RpcError> {
self.call_tail(route, self.0.post(route, params).await?).await
}
async fn call_tail<Response: DeserializeOwned + Debug>(
&self,
route: &str,
res: Vec<u8>,
) -> Result<Response, RpcError> {
Ok(if !route.ends_with(".bin") {
serde_json::from_str(std::str::from_utf8(&res).map_err(|_| RpcError::InvalidNode)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response"))?
} else {
monero_epee_bin_serde::from_bytes(&res)
.map_err(|_| RpcError::InternalError("Failed to parse binary response"))?
})
pub async fn bin_call(&self, route: &str, params: Vec<u8>) -> Result<Vec<u8>, RpcError> {
self.0.post(route, params).await
}
/// Get the active blockchain protocol version.
@@ -391,6 +317,9 @@ impl<R: RpcConnection> Rpc<R> {
/// Get the output indexes of the specified transaction.
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
/*
TODO: Use these when a suitable epee serde lib exists
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32],
@@ -400,20 +329,125 @@ impl<R: RpcConnection> Rpc<R> {
#[derive(Deserialize, Debug)]
struct OIndexes {
o_indexes: Vec<u64>,
status: String,
untrusted: bool,
credits: usize,
top_hash: String,
}
*/
let indexes: OIndexes = self
.bin_call(
"get_o_indexes.bin",
monero_epee_bin_serde::to_bytes(&Request { txid: hash }).unwrap(),
)
.await?;
// Given the immaturity of Rust epee libraries, this is a homegrown one which is only validated
// to work against this specific function
Ok(indexes.o_indexes)
// Header for EPEE, an 8-byte magic and a version
const EPEE_HEADER: &[u8] = b"\x01\x11\x01\x01\x01\x01\x02\x01\x01";
let mut request = EPEE_HEADER.to_vec();
// Number of fields (shifted over 2 bits as the 2 LSBs are reserved for metadata)
request.push(1 << 2);
// Length of field name
request.push(4);
// Field name
request.extend(b"txid");
// Type of field
request.push(10);
// Length of string, since this byte array is technically a string
request.push(32 << 2);
// The "string"
request.extend(hash);
let indexes_buf = self.bin_call("get_o_indexes.bin", request).await?;
let mut indexes: &[u8] = indexes_buf.as_ref();
(|| {
if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
Err(io::Error::new(io::ErrorKind::Other, "invalid header"))?;
}
let read_object = |reader: &mut &[u8]| {
let fields = read_byte(reader)? >> 2;
for _ in 0 .. fields {
let name_len = read_byte(reader)?;
let name = read_raw_vec(read_byte, name_len.into(), reader)?;
let type_with_array_flag = read_byte(reader)?;
let kind = type_with_array_flag & (!0x80);
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };
if (&name == b"o_indexes") && (kind != 5) {
Err(io::Error::new(io::ErrorKind::Other, "o_indexes weren't u64s"))?;
}
let f = match kind {
// i64
1 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
// i32
2 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
// i16
3 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
// i8
4 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// u64
5 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
// u32
6 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
// u16
7 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
// u8
8 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// double
9 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
// string, or any collection of bytes
10 => |reader: &mut &[u8]| {
let len = read_epee_vi(reader)?;
read_raw_vec(
read_byte,
len
.try_into()
.map_err(|_| io::Error::new(io::ErrorKind::Other, "u64 length exceeded usize"))?,
reader,
)
},
// bool
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// object, errors here as it shouldn't be used on this call
12 => |_: &mut &[u8]| {
Err(io::Error::new(
io::ErrorKind::Other,
"node used object in reply to get_o_indexes",
))
},
// array, so far unused
13 => |_: &mut &[u8]| {
Err(io::Error::new(io::ErrorKind::Other, "node used the unused array type"))
},
_ => {
|_: &mut &[u8]| Err(io::Error::new(io::ErrorKind::Other, "node used an invalid type"))
}
};
let mut res = vec![];
for _ in 0 .. iters {
res.push(f(reader)?);
}
let mut actual_res = Vec::with_capacity(res.len());
if &name == b"o_indexes" {
for o_index in res {
actual_res.push(u64::from_le_bytes(o_index.try_into().map_err(|_| {
io::Error::new(io::ErrorKind::Other, "node didn't provide 8 bytes for a u64")
})?));
}
return Ok(actual_res);
}
}
// Didn't return a response with o_indexes
// TODO: Check if this didn't have o_indexes because it's an error response
Err(io::Error::new(io::ErrorKind::Other, "response didn't contain o_indexes"))
};
read_object(&mut indexes)
})()
.map_err(|_| RpcError::InvalidNode)
}
/// Get the output distribution, from the specified height to the specified height (both

View File

@@ -1,5 +1,8 @@
use std::fmt::Debug;
use std::io::{self, Read, Write};
use core::fmt::Debug;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use curve25519_dalek::{
scalar::Scalar,

View File

@@ -1,5 +1,5 @@
use hex_literal::hex;
use rand::rngs::OsRng;
use rand_core::OsRng;
use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
use multiexp::BatchVerifier;

View File

@@ -1,5 +1,8 @@
use core::cmp::Ordering;
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::Zeroize;
@@ -17,7 +20,7 @@ use crate::{
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Input {
Gen(u64),
ToKey { amount: u64, key_offsets: Vec<u64>, key_image: EdwardsPoint },
ToKey { amount: Option<u64>, key_offsets: Vec<u64>, key_image: EdwardsPoint },
}
impl Input {
@@ -37,7 +40,7 @@ impl Input {
Input::ToKey { amount, key_offsets, key_image } => {
w.write_all(&[2])?;
write_varint(amount, w)?;
write_varint(&amount.unwrap_or(0), w)?;
write_vec(write_varint, key_offsets, w)?;
write_point(key_image, w)
}
@@ -50,14 +53,18 @@ impl Input {
res
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Input> {
pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Input> {
Ok(match read_byte(r)? {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_torsion_free_point(r)?,
},
2 => {
let amount = read_varint(r)?;
let amount = if (amount == 0) && interpret_as_rct { None } else { Some(amount) };
Input::ToKey {
amount,
key_offsets: read_vec(read_varint, r)?,
key_image: read_torsion_free_point(r)?,
}
}
_ => {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
@@ -68,7 +75,7 @@ impl Input {
// Doesn't bother moving to an enum for the unused Script classes
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Output {
pub amount: u64,
pub amount: Option<u64>,
pub key: CompressedEdwardsY,
pub view_tag: Option<u8>,
}
@@ -79,7 +86,7 @@ impl Output {
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
write_varint(&self.amount, w)?;
write_varint(&self.amount.unwrap_or(0), w)?;
w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
w.write_all(&self.key.to_bytes())?;
if let Some(view_tag) = self.view_tag {
@@ -94,8 +101,17 @@ impl Output {
res
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Output> {
pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Output> {
let amount = read_varint(r)?;
let amount = if interpret_as_rct {
if amount != 0 {
Err(io::Error::new(io::ErrorKind::Other, "RCT TX output wasn't 0"))?;
}
None
} else {
Some(amount)
};
let view_tag = match read_byte(r)? {
2 => false,
3 => true,
@@ -191,11 +207,25 @@ impl TransactionPrefix {
}
pub fn read<R: Read>(r: &mut R) -> io::Result<TransactionPrefix> {
let version = read_varint(r)?;
// TODO: Create an enum out of version
if (version == 0) || (version > 2) {
Err(io::Error::new(io::ErrorKind::Other, "unrecognized transaction version"))?;
}
let timelock = Timelock::from_raw(read_varint(r)?);
let inputs = read_vec(|r| Input::read(version == 2, r), r)?;
if inputs.is_empty() {
Err(io::Error::new(io::ErrorKind::Other, "transaction had no inputs"))?;
}
let is_miner_tx = matches!(inputs[0], Input::Gen { .. });
let mut prefix = TransactionPrefix {
version: read_varint(r)?,
timelock: Timelock::from_raw(read_varint(r)?),
inputs: read_vec(Input::read, r)?,
outputs: read_vec(Output::read, r)?,
version,
timelock,
inputs,
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?,
extra: vec![],
};
prefix.extra = read_vec(read_byte, r)?;
@@ -277,10 +307,10 @@ impl Transaction {
.iter()
.map(|input| match input {
Input::Gen(..) => 0,
Input::ToKey { amount, .. } => *amount,
Input::ToKey { amount, .. } => amount.unwrap(),
})
.sum::<u64>()
.saturating_sub(prefix.outputs.iter().map(|output| output.amount).sum());
.saturating_sub(prefix.outputs.iter().map(|output| output.amount.unwrap()).sum());
} else if prefix.version == 2 {
rct_signatures = RctSignatures::read(
prefix

View File

@@ -1,7 +1,5 @@
use core::{marker::PhantomData, fmt::Debug};
use std::string::ToString;
use thiserror::Error;
use std_shims::string::{String, ToString};
use zeroize::Zeroize;
@@ -114,19 +112,20 @@ impl<B: AddressBytes> Zeroize for AddressMeta<B> {
}
/// Error when decoding an address.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum AddressError {
#[error("invalid address byte")]
#[cfg_attr(feature = "std", error("invalid address byte"))]
InvalidByte,
#[error("invalid address encoding")]
#[cfg_attr(feature = "std", error("invalid address encoding"))]
InvalidEncoding,
#[error("invalid length")]
#[cfg_attr(feature = "std", error("invalid length"))]
InvalidLength,
#[error("invalid key")]
#[cfg_attr(feature = "std", error("invalid key"))]
InvalidKey,
#[error("unknown features")]
#[cfg_attr(feature = "std", error("unknown features"))]
UnknownFeatures,
#[error("different network than expected")]
#[cfg_attr(feature = "std", error("different network than expected"))]
DifferentNetwork,
}

View File

@@ -1,13 +1,16 @@
use std::collections::HashSet;
use std_shims::{sync::OnceLock, vec::Vec, collections::HashSet};
use futures::lock::{Mutex, MutexGuard};
#[cfg(not(feature = "std"))]
use std_shims::sync::Mutex;
#[cfg(feature = "std")]
use futures::lock::Mutex;
use lazy_static::lazy_static;
use zeroize::{Zeroize, ZeroizeOnDrop};
use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};
use zeroize::{Zeroize, ZeroizeOnDrop};
#[cfg(not(feature = "std"))]
use rand_distr::num_traits::Float;
use curve25519_dalek::edwards::EdwardsPoint;
@@ -23,18 +26,19 @@ const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;
lazy_static! {
static ref GAMMA: Gamma<f64> = Gamma::new(19.28, 1.0 / 1.61).unwrap();
// TODO: Expose an API to reset this in case a reorg occurs/the RPC fails/returns garbage
// TODO: Update this when scanning a block, as possible
static ref DISTRIBUTION: Mutex<Vec<u64>> = Mutex::new(Vec::with_capacity(3000000));
// TODO: Expose an API to reset this in case a reorg occurs/the RPC fails/returns garbage
// TODO: Update this when scanning a block, as possible
static DISTRIBUTION_CELL: OnceLock<Mutex<Vec<u64>>> = OnceLock::new();
#[allow(non_snake_case)]
fn DISTRIBUTION() -> &'static Mutex<Vec<u64>> {
DISTRIBUTION_CELL.get_or_init(|| Mutex::new(Vec::with_capacity(3000000)))
}
#[allow(clippy::too_many_arguments)]
async fn select_n<'a, R: RngCore + CryptoRng, RPC: RpcConnection>(
rng: &mut R,
rpc: &Rpc<RPC>,
distribution: &MutexGuard<'a, Vec<u64>>,
distribution: &[u64],
height: usize,
high: u64,
per_second: f64,
@@ -60,7 +64,7 @@ async fn select_n<'a, R: RngCore + CryptoRng, RPC: RpcConnection>(
}
// Use a gamma distribution
let mut age = GAMMA.sample(rng).exp();
let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
if age > TIP_APPLICATION {
age -= TIP_APPLICATION;
} else {
@@ -144,7 +148,10 @@ impl Decoys {
height: usize,
inputs: &[SpendableOutput],
) -> Result<Vec<Decoys>, RpcError> {
let mut distribution = DISTRIBUTION.lock().await;
#[cfg(not(feature = "std"))]
let mut distribution = DISTRIBUTION().lock();
#[cfg(feature = "std")]
let mut distribution = DISTRIBUTION().lock().await;
let decoy_count = ring_len - 1;

View File

@@ -1,5 +1,8 @@
use core::ops::BitXor;
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::Zeroize;

View File

@@ -1,5 +1,5 @@
use core::ops::Deref;
use std::collections::{HashSet, HashMap};
use std_shims::collections::{HashSet, HashMap};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
@@ -28,16 +28,16 @@ pub(crate) mod decoys;
pub(crate) use decoys::Decoys;
mod send;
pub use send::{
Fee, TransactionError, Change, SignableTransaction, SignableTransactionBuilder, Eventuality,
};
pub use send::{Fee, TransactionError, Change, SignableTransaction, Eventuality};
#[cfg(feature = "std")]
pub use send::SignableTransactionBuilder;
#[cfg(feature = "multisig")]
pub(crate) use send::InternalPayment;
#[cfg(feature = "multisig")]
pub use send::TransactionMachine;
use crate::ringct::EcdhInfo;
fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> std::cmp::Ordering {
fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}

View File

@@ -1,5 +1,8 @@
use core::ops::Deref;
use std::io::{self, Read, Write};
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::{Zeroize, ZeroizeOnDrop};
@@ -282,6 +285,11 @@ impl<O: Clone + Zeroize> Timelocked<O> {
impl Scanner {
/// Scan a transaction to discover the received outputs.
pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
// Only scan RCT TXs since we can only spend RCT outputs
if tx.prefix.version != 2 {
return Timelocked(tx.prefix.timelock, vec![]);
}
let extra = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref());
let extra = if let Ok(extra) = extra {
extra
@@ -367,8 +375,8 @@ impl Scanner {
let mut commitment = Commitment::zero();
// Miner transaction
if output.amount != 0 {
commitment.amount = output.amount;
if let Some(amount) = output.amount {
commitment.amount = amount;
// Regular transaction
} else {
let amount = match tx.rct_signatures.base.ecdh_info.get(o) {
@@ -450,10 +458,10 @@ impl Scanner {
tx.prefix
.outputs
.iter()
// Filter to miner TX outputs/0-amount outputs since we're tacking the 0-amount index
// This will fail to scan blocks containing pre-RingCT miner TXs
// Filter to v2 miner TX outputs/RCT outputs since we're tracking the RCT output index
.filter(|output| {
matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..))) || (output.amount == 0)
((tx.prefix.version == 2) && matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..)))) ||
output.amount.is_none()
})
.count(),
)

View File

@@ -1,7 +1,10 @@
use core::ops::Deref;
use std::collections::HashMap;
use lazy_static::lazy_static;
use std_shims::{
sync::OnceLock,
vec::Vec,
string::{String, ToString},
collections::HashMap,
};
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
@@ -47,28 +50,32 @@ impl WordList {
}
}
lazy_static! {
static ref LANGUAGES: HashMap<Language, WordList> = HashMap::from([
(Language::Chinese, WordList::new(include!("./classic/zh.rs"), 1)),
(Language::English, WordList::new(include!("./classic/en.rs"), 3)),
(Language::Dutch, WordList::new(include!("./classic/nl.rs"), 4)),
(Language::French, WordList::new(include!("./classic/fr.rs"), 4)),
(Language::Spanish, WordList::new(include!("./classic/es.rs"), 4)),
(Language::German, WordList::new(include!("./classic/de.rs"), 4)),
(Language::Italian, WordList::new(include!("./classic/it.rs"), 4)),
(Language::Portuguese, WordList::new(include!("./classic/pt.rs"), 4)),
(Language::Japanese, WordList::new(include!("./classic/ja.rs"), 3)),
(Language::Russian, WordList::new(include!("./classic/ru.rs"), 4)),
(Language::Esperanto, WordList::new(include!("./classic/eo.rs"), 4)),
(Language::Lojban, WordList::new(include!("./classic/jbo.rs"), 4)),
(Language::EnglishOld, WordList::new(include!("./classic/ang.rs"), 4)),
]);
static LANGUAGES_CELL: OnceLock<HashMap<Language, WordList>> = OnceLock::new();
#[allow(non_snake_case)]
fn LANGUAGES() -> &'static HashMap<Language, WordList> {
LANGUAGES_CELL.get_or_init(|| {
HashMap::from([
(Language::Chinese, WordList::new(include!("./classic/zh.rs"), 1)),
(Language::English, WordList::new(include!("./classic/en.rs"), 3)),
(Language::Dutch, WordList::new(include!("./classic/nl.rs"), 4)),
(Language::French, WordList::new(include!("./classic/fr.rs"), 4)),
(Language::Spanish, WordList::new(include!("./classic/es.rs"), 4)),
(Language::German, WordList::new(include!("./classic/de.rs"), 4)),
(Language::Italian, WordList::new(include!("./classic/it.rs"), 4)),
(Language::Portuguese, WordList::new(include!("./classic/pt.rs"), 4)),
(Language::Japanese, WordList::new(include!("./classic/ja.rs"), 3)),
(Language::Russian, WordList::new(include!("./classic/ru.rs"), 4)),
(Language::Esperanto, WordList::new(include!("./classic/eo.rs"), 4)),
(Language::Lojban, WordList::new(include!("./classic/jbo.rs"), 4)),
(Language::EnglishOld, WordList::new(include!("./classic/ang.rs"), 4)),
])
})
}
#[cfg(test)]
pub(crate) fn trim_by_lang(word: &str, lang: Language) -> String {
if lang != Language::EnglishOld {
word.chars().take(LANGUAGES[&lang].unique_prefix_length).collect()
word.chars().take(LANGUAGES()[&lang].unique_prefix_length).collect()
} else {
word.to_string()
}
@@ -92,7 +99,7 @@ fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> ClassicSeed {
let bytes = Zeroizing::new(key.to_bytes());
// get the language words
let words = &LANGUAGES[&lang].word_list;
let words = &LANGUAGES()[&lang].word_list;
let list_len = u64::try_from(words.len()).unwrap();
// To store the found words & add the checksum word later.
@@ -126,7 +133,7 @@ fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> ClassicSeed {
// create a checksum word for all languages except old english
if lang != Language::EnglishOld {
let checksum = seed[checksum_index(&seed, &LANGUAGES[&lang])].clone();
let checksum = seed[checksum_index(&seed, &LANGUAGES()[&lang])].clone();
seed.push(checksum);
}
@@ -154,7 +161,7 @@ pub(crate) fn seed_to_bytes(words: &str) -> Result<(Language, Zeroizing<[u8; 32]
let mut matched_indices = Zeroizing::new(vec![]);
// Iterate through all the languages
'language: for (lang_name, lang) in LANGUAGES.iter() {
'language: for (lang_name, lang) in LANGUAGES().iter() {
matched_indices.zeroize();
matched_indices.clear();

View File

@@ -1,25 +1,25 @@
use core::fmt;
use std_shims::string::String;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use thiserror::Error;
pub(crate) mod classic;
use classic::{CLASSIC_SEED_LENGTH, CLASSIC_SEED_LENGTH_WITH_CHECKSUM, ClassicSeed};
/// Error when decoding a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum SeedError {
#[error("invalid number of words in seed")]
#[cfg_attr(feature = "std", error("invalid number of words in seed"))]
InvalidSeedLength,
#[error("unknown language")]
#[cfg_attr(feature = "std", error("unknown language"))]
UnknownLanguage,
#[error("invalid checksum")]
#[cfg_attr(feature = "std", error("invalid checksum"))]
InvalidChecksum,
#[error("english old seeds don't support checksums")]
#[cfg_attr(feature = "std", error("english old seeds don't support checksums"))]
EnglishOldWithChecksum,
#[error("invalid seed")]
#[cfg_attr(feature = "std", error("invalid seed"))]
InvalidSeed,
}

View File

@@ -1,7 +1,9 @@
use core::{ops::Deref, fmt};
use std::io;
use thiserror::Error;
use std_shims::{
vec::Vec,
io,
string::{String, ToString},
};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
@@ -42,7 +44,9 @@ use crate::{
},
};
#[cfg(feature = "std")]
mod builder;
#[cfg(feature = "std")]
pub use builder::SignableTransactionBuilder;
#[cfg(feature = "multisig")]
@@ -117,34 +121,35 @@ impl SendOutput {
}
}
#[derive(Clone, PartialEq, Eq, Debug, Error)]
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum TransactionError {
#[error("multiple addresses with payment IDs")]
#[cfg_attr(feature = "std", error("multiple addresses with payment IDs"))]
MultiplePaymentIds,
#[error("no inputs")]
#[cfg_attr(feature = "std", error("no inputs"))]
NoInputs,
#[error("no outputs")]
#[cfg_attr(feature = "std", error("no outputs"))]
NoOutputs,
#[error("only one output and no change address")]
#[cfg_attr(feature = "std", error("only one output and no change address"))]
NoChange,
#[error("too many outputs")]
#[cfg_attr(feature = "std", error("too many outputs"))]
TooManyOutputs,
#[error("too much data")]
#[cfg_attr(feature = "std", error("too much data"))]
TooMuchData,
#[error("too many inputs/too much arbitrary data")]
#[cfg_attr(feature = "std", error("too many inputs/too much arbitrary data"))]
TooLargeTransaction,
#[error("not enough funds (in {0}, out {1})")]
#[cfg_attr(feature = "std", error("not enough funds (in {0}, out {1})"))]
NotEnoughFunds(u64, u64),
#[error("wrong spend private key")]
#[cfg_attr(feature = "std", error("wrong spend private key"))]
WrongPrivateKey,
#[error("rpc error ({0})")]
#[cfg_attr(feature = "std", error("rpc error ({0})"))]
RpcError(RpcError),
#[error("clsag error ({0})")]
#[cfg_attr(feature = "std", error("clsag error ({0})"))]
ClsagError(ClsagError),
#[error("invalid transaction ({0})")]
#[cfg_attr(feature = "std", error("invalid transaction ({0})"))]
InvalidTransaction(RpcError),
#[cfg(feature = "multisig")]
#[error("frost error {0}")]
#[cfg_attr(feature = "std", error("frost error {0}"))]
FrostError(FrostError),
}
@@ -180,7 +185,7 @@ async fn prepare_inputs<R: RngCore + CryptoRng, RPC: RpcConnection>(
));
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
amount: None,
key_offsets: decoys[i].offsets.clone(),
key_image: signable[i].1,
});
@@ -629,7 +634,7 @@ impl SignableTransaction {
for output in &outputs {
fee -= output.commitment.amount;
tx_outputs.push(Output {
amount: 0,
amount: None,
key: output.dest.compress(),
view_tag: Some(output.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
});
@@ -688,7 +693,7 @@ impl SignableTransaction {
uniqueness(
&images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.map(|image| Input::ToKey { amount: None, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
);
@@ -750,7 +755,7 @@ impl Eventuality {
for (o, (expected, actual)) in outputs.iter().zip(tx.prefix.outputs.iter()).enumerate() {
// Verify the output, commitment, and encrypted amount.
if (&Output {
amount: 0,
amount: None,
key: expected.dest.compress(),
view_tag: Some(expected.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
} != actual) ||

View File

@@ -1,8 +1,9 @@
use std::{
use std_shims::{
vec::Vec,
io::{self, Read},
sync::{Arc, RwLock},
collections::HashMap,
};
use std::sync::{Arc, RwLock};
use zeroize::Zeroizing;
@@ -339,7 +340,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
uniqueness(
&sorted_images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.map(|image| Input::ToKey { amount: None, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
)
@@ -372,7 +373,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
}
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
amount: None,
key_offsets: value.2.offsets.clone(),
key_image: value.0,
});

View File

@@ -1,7 +1,5 @@
use core::ops::Deref;
use std::collections::HashSet;
use lazy_static::lazy_static;
use std_shims::{sync::OnceLock, collections::HashSet};
use zeroize::Zeroizing;
use rand_core::OsRng;
@@ -98,9 +96,7 @@ pub async fn rpc() -> Rpc<HttpRpc> {
rpc
}
lazy_static! {
pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
}
pub static SEQUENTIAL: OnceLock<Mutex<()>> = OnceLock::new();
#[macro_export]
macro_rules! async_sequential {
@@ -108,7 +104,7 @@ macro_rules! async_sequential {
$(
#[tokio::test]
async fn $name() {
let guard = runner::SEQUENTIAL.lock().await;
let guard = runner::SEQUENTIAL.get_or_init(|| tokio::sync::Mutex::new(())).lock().await;
let local = tokio::task::LocalSet::new();
local.run_until(async move {
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {

View File

@@ -13,7 +13,8 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
hashbrown = "0.13"
spin = "0.9"
hashbrown = "0.14"
[features]
std = []

View File

@@ -53,7 +53,7 @@ mod shims {
impl Read for &[u8] {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let mut read = 0;
let mut read = buf.len();
if self.len() < buf.len() {
read = self.len();
}

View File

@@ -1,3 +1,5 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
@@ -6,9 +8,17 @@
#[macro_use]
pub extern crate alloc;
pub mod sync;
pub mod collections;
pub mod io;
pub mod vec {
#[cfg(not(feature = "std"))]
pub use alloc::vec::*;
#[cfg(feature = "std")]
pub use std::vec::*;
}
pub mod str {
#[cfg(not(feature = "std"))]
pub use alloc::str::*;
@@ -16,9 +26,9 @@ pub mod str {
pub use std::str::*;
}
pub mod vec {
pub mod string {
#[cfg(not(feature = "std"))]
pub use alloc::vec::*;
pub use alloc::string::*;
#[cfg(feature = "std")]
pub use std::vec::*;
pub use std::string::*;
}

View File

@@ -0,0 +1,71 @@
pub use core::sync::*;
// Presents a single Mutex API over two backends: std's poisoning Mutex (with `std`) and spin's
// lock-free-friendly Mutex (no-std).
mod mutex_shim {
  #[cfg(feature = "std")]
  pub use std::sync::*;
  #[cfg(not(feature = "std"))]
  pub use spin::*;

  // Wrapper so `lock()` has the same (non-Result) signature on both backends.
  #[derive(Default, Debug)]
  pub struct ShimMutex<T>(Mutex<T>);
  impl<T> ShimMutex<T> {
    // `const` so the shim can back `static` items, matching both underlying `Mutex::new`s.
    pub const fn new(value: T) -> Self {
      Self(Mutex::new(value))
    }

    // Acquire the lock, blocking (or spinning) until available.
    // NOTE(review): under `std` this unwraps, so a poisoned Mutex (a panic while held) becomes a
    // panic here; the spin backend has no poisoning, so the two differ on that edge.
    pub fn lock(&self) -> MutexGuard<'_, T> {
      #[cfg(feature = "std")]
      let res = self.0.lock().unwrap();
      #[cfg(not(feature = "std"))]
      let res = self.0.lock();
      res
    }
  }
}
pub use mutex_shim::{ShimMutex as Mutex, MutexGuard};
#[cfg(feature = "std")]
pub use std::sync::OnceLock;
#[cfg(not(feature = "std"))]
mod oncelock_shim {
  use core::cell::UnsafeCell;

  use super::Mutex;

  /// A minimal polyfill of `std::sync::OnceLock` for no-std builds.
  ///
  /// The `Mutex<bool>` tracks whether the value has been initialized. The value itself lives in
  /// an `UnsafeCell` so initialization can happen through `&self` in `get_or_init` — writing
  /// through a `*const`-derived pointer into a plain field (as the prior revision did with
  /// `addr_of! as *mut` + `write_unaligned`) is undefined behavior; `UnsafeCell` is the sanctioned
  /// way to get interior mutability.
  pub struct OnceLock<T>(Mutex<bool>, UnsafeCell<Option<T>>);

  // SAFETY: All writes to the cell occur while holding the Mutex, before the initialized flag is
  // set, and the value is never mutated after initialization. Readers check the flag under the
  // same Mutex, establishing the necessary happens-before edge.
  unsafe impl<T: Send + Sync> Sync for OnceLock<T> {}

  impl<T> OnceLock<T> {
    /// Create an empty `OnceLock`. `const` so it can back `static` items.
    pub const fn new() -> OnceLock<T> {
      OnceLock(Mutex::new(false), UnsafeCell::new(None))
    }

    // These return a distinct Option in case of None so another caller using get_or_init doesn't
    // transform it from None to Some
    pub fn get(&self) -> Option<&T> {
      if !*self.0.lock() {
        None
      } else {
        // SAFETY: The flag was observed true under the lock, so initialization completed
        // (happens-before via the lock) and the value is never mutated again.
        unsafe { (*self.1.get()).as_ref() }
      }
    }

    pub fn get_mut(&mut self) -> Option<&mut T> {
      if !*self.0.lock() {
        None
      } else {
        // `&mut self` grants exclusive access, so going through the cell is trivially sound.
        self.1.get_mut().as_mut()
      }
    }

    /// Return the contained value, initializing it with `f` if it wasn't already set.
    pub fn get_or_init<F: FnOnce() -> T>(&self, f: F) -> &T {
      let mut lock = self.0.lock();
      if !*lock {
        // SAFETY: The lock is held, so no other thread is concurrently accessing the cell
        // (readers check the flag, still false, under this same lock before touching it).
        unsafe {
          self.1.get().write(Some(f()));
        }
        *lock = true;
      }
      drop(lock);
      self.get().unwrap()
    }
  }
}
#[cfg(not(feature = "std"))]
pub use oncelock_shim::*;

View File

@@ -1,5 +1,5 @@
[package]
name = "coordinator"
name = "serai-coordinator"
version = "0.1.0"
description = "Serai coordinator to prepare batches and sign transactions"
license = "AGPL-3.0-only"
@@ -34,7 +34,7 @@ sp-application-crypto = { git = "https://github.com/serai-dex/substrate", defaul
serai-db = { path = "../common/db" }
processor-messages = { package = "processor-messages", path = "../processor/messages" }
processor-messages = { package = "serai-processor-messages", path = "../processor/messages" }
tributary = { package = "tributary-chain", path = "./tributary" }
serai-client = { path = "../substrate/client", features = ["serai"] }

View File

@@ -35,10 +35,11 @@ async fn sync_test() {
// Have the rest form a P2P net
let mut tributary_arcs = vec![];
let mut p2p_threads = vec![];
for (i, (p2p, tributary)) in tributaries.drain(..).enumerate() {
let tributary = Arc::new(RwLock::new(tributary));
tributary_arcs.push(tributary.clone());
tokio::spawn(handle_p2p(
let thread = tokio::spawn(handle_p2p(
Ristretto::generator() * *keys[i],
p2p,
Arc::new(RwLock::new(HashMap::from([(
@@ -46,6 +47,7 @@ async fn sync_test() {
ActiveTributary { spec: spec.clone(), tributary },
)]))),
));
p2p_threads.push(thread);
}
let tributaries = tributary_arcs;
@@ -112,21 +114,28 @@ async fn sync_test() {
assert!(syncer_tributary.read().await.tip().await != syncer_tip);
// Verify it's now participating in consensus
// Because only `t` validators are used in a commit, check several commits
// This should be biased in favor of the syncer since we're using the syncer's view of the commit
for _ in 0 .. 10 {
let syncer_tributary = syncer_tributary.read().await;
if syncer_tributary
.reader()
.parsed_commit(&syncer_tributary.tip().await)
.unwrap()
.validators
.iter()
.any(|signer| signer == &syncer_key.to_bytes())
{
return;
}
sleep(Duration::from_secs(block_time)).await;
// Because only `t` validators are used in a commit, take n - t nodes offline
// leaving only `t` nodes. Which should force it to participate in the consensus
// of next blocks.
let spares = usize::from(spec.n() - spec.t());
for thread in p2p_threads.iter().take(spares) {
thread.abort();
}
// wait for a block
sleep(Duration::from_secs(block_time)).await;
let syncer_tributary = syncer_tributary.read().await;
if syncer_tributary
.reader()
.parsed_commit(&syncer_tributary.tip().await)
.unwrap()
.validators
.iter()
.any(|signer| signer == &syncer_key.to_bytes())
{
return;
}
panic!("synced tributary didn't start participating in consensus");
}

View File

@@ -458,11 +458,9 @@ impl Transaction {
signed_ref.signer = Ristretto::generator() * key.deref();
signed_ref.nonce = nonce;
let sig_nonce = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(rng));
signed(self).signature.R = <Ristretto as Ciphersuite>::generator() * sig_nonce.deref();
let sig_hash = self.sig_hash(genesis);
signed(self).signature = SchnorrSignature::<Ristretto>::sign(
key,
Zeroizing::new(<Ristretto as Ciphersuite>::F::random(rng)),
sig_hash,
);
signed(self).signature = SchnorrSignature::<Ristretto>::sign(key, sig_nonce, sig_hash);
}
}

View File

@@ -1,3 +1,4 @@
use core::ops::Deref;
use std::{io, collections::HashMap};
use zeroize::Zeroizing;
@@ -114,11 +115,9 @@ pub fn signed_transaction<R: RngCore + CryptoRng>(
let mut tx =
SignedTransaction(data, Signed { signer, nonce, signature: random_signed(rng).signature });
tx.1.signature = SchnorrSignature::sign(
key,
Zeroizing::new(<Ristretto as Ciphersuite>::F::random(rng)),
tx.sig_hash(genesis),
);
let sig_nonce = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(rng));
tx.1.signature.R = Ristretto::generator() * sig_nonce.deref();
tx.1.signature = SchnorrSignature::sign(key, sig_nonce, tx.sig_hash(genesis));
let mut nonces = HashMap::from([(signer, nonce)]);
verify_transaction(&tx, genesis, &mut nonces).unwrap();

View File

@@ -98,10 +98,20 @@ pub trait Transaction: 'static + Send + Sync + Clone + Eq + Debug + ReadWrite {
/// Obtain the challenge for this transaction's signature.
///
/// Do not override this unless you know what you're doing.
///
/// Panics if called on non-signed transactions.
fn sig_hash(&self, genesis: [u8; 32]) -> <Ristretto as Ciphersuite>::F {
<Ristretto as Ciphersuite>::F::from_bytes_mod_order_wide(
&Blake2b512::digest([genesis, self.hash()].concat()).into(),
)
match self.kind() {
TransactionKind::Signed(Signed { signature, .. }) => {
<Ristretto as Ciphersuite>::F::from_bytes_mod_order_wide(
&Blake2b512::digest(
[genesis.as_ref(), &self.hash(), signature.R.to_bytes().as_ref()].concat(),
)
.into(),
)
}
_ => panic!("sig_hash called on non-signed transaction"),
}
}
}

View File

@@ -45,7 +45,7 @@ ff-group-tests = { version = "0.13", path = "../ff-group-tests" }
[features]
alloc = ["std-shims"]
std = ["std-shims/std"]
std = ["zeroize/std", "std-shims/std"]
dalek = ["sha2", "dalek-ff-group"]
ed25519 = ["dalek"]

View File

@@ -55,10 +55,13 @@ pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G
let mut repr = G::Repr::default();
r.read_exact(repr.as_mut())?;
let point = G::from_bytes(&repr);
if point.is_none().into() {
Err(std::io::Error::new(std::io::ErrorKind::Other, "invalid point"))?;
let Some(point) = Option::<G>::from(point) else {
Err(std::io::Error::new(std::io::ErrorKind::Other, "invalid point"))?
};
if point.to_bytes().as_ref() != repr.as_ref() {
Err(std::io::Error::new(std::io::ErrorKind::Other, "non-canonical point"))?;
}
Ok(point.unwrap())
Ok(point)
}
/// A pair of generators, one committing to values (primary), one blinding (alt), for an elliptic

View File

@@ -35,5 +35,5 @@ dalek-ff-group = { path = "../dalek-ff-group", version = "0.3" }
ciphersuite = { path = "../ciphersuite", version = "0.3", features = ["ed25519"] }
[features]
std = ["std-shims/std", "ciphersuite/std"]
std = ["std-shims/std", "ciphersuite/std", "multiexp/std"]
default = ["std"]

View File

@@ -45,17 +45,19 @@ default = "deny"
exceptions = [
{ allow = ["AGPL-3.0"], name = "ethereum-serai" },
{ allow = ["AGPL-3.0"], name = "processor-messages" },
{ allow = ["AGPL-3.0"], name = "processor" },
{ allow = ["AGPL-3.0"], name = "serai-message-queue" },
{ allow = ["AGPL-3.0"], name = "serai-processor-messages" },
{ allow = ["AGPL-3.0"], name = "serai-processor" },
{ allow = ["AGPL-3.0"], name = "tributary-chain" },
{ allow = ["AGPL-3.0"], name = "coordinator" },
{ allow = ["AGPL-3.0"], name = "serai-coordinator" },
{ allow = ["AGPL-3.0"], name = "tokens-pallet" },
{ allow = ["AGPL-3.0"], name = "serai-tokens-pallet" },
{ allow = ["AGPL-3.0"], name = "in-instructions-pallet" },
{ allow = ["AGPL-3.0"], name = "serai-in-instructions-pallet" },
{ allow = ["AGPL-3.0"], name = "validator-sets-pallet" },
{ allow = ["AGPL-3.0"], name = "serai-validator-sets-pallet" },
{ allow = ["AGPL-3.0"], name = "serai-runtime" },
{ allow = ["AGPL-3.0"], name = "serai-node" },
@@ -83,4 +85,5 @@ allow-registry = ["https://github.com/rust-lang/crates.io-index"]
allow-git = [
"https://github.com/serai-dex/substrate-bip39",
"https://github.com/serai-dex/substrate",
"https://github.com/monero-rs/base58-monero",
]

38
message-queue/Cargo.toml Normal file
View File

@@ -0,0 +1,38 @@
[package]
name = "serai-message-queue"
version = "0.1.0"
description = "A message queue focused on safety"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/message-queue"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
publish = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
# Macros
lazy_static = "1"
serde = { version = "1", features = ["derive"] }
# Encoders
hex = "0.4"
serde_json = "1"
# Cryptography
transcript = { package = "flexible-transcript", path = "../crypto/transcript", features = ["recommended"] }
ciphersuite = { path = "../crypto/ciphersuite", features = ["ristretto"] }
schnorr-signatures = { path = "../crypto/schnorr" }
# Application
log = "0.4"
tokio = { version = "1", features = ["full"] }
serai-db = { path = "../common/db" }
serai-primitives = { path = "../substrate/primitives" }
jsonrpsee = { version = "0.16", features = ["server"] }

15
message-queue/LICENSE Normal file
View File

@@ -0,0 +1,15 @@
AGPL-3.0-only license
Copyright (c) 2023 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

14
message-queue/README.md Normal file
View File

@@ -0,0 +1,14 @@
# Message Log
A message log for various services to communicate over.
Each message is checked to be of the claimed origin. Then, it's added to the
recipient's message queue. This queue is sequentially handled, FIFO, only
dropping messages once the recipient acknowledges it's been handled.
A client which publishes an event specifies its own ID for the publication. If
multiple publications with the same ID occur, they are assumed repeats and
dropped.
This library always panics as its error-cases should be unreachable, given its
intranet status.

2
message-queue/src/lib.rs Normal file
View File

@@ -0,0 +1,2 @@
//! Message types shared with clients of the message queue service.
mod messages;
pub use messages::*;

152
message-queue/src/main.rs Normal file
View File

@@ -0,0 +1,152 @@
use std::{
sync::{Arc, RwLock},
collections::HashMap,
};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use schnorr_signatures::SchnorrSignature;
use serai_primitives::NetworkId;
use jsonrpsee::{RpcModule, server::ServerBuilder};
mod messages;
use messages::*;
mod queue;
use queue::Queue;
lazy_static::lazy_static! {
static ref KEYS: Arc<RwLock<HashMap<Service, <Ristretto as Ciphersuite>::G>>> =
Arc::new(RwLock::new(HashMap::new()));
static ref QUEUES: Arc<RwLock<HashMap<Service, RwLock<Queue<serai_db::MemDb>>>>> =
Arc::new(RwLock::new(HashMap::new()));
}
// queue RPC method
//
// Authenticates a message against its claimed sender's registered key, then appends it to the
// recipient's queue. Panics on any failure, per this service's panic-on-unreachable design.
fn queue_message(meta: Metadata, msg: Vec<u8>, sig: SchnorrSignature<Ristretto>) {
  // Authenticate the message against the sender's registered key
  {
    let sender_key = KEYS.read().unwrap()[&meta.from];
    assert!(sig.verify(sender_key, message_challenge(sender_key, &msg, sig.R)));
  }

  // Exactly one of the two parties must be the coordinator
  let from_is_coordinator = matches!(meta.from, Service::Coordinator);
  let to_is_coordinator = matches!(meta.to, Service::Coordinator);
  assert!(from_is_coordinator ^ to_is_coordinator);

  // TODO: Verify the from_id hasn't been prior seen

  // Append the message to the destination's queue
  let queues = QUEUES.read().unwrap();
  let mut queue = queues[&meta.to].write().unwrap();
  queue.queue_message(QueuedMessage { from: meta.from, msg, sig: sig.serialize() });
}
// get RPC method
//
// Returns the message immediately following the last one the service acknowledged (ID 0 if
// nothing has been acknowledged yet), or None if the queue has no such message.
fn get_next_message(
  service: Service,
  _expected: u64,
  _signature: SchnorrSignature<Ristretto>,
) -> Option<QueuedMessage> {
  // TODO: Verify the signature
  // TODO: Verify the expected next message ID matches

  let queues = QUEUES.read().unwrap();
  let queue = queues[&service].read().unwrap();
  let next_id = queue.last_acknowledged().map_or(0, |acked| acked + 1);
  queue.get_message(next_id)
}
// ack RPC method
//
// Marks message `id` as handled by `service`, advancing what `get_next_message` serves.
fn ack_message(service: Service, id: u64, _signature: SchnorrSignature<Ristretto>) {
  // TODO: Verify the signature

  // Design note — should the check be:
  // - acknowledged message > last acknowledged (if messages are saved as acknowledged before
  //   being acknowledged), or
  // - acknowledged message >= last acknowledged (if messages are acknowledged before being
  //   saved as acknowledged)?
  // TODO: Check only a proper message is being acked

  let queues = QUEUES.read().unwrap();
  let mut queue = queues[&service].write().unwrap();
  queue.ack_message(id)
}
#[tokio::main]
async fn main() {
// Open the DB
// TODO
let db = serai_db::MemDb::new();
let read_key = |str| {
let Ok(key) = std::env::var(str) else { None? };
let mut repr = <<Ristretto as Ciphersuite>::G as GroupEncoding>::Repr::default();
repr.as_mut().copy_from_slice(&hex::decode(key).unwrap());
Some(<Ristretto as Ciphersuite>::G::from_bytes(&repr).unwrap())
};
let register_service = |service, key| {
(*KEYS).write().unwrap().insert(service, key);
(*QUEUES).write().unwrap().insert(service, RwLock::new(Queue(db.clone(), service)));
};
// Make queues for each NetworkId, other than Serai
for network in [NetworkId::Bitcoin, NetworkId::Ethereum, NetworkId::Monero] {
// Use a match so we error if the list of NetworkIds changes
let Some(key) = read_key(match network {
NetworkId::Serai => unreachable!(),
NetworkId::Bitcoin => "BITCOIN_KEY",
NetworkId::Ethereum => "ETHEREUM_KEY",
NetworkId::Monero => "MONERO_KEY",
}) else { continue };
register_service(Service::Processor(network), key);
}
// And the coordinator's
register_service(Service::Coordinator, read_key("COORDINATOR_KEY").unwrap());
// Start server
let builder = ServerBuilder::new();
// TODO: Set max request/response size
let listen_on: &[std::net::SocketAddr] = &["0.0.0.0".parse().unwrap()];
let server = builder.build(listen_on).await.unwrap();
let mut module = RpcModule::new(());
module
.register_method("queue", |args, _| {
let args = args.parse::<(Metadata, Vec<u8>, Vec<u8>)>().unwrap();
queue_message(
args.0,
args.1,
SchnorrSignature::<Ristretto>::read(&mut args.2.as_slice()).unwrap(),
);
Ok(())
})
.unwrap();
module
.register_method("next", |args, _| {
let args = args.parse::<(Service, u64, Vec<u8>)>().unwrap();
get_next_message(
args.0,
args.1,
SchnorrSignature::<Ristretto>::read(&mut args.2.as_slice()).unwrap(),
);
Ok(())
})
.unwrap();
module
.register_method("ack", |args, _| {
let args = args.parse::<(Service, u64, Vec<u8>)>().unwrap();
ack_message(
args.0,
args.1,
SchnorrSignature::<Ristretto>::read(&mut args.2.as_slice()).unwrap(),
);
Ok(())
})
.unwrap();
server.start(module).unwrap();
}

View File

@@ -0,0 +1,40 @@
use transcript::{Transcript, RecommendedTranscript};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use serde::{Serialize, Deserialize};
use serai_primitives::NetworkId;
/// A party which sends and receives messages over the queue.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
pub enum Service {
  /// The processor for the specified external network.
  Processor(NetworkId),
  /// The coordinator.
  Coordinator,
}
/// A message as stored in, and served from, a recipient's queue.
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct QueuedMessage {
  // The service which sent this message
  pub from: Service,
  // The message itself
  pub msg: Vec<u8>,
  // The sender's serialized Schnorr signature over the message
  pub sig: Vec<u8>,
}
/// Routing metadata accompanying a message publication.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct Metadata {
  // The sending service
  pub from: Service,
  // The receiving service
  pub to: Service,
  // The sender-chosen ID for this publication, used to detect and drop repeats
  pub from_id: u64,
}
/// The challenge for a message's Schnorr signature.
///
/// Binds the sender's public key, the message bytes, and the signature's nonce into a single
/// scalar via the "Serai Message Queue v0.1" transcript.
pub fn message_challenge(
  from: <Ristretto as Ciphersuite>::G,
  msg: &[u8],
  nonce: <Ristretto as Ciphersuite>::G,
) -> <Ristretto as Ciphersuite>::F {
  let mut t = RecommendedTranscript::new(b"Serai Message Queue v0.1");

  t.domain_separate(b"message");
  t.append_message(b"from", from.to_bytes());
  t.append_message(b"msg", msg);

  t.domain_separate(b"signature");
  t.append_message(b"nonce", nonce.to_bytes());

  <Ristretto as Ciphersuite>::hash_to_F(b"challenge", &t.challenge(b"challenge"))
}

View File

@@ -0,0 +1,57 @@
use serai_db::{DbTxn, Db};
use crate::messages::*;
/// A single service's FIFO message queue, persisted to the DB.
#[derive(Clone, Debug)]
pub(crate) struct Queue<D: Db>(pub(crate) D, pub(crate) Service);
impl<D: Db> Queue<D> {
  /// Form a DB key: a one-byte domain length, the domain, then the key material.
  fn key(domain: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
    [&[u8::try_from(domain.len()).unwrap()], domain, key.as_ref()].concat()
  }

  fn message_count_key(&self) -> Vec<u8> {
    Self::key(b"message_count", serde_json::to_vec(&self.1).unwrap())
  }
  /// The amount of messages ever queued for this service (also the next message's ID).
  pub(crate) fn message_count(&self) -> u64 {
    self
      .0
      .get(self.message_count_key())
      .map_or(0, |raw| u64::from_le_bytes(raw.try_into().unwrap()))
  }

  fn last_acknowledged_key(&self) -> Vec<u8> {
    Self::key(b"last_acknowledged", serde_json::to_vec(&self.1).unwrap())
  }
  /// The ID of the last message this service acknowledged, if any.
  pub(crate) fn last_acknowledged(&self) -> Option<u64> {
    self
      .0
      .get(self.last_acknowledged_key())
      .map(|raw| u64::from_le_bytes(raw.try_into().unwrap()))
  }

  fn message_key(&self, id: u64) -> Vec<u8> {
    Self::key(b"message", serde_json::to_vec(&(self.1, id)).unwrap())
  }
  /// Append a message to this queue, assigning it the next sequential ID.
  pub(crate) fn queue_message(&mut self, msg: QueuedMessage) {
    let id = self.message_count();
    // Derive the keys before opening the txn, as the txn mutably borrows the DB
    let message_key = self.message_key(id);
    let count_key = self.message_count_key();

    let mut txn = self.0.txn();
    txn.put(message_key, serde_json::to_vec(&msg).unwrap());
    txn.put(count_key, (id + 1).to_le_bytes());
    txn.commit();
  }

  /// Fetch the message with the specified ID, if it exists.
  pub(crate) fn get_message(&self, id: u64) -> Option<QueuedMessage> {
    self.0.get(self.message_key(id)).map(|raw| serde_json::from_slice(&raw).unwrap())
  }

  /// Persist `id` as the last message this service acknowledged.
  pub(crate) fn ack_message(&mut self, id: u64) {
    // Derive the key before opening the txn, as the txn mutably borrows the DB
    let acknowledged_key = self.last_acknowledged_key();
    let mut txn = self.0.txn();
    txn.put(acknowledged_key, id.to_le_bytes());
    txn.commit();
  }
}

View File

@@ -1,5 +1,5 @@
[package]
name = "processor"
name = "serai-processor"
version = "0.1.0"
description = "Multichain processor premised on canonicity to reach distributed consensus automatically"
license = "AGPL-3.0-only"
@@ -57,7 +57,10 @@ tokio = { version = "1", features = ["full"] }
serai-db = { path = "../common/db", default-features = false }
serai-client = { path = "../substrate/client", default-features = false }
messages = { package = "processor-messages", path = "./messages" }
messages = { package = "serai-processor-messages", path = "./messages" }
jsonrpsee = { version = "0.16", features = ["client"] }
message-queue = { package = "serai-message-queue", path = "../message-queue" }
[dev-dependencies]
futures = "0.3"

View File

@@ -1,5 +1,5 @@
[package]
name = "processor-messages"
name = "serai-processor-messages"
version = "0.1.0"
description = "Messages sent and received by the processor"
license = "AGPL-3.0-only"
@@ -22,6 +22,6 @@ bincode = "1"
dkg = { path = "../../crypto/dkg", features = ["serde"] }
serai-primitives = { path = "../../substrate/primitives" }
in-instructions-primitives = { path = "../../substrate/in-instructions/primitives" }
tokens-primitives = { path = "../../substrate/tokens/primitives" }
validator-sets-primitives = { path = "../../substrate/validator-sets/primitives" }
serai-in-instructions-primitives = { path = "../../substrate/in-instructions/primitives" }
serai-tokens-primitives = { path = "../../substrate/tokens/primitives" }
serai-validator-sets-primitives = { path = "../../substrate/validator-sets/primitives" }

View File

@@ -1,3 +1,4 @@
use sp_core::sr25519::Public;
use serai_runtime::{
primitives::{SeraiAddress, SubstrateAmount, Amount, Coin, Balance},
assets::{AssetDetails, AssetAccount},
@@ -42,14 +43,14 @@ impl Serai {
) -> Result<Amount, SeraiError> {
Ok(Amount(
self
.storage::<AssetAccount<SubstrateAmount, SubstrateAmount, ()>>(
.storage::<AssetAccount<SubstrateAmount, SubstrateAmount, (), Public>>(
"Assets",
"Account",
Some(vec![scale_value(coin), scale_value(address)]),
block,
)
.await?
.map(|account| account.balance)
.map(|account| account.balance())
.unwrap_or(0),
))
}

View File

@@ -1,5 +1,5 @@
[package]
name = "in-instructions-pallet"
name = "serai-in-instructions-pallet"
version = "0.1.0"
description = "Execute calls via In Instructions from unsigned transactions"
license = "AGPL-3.0-only"
@@ -24,10 +24,10 @@ frame-system = { git = "https://github.com/serai-dex/substrate", default-feature
frame-support = { git = "https://github.com/serai-dex/substrate", default-features = false }
serai-primitives = { path = "../../primitives", default-features = false }
in-instructions-primitives = { path = "../primitives", default-features = false }
in-instructions-primitives = { package = "serai-in-instructions-primitives", path = "../primitives", default-features = false }
tokens-pallet = { path = "../../tokens/pallet", default-features = false }
validator-sets-pallet = { path = "../../validator-sets/pallet", default-features = false }
tokens-pallet = { package = "serai-tokens-pallet", path = "../../tokens/pallet", default-features = false }
validator-sets-pallet = { package = "serai-validator-sets-pallet", path = "../../validator-sets/pallet", default-features = false }
[features]
std = [

View File

@@ -1,5 +1,5 @@
[package]
name = "in-instructions-primitives"
name = "serai-in-instructions-primitives"
version = "0.1.0"
description = "Serai instructions library, enabling encoding and decoding"
license = "MIT"
@@ -23,7 +23,7 @@ sp-std = { git = "https://github.com/serai-dex/substrate", default-features = fa
sp-runtime = { git = "https://github.com/serai-dex/substrate", default-features = false }
serai-primitives = { path = "../../primitives", default-features = false }
tokens-primitives = { path = "../../tokens/primitives", default-features = false }
tokens-primitives = { package = "serai-tokens-primitives", path = "../../tokens/primitives", default-features = false }
[features]
std = [

View File

@@ -144,7 +144,7 @@ pub fn new_partial(config: &Configuration) -> Result<PartialComponents, ServiceE
})
}
pub async fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceError> {
pub async fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {
let sc_service::PartialComponents {
client,
backend,
@@ -156,11 +156,15 @@ pub async fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceE
other: (block_import, babe_link, grandpa_link, shared_voter_state, mut telemetry),
} = new_partial(&config)?;
let publish_non_global_ips = config.network.allow_non_globals_in_dht;
let mut net_config = sc_network::config::FullNetworkConfiguration::new(&config.network);
let grandpa_protocol_name =
grandpa::protocol_standard_name(&client.block_hash(0).unwrap().unwrap(), &config.chain_spec);
net_config.add_notification_protocol(sc_consensus_grandpa::grandpa_peers_set_config(
grandpa_protocol_name.clone(),
));
let publish_non_global_ips = config.network.allow_non_globals_in_dht;
config.network.extra_sets.push(grandpa::grandpa_peers_set_config(grandpa_protocol_name.clone()));
let warp_sync = Arc::new(grandpa::warp_proof::NetworkProvider::new(
backend.clone(),
grandpa_link.shared_authority_set().clone(),
@@ -170,6 +174,7 @@ pub async fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceE
let (network, system_rpc_tx, tx_handler_controller, network_starter, sync_service) =
sc_service::build_network(sc_service::BuildNetworkParams {
config: &config,
net_config,
client: client.clone(),
transaction_pool: transaction_pool.clone(),
spawn_handle: task_manager.spawn_handle(),

View File

@@ -39,7 +39,7 @@ frame-support = { git = "https://github.com/serai-dex/substrate", default-featur
frame-executive = { git = "https://github.com/serai-dex/substrate", default-features = false }
frame-benchmarking = { git = "https://github.com/serai-dex/substrate", default-features = false, optional = true }
serai-primitives = { path = "..//primitives", default-features = false }
serai-primitives = { path = "../primitives", default-features = false }
pallet-timestamp = { git = "https://github.com/serai-dex/substrate", default-features = false }
@@ -47,10 +47,10 @@ pallet-balances = { git = "https://github.com/serai-dex/substrate", default-feat
pallet-assets = { git = "https://github.com/serai-dex/substrate", default-features = false }
pallet-transaction-payment = { git = "https://github.com/serai-dex/substrate", default-features = false }
tokens-pallet = { path = "../tokens/pallet", default-features = false }
in-instructions-pallet = { path = "../in-instructions/pallet", default-features = false }
tokens-pallet = { package = "serai-tokens-pallet", path = "../tokens/pallet", default-features = false }
in-instructions-pallet = { package = "serai-in-instructions-pallet", path = "../in-instructions/pallet", default-features = false }
validator-sets-pallet = { path = "../validator-sets/pallet", default-features = false }
validator-sets-pallet = { package = "serai-validator-sets-pallet", path = "../validator-sets/pallet", default-features = false }
pallet-session = { git = "https://github.com/serai-dex/substrate", default-features = false }
pallet-babe = { git = "https://github.com/serai-dex/substrate", default-features = false }
pallet-grandpa = { git = "https://github.com/serai-dex/substrate", default-features = false }

View File

@@ -1,5 +1,5 @@
[package]
name = "tokens-pallet"
name = "serai-tokens-pallet"
version = "0.1.0"
description = "Mint and burn Serai tokens"
license = "AGPL-3.0-only"
@@ -21,7 +21,7 @@ frame-support = { git = "https://github.com/serai-dex/substrate", default-featur
pallet-assets = { git = "https://github.com/serai-dex/substrate", default-features = false }
serai-primitives = { path = "../../primitives", default-features = false }
tokens-primitives = { path = "../primitives", default-features = false }
tokens-primitives = { package = "serai-tokens-primitives", path = "../primitives", default-features = false }
[features]
std = [

View File

@@ -1,5 +1,5 @@
[package]
name = "tokens-primitives"
name = "serai-tokens-primitives"
version = "0.1.0"
description = "Serai tokens primitives"
license = "MIT"

View File

@@ -1,5 +1,5 @@
[package]
name = "validator-sets-pallet"
name = "serai-validator-sets-pallet"
version = "0.1.0"
description = "Validator sets pallet"
license = "AGPL-3.0-only"
@@ -18,13 +18,14 @@ scale = { package = "parity-scale-codec", version = "3", default-features = fals
scale-info = { version = "2", default-features = false, features = ["derive"] }
sp-core = { git = "https://github.com/serai-dex/substrate", default-features = false }
sp-std = { git = "https://github.com/serai-dex/substrate", default-features = false }
sp-application-crypto = { git = "https://github.com/serai-dex/substrate", default-features = false }
frame-system = { git = "https://github.com/serai-dex/substrate", default-features = false }
frame-support = { git = "https://github.com/serai-dex/substrate", default-features = false }
serai-primitives = { path = "../../primitives", default-features = false }
validator-sets-primitives = { path = "../primitives", default-features = false }
validator-sets-primitives = { package = "serai-validator-sets-primitives", path = "../primitives", default-features = false }
[features]
std = [

View File

@@ -6,6 +6,7 @@ pub mod pallet {
use scale_info::TypeInfo;
use sp_core::sr25519::{Public, Signature};
use sp_std::vec::Vec;
use sp_application_crypto::RuntimePublic;
use frame_system::pallet_prelude::*;

View File

@@ -1,5 +1,5 @@
[package]
name = "validator-sets-primitives"
name = "serai-validator-sets-primitives"
version = "0.1.0"
description = "Primitives for validator sets"
license = "MIT"

View File

@@ -30,4 +30,5 @@ dkg = { path = "../../crypto/dkg", default-features = false }
# modular-frost = { path = "../../crypto/frost", default-features = false }
# frost-schnorrkel = { path = "../../crypto/schnorrkel", default-features = false }
monero-generators = { path = "../../coins/monero/generators", default-features = false, features = ["alloc"] }
monero-generators = { path = "../../coins/monero/generators", default-features = false }
monero-serai = { path = "../../coins/monero", default-features = false }