Mirror of https://github.com/serai-dex/serai.git (synced 2025-12-12 22:19:26 +00:00)

Compare commits: 36 commits, 19422de231 ... 22e411981a
| SHA1 |
|---|
| 22e411981a |
| 11d48d0685 |
| e4cc23b72d |
| 52d853c8ba |
| 9c33a711d7 |
| a275023cfc |
| 258c02ff39 |
| 3655dc723f |
| 315d4fb356 |
| 2bc880e372 |
| e9c1235b76 |
| dc1b8dfccd |
| d0201cf2e5 |
| f3d20e60b3 |
| dafba81b40 |
| 91f8ec53d9 |
| fc9a4a08b8 |
| 45fadb21ac |
| 28619fbee1 |
| bbe014c3a7 |
| fb3fadb3d3 |
| f481d20773 |
| 599b2dec8f |
| 435f1d9ae1 |
| d7ecab605e |
| 805fea52ec |
| 48db06f901 |
| e9d0a5e0ed |
| 44d05518aa |
| 23b433fe6c |
| 2e57168a97 |
| 5c6160c398 |
| 9eee1d971e |
| e6300847d6 |
| e0a3e7bea6 |
| cbebaa1349 |
.github/workflows/monero-tests.yaml (vendored, 5 changed lines)

```diff
@@ -39,9 +39,6 @@ jobs:
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib
 
       # Doesn't run unit tests with features as the tests workflow will
@@ -65,7 +62,6 @@ jobs:
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'
 
       - name: Run Integration Tests
         # Don't run if the the tests workflow also will
@@ -74,4 +70,3 @@ jobs:
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'
```
.github/workflows/networks-tests.yml (vendored, 3 changed lines)

```diff
@@ -46,7 +46,4 @@ jobs:
             -p monero-simple-request-rpc \
             -p monero-address \
             -p monero-wallet \
-            -p monero-seed \
-            -p polyseed \
-            -p monero-wallet-util \
             -p monero-serai-verify-chain
```
.github/workflows/pages.yml (vendored, 37 changed lines)

```diff
@@ -1,6 +1,7 @@
 # MIT License
 #
 # Copyright (c) 2022 just-the-docs
+# Copyright (c) 2022-2024 Luke Parker
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -20,31 +21,21 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
 # This workflow uses actions that are not certified by GitHub.
 # They are provided by a third-party and are governed by
 # separate terms of service, privacy policy, and support
 # documentation.
 
 # Sample workflow for building and deploying a Jekyll site to GitHub Pages
-name: Deploy Jekyll site to Pages
+name: Deploy Rust docs and Jekyll site to Pages
 
 on:
   push:
     branches:
       - "develop"
     paths:
       - "docs/**"
 
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
 # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
 permissions:
   contents: read
   pages: write
   id-token: write
 
-# Allow one concurrent deployment
+# Only allow one concurrent deployment
 concurrency:
   group: "pages"
   cancel-in-progress: true
@@ -53,9 +44,6 @@ jobs:
   # Build job
   build:
     runs-on: ubuntu-latest
-    defaults:
-      run:
-        working-directory: docs
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -69,11 +57,24 @@ jobs:
         id: pages
         uses: actions/configure-pages@v3
       - name: Build with Jekyll
-        run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
+        run: cd ${{ github.workspace }}/docs && bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
         env:
           JEKYLL_ENV: production
 
+      - name: Get nightly version to use
+        id: nightly
+        shell: bash
+        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+      - name: Buld Rust docs
+        run: |
+          rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c rust-docs
+          RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --all-features
+          mv target/doc docs/_site/rust
 
       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v1
+        uses: actions/upload-pages-artifact@v3
         with:
           path: "docs/_site/"
@@ -87,4 +88,4 @@ jobs:
     steps:
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v2
+        uses: actions/deploy-pages@v4
```
Cargo.lock (generated, 96 changed lines)

```diff
@@ -2470,6 +2470,7 @@ dependencies = [
 "hex",
 "pasta_curves",
 "rand_core",
 "std-shims",
 "subtle",
 "zeroize",
 ]
@@ -2570,6 +2571,7 @@ dependencies = [
 "hex-literal",
 "rand_core",
 "rustversion",
 "std-shims",
 "subtle",
 "zeroize",
 ]
@@ -3293,6 +3295,7 @@ dependencies = [
 "flexible-transcript",
 "multiexp",
 "rand_core",
 "std-shims",
 "zeroize",
 ]
 
@@ -3302,6 +3305,7 @@ version = "0.1.0"
 dependencies = [
 "ciphersuite",
 "generalized-bulletproofs",
 "std-shims",
 "zeroize",
 ]
 
@@ -3312,6 +3316,7 @@ dependencies = [
 "ciphersuite",
 "generalized-bulletproofs-circuit-abstraction",
 "generic-array 1.1.1",
 "std-shims",
 ]
 
 [[package]]
@@ -5625,19 +5630,6 @@ dependencies = [
 "zeroize",
 ]
 
-[[package]]
-name = "monero-seed"
-version = "0.1.0"
-dependencies = [
- "curve25519-dalek",
- "hex",
- "monero-primitives",
- "rand_core",
- "std-shims",
- "thiserror 2.0.9",
- "zeroize",
-]
 
 [[package]]
 name = "monero-serai"
 version = "0.1.4-alpha"
@@ -5712,21 +5704,6 @@ dependencies = [
 "zeroize",
 ]
 
-[[package]]
-name = "monero-wallet-util"
-version = "0.1.0"
-dependencies = [
- "curve25519-dalek",
- "hex",
- "monero-seed",
- "monero-wallet",
- "polyseed",
- "rand_core",
- "std-shims",
- "thiserror 2.0.9",
- "zeroize",
-]
 
 [[package]]
 name = "multiaddr"
 version = "0.18.1"
@@ -6473,17 +6450,6 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7924d1d0ad836f665c9065e26d016c673ece3993f30d340068b16f282afc1156"
 
-[[package]]
-name = "password-hash"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
-dependencies = [
- "base64ct",
- "rand_core",
- "subtle",
-]
 
 [[package]]
 name = "pasta_curves"
 version = "0.5.1"
@@ -6528,9 +6494,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
 dependencies = [
 "digest 0.10.7",
- "hmac",
- "password-hash",
- "sha2",
 ]
 
 [[package]]
@@ -6653,20 +6616,6 @@ dependencies = [
 "universal-hash",
 ]
 
-[[package]]
-name = "polyseed"
-version = "0.1.0"
-dependencies = [
- "hex",
- "pbkdf2 0.12.2",
- "rand_core",
- "sha3",
- "std-shims",
- "subtle",
- "thiserror 2.0.9",
- "zeroize",
-]
 
 [[package]]
 name = "polyval"
 version = "0.6.2"
@@ -8822,6 +8771,7 @@ dependencies = [
 "k256",
 "rand_core",
 "rustversion",
 "std-shims",
 "subtle",
 "zeroize",
 ]
@@ -9000,6 +8950,7 @@ dependencies = [
 "serai-coins-primitives",
 "serai-primitives",
 "sp-core",
 "sp-io",
 "sp-runtime",
 "sp-std",
 ]
@@ -9182,11 +9133,19 @@ version = "0.1.0"
 dependencies = [
 "frame-support",
 "frame-system",
 "pallet-babe",
 "pallet-grandpa",
 "pallet-timestamp",
 "parity-scale-codec",
 "scale-info",
 "serai-coins-pallet",
 "serai-dex-pallet",
 "serai-primitives",
 "serai-validator-sets-pallet",
 "sp-consensus-babe",
 "sp-core",
 "sp-io",
 "sp-runtime",
 ]
 
 [[package]]
@@ -9325,10 +9284,14 @@ dependencies = [
 "bitvec",
 "frame-support",
 "frame-system",
 "pallet-babe",
 "pallet-grandpa",
 "pallet-timestamp",
 "parity-scale-codec",
 "scale-info",
 "serai-coins-pallet",
 "serai-dex-pallet",
 "serai-economic-security-pallet",
 "serai-emissions-pallet",
 "serai-genesis-liquidity-pallet",
 "serai-in-instructions-primitives",
@@ -9432,19 +9395,27 @@ dependencies = [
 "dalek-ff-group",
 "dkg",
 "dleq",
 "ec-divisors",
 "embedwards25519",
 "flexible-transcript",
 "generalized-bulletproofs",
 "generalized-bulletproofs-circuit-abstraction",
 "generalized-bulletproofs-ec-gadgets",
 "minimal-ed448",
-"monero-wallet-util",
+"monero-wallet",
 "multiexp",
 "schnorr-signatures",
 "secq256k1",
 ]
 
 [[package]]
 name = "serai-node"
 version = "0.1.0"
 dependencies = [
 "bitcoin-serai",
 "ciphersuite",
 "clap",
 "curve25519-dalek",
 "embedwards25519",
 "frame-benchmarking",
 "futures-util",
@@ -9452,7 +9423,9 @@ dependencies = [
 "jsonrpsee",
 "libp2p 0.52.4",
 "log",
 "monero-address",
 "pallet-transaction-payment-rpc",
 "parity-scale-codec",
 "rand_core",
 "sc-authority-discovery",
 "sc-basic-authorship",
@@ -9519,6 +9492,7 @@ dependencies = [
 "sp-core",
 "sp-io",
 "sp-runtime",
 "sp-std",
 "zeroize",
 ]
 
@@ -9907,24 +9881,32 @@ name = "serai-validator-sets-pallet"
 version = "0.1.0"
 dependencies = [
 "bitvec",
 "ciphersuite",
 "frame-support",
 "frame-system",
 "frost-schnorrkel",
 "modular-frost",
 "pallet-babe",
 "pallet-grandpa",
 "pallet-timestamp",
 "parity-scale-codec",
 "rand_core",
 "scale-info",
 "serai-coins-pallet",
 "serai-dex-pallet",
 "serai-primitives",
 "serai-validator-sets-primitives",
 "serde",
 "sp-api",
 "sp-application-crypto",
 "sp-consensus-babe",
 "sp-core",
 "sp-io",
 "sp-runtime",
 "sp-session",
 "sp-staking",
 "sp-std",
 "zeroize",
 ]
 
 [[package]]
```
Cargo.toml (workspace manifest)

```diff
@@ -64,9 +64,6 @@ members = [
   "networks/monero/rpc/simple-request",
   "networks/monero/wallet/address",
   "networks/monero/wallet",
-  "networks/monero/wallet/seed",
-  "networks/monero/wallet/polyseed",
-  "networks/monero/wallet/util",
   "networks/monero/verify-chain",
 
   "message-queue",
@@ -208,9 +205,6 @@ matches = { path = "patches/matches" }
 option-ext = { path = "patches/option-ext" }
 directories-next = { path = "patches/directories-next" }
 
-# The official pasta_curves repo doesn't support Zeroize
-pasta_curves = { git = "https://github.com/kayabaNerve/pasta_curves", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616" }
 
 [workspace.lints.clippy]
 unwrap_or_default = "allow"
 map_unwrap_or = "allow"
```
LICENSE (2 changed lines)

```diff
@@ -5,4 +5,4 @@ a full copy of the AGPL-3.0 License is included in the root of this repository
 as a reference text. This copy should be provided with any distribution of a
 crate licensed under the AGPL-3.0, as per its terms.
 
-The GitHub actions (`.github/actions`) are licensed under the MIT license.
+The GitHub actions/workflows (`.github`) are licensed under the MIT license.
```
```diff
@@ -3,7 +3,7 @@ use std::{sync::Arc, collections::HashMap};
 
 use serai_client::{
   primitives::{SeraiAddress, Amount},
-  validator_sets::primitives::ValidatorSet,
+  validator_sets::primitives::ExternalValidatorSet,
   Serai,
 };
@@ -28,7 +28,7 @@ db_channel! {
   CosignIntendChannels {
     GlobalSessionsChannel: () -> ([u8; 32], GlobalSession),
     BlockEvents: () -> BlockEventData,
-    IntendedCosigns: (set: ValidatorSet) -> CosignIntent,
+    IntendedCosigns: (set: ExternalValidatorSet) -> CosignIntent,
   }
 }
@@ -110,7 +110,7 @@ impl<D: Db> ContinuallyRan for CosignIntendTask<D> {
       keys.insert(set.network, SeraiAddress::from(*key));
       let stake = serai
         .validator_sets()
-        .total_allocated_stake(set.network)
+        .total_allocated_stake(set.network.into())
         .await
         .map_err(|e| format!("{e:?}"))?
         .unwrap_or(Amount(0))
```
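The recurring change in this hunk and the ones below is the narrowing of `NetworkId`/`ValidatorSet` (which include the Serai network itself) to `ExternalNetworkId`/`ExternalValidatorSet` (external chains only), with `.into()` for infallible widening and `try_from` for fallible narrowing. A minimal sketch of that relationship; the enum bodies here are assumptions, not the crates' exact definitions:

```rust
// Sketch only: the real types live in serai-client's primitives.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum NetworkId { Serai, Bitcoin, Ethereum, Monero }

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum ExternalNetworkId { Bitcoin, Ethereum, Monero }

// Narrowing is fallible: the Serai network is not an external network.
impl TryFrom<NetworkId> for ExternalNetworkId {
  type Error = ();
  fn try_from(id: NetworkId) -> Result<Self, ()> {
    match id {
      NetworkId::Serai => Err(()),
      NetworkId::Bitcoin => Ok(Self::Bitcoin),
      NetworkId::Ethereum => Ok(Self::Ethereum),
      NetworkId::Monero => Ok(Self::Monero),
    }
  }
}

// Widening is infallible, hence the `network.into()` call sites in the diff.
impl From<ExternalNetworkId> for NetworkId {
  fn from(id: ExternalNetworkId) -> Self {
    match id {
      ExternalNetworkId::Bitcoin => Self::Bitcoin,
      ExternalNetworkId::Ethereum => Self::Ethereum,
      ExternalNetworkId::Monero => Self::Monero,
    }
  }
}

fn main() {
  assert!(ExternalNetworkId::try_from(NetworkId::Serai).is_err());
  assert_eq!(NetworkId::from(ExternalNetworkId::Monero), NetworkId::Monero);
}
```

This is why the repeated `if network == NetworkId::Serai { continue; }` guards disappear below: iterating `EXTERNAL_NETWORKS` makes "external only" a property of the type rather than a runtime check.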
```diff
@@ -11,8 +11,8 @@ use scale::{Encode, Decode};
 use borsh::{BorshSerialize, BorshDeserialize};
 
 use serai_client::{
-  primitives::{NetworkId, SeraiAddress},
-  validator_sets::primitives::{Session, ValidatorSet, KeyPair},
+  primitives::{ExternalNetworkId, SeraiAddress},
+  validator_sets::primitives::{Session, ExternalValidatorSet, KeyPair},
   Public, Block, Serai, TemporalSerai,
 };
@@ -52,13 +52,13 @@ pub const COSIGN_CONTEXT: &[u8] = b"/serai/coordinator/cosign";
 #[derive(Debug, BorshSerialize, BorshDeserialize)]
 pub(crate) struct GlobalSession {
   pub(crate) start_block_number: u64,
-  pub(crate) sets: Vec<ValidatorSet>,
-  pub(crate) keys: HashMap<NetworkId, SeraiAddress>,
-  pub(crate) stakes: HashMap<NetworkId, u64>,
+  pub(crate) sets: Vec<ExternalValidatorSet>,
+  pub(crate) keys: HashMap<ExternalNetworkId, SeraiAddress>,
+  pub(crate) stakes: HashMap<ExternalNetworkId, u64>,
   pub(crate) total_stake: u64,
 }
 impl GlobalSession {
-  fn id(mut cosigners: Vec<ValidatorSet>) -> [u8; 32] {
+  fn id(mut cosigners: Vec<ExternalValidatorSet>) -> [u8; 32] {
     cosigners.sort_by_key(|a| borsh::to_vec(a).unwrap());
     Blake2s256::digest(borsh::to_vec(&cosigners).unwrap()).into()
   }
@@ -101,12 +101,12 @@ pub struct Cosign {
   /// The hash of the block to cosign.
   pub block_hash: [u8; 32],
   /// The actual cosigner.
-  pub cosigner: NetworkId,
+  pub cosigner: ExternalNetworkId,
 }
 
 impl CosignIntent {
   /// Convert this into a `Cosign`.
-  pub fn into_cosign(self, cosigner: NetworkId) -> Cosign {
+  pub fn into_cosign(self, cosigner: ExternalNetworkId) -> Cosign {
     let CosignIntent { global_session, block_number, block_hash, notable: _ } = self;
     Cosign { global_session, block_number, block_hash, cosigner }
   }
@@ -166,7 +166,10 @@ create_db! {
     // one notable block. All validator sets will explicitly produce a cosign for their notable
     // block, causing the latest cosigned block for a global session to either be the global
     // session's notable cosigns or the network's latest cosigns.
-    NetworksLatestCosignedBlock: (global_session: [u8; 32], network: NetworkId) -> SignedCosign,
+    NetworksLatestCosignedBlock: (
+      global_session: [u8; 32],
+      network: ExternalNetworkId
+    ) -> SignedCosign,
     // Cosigns received for blocks not locally recognized as finalized.
     Faults: (global_session: [u8; 32]) -> Vec<SignedCosign>,
     // The global session which faulted.
@@ -177,15 +180,10 @@ create_db! {
 /// Fetch the keys used for cosigning by a specific network.
 async fn keys_for_network(
   serai: &TemporalSerai<'_>,
-  network: NetworkId,
+  network: ExternalNetworkId,
 ) -> Result<Option<(Session, KeyPair)>, String> {
-  // The Serai network never cosigns so it has no keys for cosigning
-  if network == NetworkId::Serai {
-    return Ok(None);
-  }
 
   let Some(latest_session) =
-    serai.validator_sets().session(network).await.map_err(|e| format!("{e:?}"))?
+    serai.validator_sets().session(network.into()).await.map_err(|e| format!("{e:?}"))?
   else {
     // If this network hasn't had a session declared, move on
     return Ok(None);
@@ -194,7 +192,7 @@ async fn keys_for_network(
   // Get the keys for the latest session
   if let Some(keys) = serai
     .validator_sets()
-    .keys(ValidatorSet { network, session: latest_session })
+    .keys(ExternalValidatorSet { network, session: latest_session })
     .await
     .map_err(|e| format!("{e:?}"))?
   {
@@ -205,7 +203,7 @@ async fn keys_for_network(
   if let Some(prior_session) = latest_session.0.checked_sub(1).map(Session) {
     if let Some(keys) = serai
       .validator_sets()
-      .keys(ValidatorSet { network, session: prior_session })
+      .keys(ExternalValidatorSet { network, session: prior_session })
       .await
       .map_err(|e| format!("{e:?}"))?
     {
@@ -216,16 +214,19 @@ async fn keys_for_network(
   Ok(None)
 }
 
-/// Fetch the `ValidatorSet`s, and their associated keys, used for cosigning as of this block.
-async fn cosigning_sets(serai: &TemporalSerai<'_>) -> Result<Vec<(ValidatorSet, Public)>, String> {
-  let mut sets = Vec::with_capacity(serai_client::primitives::NETWORKS.len());
-  for network in serai_client::primitives::NETWORKS {
+/// Fetch the `ExternalValidatorSet`s, and their associated keys, used for cosigning as of this
+/// block.
+async fn cosigning_sets(
+  serai: &TemporalSerai<'_>,
+) -> Result<Vec<(ExternalValidatorSet, Public)>, String> {
+  let mut sets = Vec::with_capacity(serai_client::primitives::EXTERNAL_NETWORKS.len());
+  for network in serai_client::primitives::EXTERNAL_NETWORKS {
     let Some((session, keys)) = keys_for_network(serai, network).await? else {
       // If this network doesn't have usable keys, move on
       continue;
     };
 
-    sets.push((ValidatorSet { network, session }, keys.0));
+    sets.push((ExternalValidatorSet { network, session }, keys.0));
   }
   Ok(sets)
 }
@@ -345,8 +346,8 @@ impl<D: Db> Cosigning<D> {
   /// If this global session hasn't produced any notable cosigns, this will return the latest
   /// cosigns for this session.
   pub fn notable_cosigns(getter: &impl Get, global_session: [u8; 32]) -> Vec<SignedCosign> {
-    let mut cosigns = Vec::with_capacity(serai_client::primitives::NETWORKS.len());
-    for network in serai_client::primitives::NETWORKS {
+    let mut cosigns = Vec::with_capacity(serai_client::primitives::EXTERNAL_NETWORKS.len());
+    for network in serai_client::primitives::EXTERNAL_NETWORKS {
       if let Some(cosign) = NetworksLatestCosignedBlock::get(getter, global_session, network) {
         cosigns.push(cosign);
       }
@@ -363,7 +364,7 @@ impl<D: Db> Cosigning<D> {
       let mut cosigns = Faults::get(&self.db, faulted).expect("faulted with no faults");
       // Also include all of our recognized-as-honest cosigns in an attempt to induce fault
       // identification in those who see the faulty cosigns as honest
-      for network in serai_client::primitives::NETWORKS {
+      for network in serai_client::primitives::EXTERNAL_NETWORKS {
         if let Some(cosign) = NetworksLatestCosignedBlock::get(&self.db, faulted, network) {
           if cosign.cosign.global_session == faulted {
             cosigns.push(cosign);
@@ -375,8 +376,8 @@ impl<D: Db> Cosigning<D> {
       let Some(global_session) = evaluator::currently_evaluated_global_session(&self.db) else {
         return vec![];
       };
-      let mut cosigns = Vec::with_capacity(serai_client::primitives::NETWORKS.len());
-      for network in serai_client::primitives::NETWORKS {
+      let mut cosigns = Vec::with_capacity(serai_client::primitives::EXTERNAL_NETWORKS.len());
+      for network in serai_client::primitives::EXTERNAL_NETWORKS {
         if let Some(cosign) = NetworksLatestCosignedBlock::get(&self.db, global_session, network) {
           cosigns.push(cosign);
         }
@@ -487,12 +488,12 @@ impl<D: Db> Cosigning<D> {
     Ok(())
   }
 
-  /// Receive intended cosigns to produce for this ValidatorSet.
+  /// Receive intended cosigns to produce for this ExternalValidatorSet.
   ///
   /// All cosigns intended, up to and including the next notable cosign, are returned.
   ///
   /// This will drain the internal channel and not re-yield these intentions again.
-  pub fn intended_cosigns(txn: &mut impl DbTxn, set: ValidatorSet) -> Vec<CosignIntent> {
+  pub fn intended_cosigns(txn: &mut impl DbTxn, set: ExternalValidatorSet) -> Vec<CosignIntent> {
     let mut res: Vec<CosignIntent> = vec![];
     // While we have yet to find a notable cosign...
     while !res.last().map(|cosign| cosign.notable).unwrap_or(false) {
```
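`GlobalSession::id` above derives an order-independent identifier by sorting the cosigners by their canonical borsh encoding before hashing. A small sketch of the same construction, assuming the `blake2` and `borsh` crates visible in the diff; the tuple element is a stand-in for `ExternalValidatorSet`:

```rust
use blake2::{Blake2s256, Digest};

// (network, session) stands in for a borsh-serializable ExternalValidatorSet.
fn global_session_id(mut cosigners: Vec<(u8, u32)>) -> [u8; 32] {
  // Sort by canonical encoding so every ordering of the same sets hashes alike.
  cosigners.sort_by_key(|a| borsh::to_vec(a).unwrap());
  Blake2s256::digest(borsh::to_vec(&cosigners).unwrap()).into()
}

fn main() {
  let a = global_session_id(vec![(0, 1), (1, 7)]);
  let b = global_session_id(vec![(1, 7), (0, 1)]);
  assert_eq!(a, b);
}
```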
```diff
@@ -14,8 +14,8 @@ use zeroize::Zeroizing;
 use schnorrkel::Keypair;
 
 use serai_client::{
-  primitives::{NetworkId, PublicKey},
-  validator_sets::primitives::ValidatorSet,
+  primitives::{ExternalNetworkId, PublicKey},
+  validator_sets::primitives::ExternalValidatorSet,
   Serai,
 };
@@ -104,7 +104,7 @@ impl serai_coordinator_p2p::Peer<'_> for Peer<'_> {
 
 #[derive(Clone)]
 struct Peers {
-  peers: Arc<RwLock<HashMap<NetworkId, HashSet<PeerId>>>>,
+  peers: Arc<RwLock<HashMap<ExternalNetworkId, HashSet<PeerId>>>>,
 }
 
 // Consider adding identify/kad/autonat/rendevous/(relay + dcutr). While we currently use the Serai
@@ -135,7 +135,8 @@ struct Libp2pInner {
   signed_cosigns: Mutex<mpsc::UnboundedReceiver<SignedCosign>>,
   signed_cosigns_send: mpsc::UnboundedSender<SignedCosign>,
 
-  heartbeat_requests: Mutex<mpsc::UnboundedReceiver<(InboundRequestId, ValidatorSet, [u8; 32])>>,
+  heartbeat_requests:
+    Mutex<mpsc::UnboundedReceiver<(InboundRequestId, ExternalValidatorSet, [u8; 32])>>,
   notable_cosign_requests: Mutex<mpsc::UnboundedReceiver<(InboundRequestId, [u8; 32])>>,
   inbound_request_responses: mpsc::UnboundedSender<(InboundRequestId, Response)>,
 }
@@ -312,7 +313,7 @@ impl serai_cosign::RequestNotableCosigns for Libp2p {
 impl serai_coordinator_p2p::P2p for Libp2p {
   type Peer<'a> = Peer<'a>;
 
-  fn peers(&self, network: NetworkId) -> impl Send + Future<Output = Vec<Self::Peer<'_>>> {
+  fn peers(&self, network: ExternalNetworkId) -> impl Send + Future<Output = Vec<Self::Peer<'_>>> {
     async move {
       let Some(peer_ids) = self.0.peers.peers.read().await.get(&network).cloned() else {
         return vec![];
```
```diff
@@ -6,7 +6,7 @@ use std::{
 
 use borsh::BorshDeserialize;
 
-use serai_client::validator_sets::primitives::ValidatorSet;
+use serai_client::validator_sets::primitives::ExternalValidatorSet;
 
 use tokio::sync::{mpsc, oneshot, RwLock};
 
@@ -68,7 +68,7 @@ pub(crate) struct SwarmTask {
   outbound_request_responses: HashMap<OutboundRequestId, oneshot::Sender<Response>>,
 
   inbound_request_response_channels: HashMap<InboundRequestId, ResponseChannel<Response>>,
-  heartbeat_requests: mpsc::UnboundedSender<(InboundRequestId, ValidatorSet, [u8; 32])>,
+  heartbeat_requests: mpsc::UnboundedSender<(InboundRequestId, ExternalValidatorSet, [u8; 32])>,
   notable_cosign_requests: mpsc::UnboundedSender<(InboundRequestId, [u8; 32])>,
   inbound_request_responses: mpsc::UnboundedReceiver<(InboundRequestId, Response)>,
 }
@@ -324,7 +324,7 @@ impl SwarmTask {
 
     outbound_requests: mpsc::UnboundedReceiver<(PeerId, Request, oneshot::Sender<Response>)>,
 
-    heartbeat_requests: mpsc::UnboundedSender<(InboundRequestId, ValidatorSet, [u8; 32])>,
+    heartbeat_requests: mpsc::UnboundedSender<(InboundRequestId, ExternalValidatorSet, [u8; 32])>,
     notable_cosign_requests: mpsc::UnboundedSender<(InboundRequestId, [u8; 32])>,
     inbound_request_responses: mpsc::UnboundedReceiver<(InboundRequestId, Response)>,
   ) {
```
```diff
@@ -4,7 +4,9 @@ use std::{
   collections::{HashSet, HashMap},
 };
 
-use serai_client::{primitives::NetworkId, validator_sets::primitives::Session, SeraiError, Serai};
+use serai_client::{
+  primitives::ExternalNetworkId, validator_sets::primitives::Session, SeraiError, Serai,
+};
 
 use serai_task::{Task, ContinuallyRan};
 
@@ -24,11 +26,11 @@ pub(crate) struct Validators {
   serai: Arc<Serai>,
 
   // A cache for which session we're populated with the validators of
-  sessions: HashMap<NetworkId, Session>,
+  sessions: HashMap<ExternalNetworkId, Session>,
   // The validators by network
-  by_network: HashMap<NetworkId, HashSet<PeerId>>,
+  by_network: HashMap<ExternalNetworkId, HashSet<PeerId>>,
   // The validators and their networks
-  validators: HashMap<PeerId, HashSet<NetworkId>>,
+  validators: HashMap<PeerId, HashSet<ExternalNetworkId>>,
 
   // The channel to send the changes down
   changes: mpsc::UnboundedSender<Changes>,
@@ -49,8 +51,8 @@ impl Validators {
 
   async fn session_changes(
     serai: impl Borrow<Serai>,
-    sessions: impl Borrow<HashMap<NetworkId, Session>>,
-  ) -> Result<Vec<(NetworkId, Session, HashSet<PeerId>)>, SeraiError> {
+    sessions: impl Borrow<HashMap<ExternalNetworkId, Session>>,
+  ) -> Result<Vec<(ExternalNetworkId, Session, HashSet<PeerId>)>, SeraiError> {
     /*
       This uses the latest finalized block, not the latest cosigned block, which should be fine as
       in the worst case, we'd connect to unexpected validators. They still shouldn't be able to
@@ -67,13 +69,10 @@ impl Validators {
     // FuturesUnordered can be bad practice as it'll cause timeouts if infrequently polled, but
     // we poll it till it yields all futures with the most minimal processing possible
     let mut futures = FuturesUnordered::new();
-    for network in serai_client::primitives::NETWORKS {
-      if network == NetworkId::Serai {
-        continue;
-      }
+    for network in serai_client::primitives::EXTERNAL_NETWORKS {
       let sessions = sessions.borrow();
       futures.push(async move {
-        let session = match temporal_serai.session(network).await {
+        let session = match temporal_serai.session(network.into()).await {
           Ok(Some(session)) => session,
           Ok(None) => return Ok(None),
           Err(e) => return Err(e),
@@ -82,7 +81,7 @@ impl Validators {
         if sessions.get(&network) == Some(&session) {
           Ok(None)
         } else {
-          match temporal_serai.active_network_validators(network).await {
+          match temporal_serai.active_network_validators(network.into()).await {
             Ok(validators) => Ok(Some((
               network,
               session,
@@ -105,7 +104,7 @@ impl Validators {
 
   fn incorporate_session_changes(
     &mut self,
-    session_changes: Vec<(NetworkId, Session, HashSet<PeerId>)>,
+    session_changes: Vec<(ExternalNetworkId, Session, HashSet<PeerId>)>,
   ) {
     let mut removed = HashSet::new();
     let mut added = HashSet::new();
@@ -160,11 +159,11 @@ impl Validators {
     Ok(())
   }
 
-  pub(crate) fn by_network(&self) -> &HashMap<NetworkId, HashSet<PeerId>> {
+  pub(crate) fn by_network(&self) -> &HashMap<ExternalNetworkId, HashSet<PeerId>> {
     &self.by_network
   }
 
-  pub(crate) fn networks(&self, peer_id: &PeerId) -> Option<&HashSet<NetworkId>> {
+  pub(crate) fn networks(&self, peer_id: &PeerId) -> Option<&HashSet<ExternalNetworkId>> {
     self.validators.get(peer_id)
   }
 }
```
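`session_changes` and `incorporate_session_changes` above form a poll-and-diff cache: remember the last `Session` per network, and only re-fetch validators for networks whose session advanced. A std-only sketch of the core comparison, with simplified types and hypothetical names:

```rust
use std::collections::HashMap;

type Network = &'static str; // stand-in for ExternalNetworkId
type Session = u32;

// Returns the networks whose session differs from the cached one; the caller
// would then fetch those networks' validators and update the cache.
fn session_changes(
  cache: &HashMap<Network, Session>,
  current: &[(Network, Session)],
) -> Vec<(Network, Session)> {
  current
    .iter()
    .filter(|(network, session)| cache.get(network) != Some(session))
    .copied()
    .collect()
}

fn main() {
  let cache = HashMap::from([("Bitcoin", 3), ("Monero", 5)]);
  let current = [("Bitcoin", 3), ("Monero", 6), ("Ethereum", 0)];
  // Monero rotated (5 -> 6) and Ethereum is newly seen; Bitcoin is unchanged.
  assert_eq!(session_changes(&cache, &current), vec![("Monero", 6), ("Ethereum", 0)]);
}
```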
```diff
@@ -1,7 +1,7 @@
 use core::future::Future;
 use std::time::{Duration, SystemTime};
 
-use serai_client::validator_sets::primitives::{MAX_KEY_SHARES_PER_SET, ValidatorSet};
+use serai_client::validator_sets::primitives::{MAX_KEY_SHARES_PER_SET, ExternalValidatorSet};
 
 use futures_lite::FutureExt;
 
@@ -38,7 +38,7 @@ pub const BATCH_SIZE_LIMIT: usize = MIN_BLOCKS_PER_BATCH *
 /// If the other validator has more blocks then we do, they're expected to inform us. This forms
 /// the sync protocol for our Tributaries.
 pub(crate) struct HeartbeatTask<TD: Db, Tx: TransactionTrait, P: P2p> {
-  pub(crate) set: ValidatorSet,
+  pub(crate) set: ExternalValidatorSet,
   pub(crate) tributary: Tributary<TD, Tx, P>,
   pub(crate) reader: TributaryReader<TD, Tx>,
   pub(crate) p2p: P,
```
```diff
@@ -7,7 +7,7 @@ use std::collections::HashMap;
 
 use borsh::{BorshSerialize, BorshDeserialize};
 
-use serai_client::{primitives::NetworkId, validator_sets::primitives::ValidatorSet};
+use serai_client::{primitives::ExternalNetworkId, validator_sets::primitives::ExternalValidatorSet};
 
 use serai_db::Db;
 use tributary_sdk::{ReadWrite, TransactionTrait, Tributary, TributaryReader};
@@ -25,7 +25,7 @@ use crate::heartbeat::HeartbeatTask;
 #[derive(Clone, Copy, BorshSerialize, BorshDeserialize, Debug)]
 pub struct Heartbeat {
   /// The Tributary this is the heartbeat of.
-  pub set: ValidatorSet,
+  pub set: ExternalValidatorSet,
   /// The hash of the latest block added to the Tributary.
   pub latest_block_hash: [u8; 32],
 }
@@ -56,7 +56,7 @@ pub trait P2p:
   type Peer<'a>: Peer<'a>;
 
   /// Fetch the peers for this network.
-  fn peers(&self, network: NetworkId) -> impl Send + Future<Output = Vec<Self::Peer<'_>>>;
+  fn peers(&self, network: ExternalNetworkId) -> impl Send + Future<Output = Vec<Self::Peer<'_>>>;
 
   /// Broadcast a cosign.
   fn publish_cosign(&self, cosign: SignedCosign) -> impl Send + Future<Output = ()>;
@@ -131,13 +131,13 @@ fn handle_heartbeat<D: Db, T: TransactionTrait>(
 pub async fn run<TD: Db, Tx: TransactionTrait, P: P2p>(
   db: impl Db,
   p2p: P,
-  mut add_tributary: mpsc::UnboundedReceiver<(ValidatorSet, Tributary<TD, Tx, P>)>,
-  mut retire_tributary: mpsc::UnboundedReceiver<ValidatorSet>,
+  mut add_tributary: mpsc::UnboundedReceiver<(ExternalValidatorSet, Tributary<TD, Tx, P>)>,
+  mut retire_tributary: mpsc::UnboundedReceiver<ExternalValidatorSet>,
   send_cosigns: mpsc::UnboundedSender<SignedCosign>,
 ) {
-  let mut readers = HashMap::<ValidatorSet, TributaryReader<TD, Tx>>::new();
+  let mut readers = HashMap::<ExternalValidatorSet, TributaryReader<TD, Tx>>::new();
   let mut tributaries = HashMap::<[u8; 32], mpsc::UnboundedSender<Vec<u8>>>::new();
-  let mut heartbeat_tasks = HashMap::<ValidatorSet, _>::new();
+  let mut heartbeat_tasks = HashMap::<ExternalValidatorSet, _>::new();
 
   loop {
     tokio::select! {
```
```diff
@@ -6,8 +6,8 @@ use serai_db::{create_db, db_channel};
 use dkg::Participant;
 
 use serai_client::{
-  primitives::NetworkId,
-  validator_sets::primitives::{Session, ValidatorSet, KeyPair},
+  primitives::ExternalNetworkId,
+  validator_sets::primitives::{Session, ExternalValidatorSet, KeyPair},
 };
 
 use serai_cosign::SignedCosign;
@@ -43,22 +43,21 @@ pub(crate) fn coordinator_db() -> Db {
   db(&format!("{root_path}/coordinator/db"))
 }
 
-fn tributary_db_folder(set: ValidatorSet) -> String {
+fn tributary_db_folder(set: ExternalValidatorSet) -> String {
   let root_path = serai_env::var("DB_PATH").expect("path to DB wasn't specified");
   let network = match set.network {
-    NetworkId::Serai => panic!("creating Tributary for the Serai network"),
-    NetworkId::Bitcoin => "Bitcoin",
-    NetworkId::Ethereum => "Ethereum",
-    NetworkId::Monero => "Monero",
+    ExternalNetworkId::Bitcoin => "Bitcoin",
+    ExternalNetworkId::Ethereum => "Ethereum",
+    ExternalNetworkId::Monero => "Monero",
   };
   format!("{root_path}/tributary-{network}-{}", set.session.0)
 }
 
-pub(crate) fn tributary_db(set: ValidatorSet) -> Db {
+pub(crate) fn tributary_db(set: ExternalValidatorSet) -> Db {
   db(&format!("{}/db", tributary_db_folder(set)))
 }
 
-pub(crate) fn prune_tributary_db(set: ValidatorSet) {
+pub(crate) fn prune_tributary_db(set: ExternalValidatorSet) {
   log::info!("pruning data directory for tributary {set:?}");
   let db = tributary_db_folder(set);
   if fs::exists(&db).expect("couldn't check if tributary DB exists") {
@@ -73,15 +72,15 @@ create_db! {
     // The latest Tributary to have been retired for a network
     // Since Tributaries are retired sequentially, this is informative to if any Tributary has been
     // retired
-    RetiredTributary: (network: NetworkId) -> Session,
+    RetiredTributary: (network: ExternalNetworkId) -> Session,
     // The last handled message from a Processor
-    LastProcessorMessage: (network: NetworkId) -> u64,
+    LastProcessorMessage: (network: ExternalNetworkId) -> u64,
     // Cosigns we produced and tried to intake yet incurred an error while doing so
     ErroneousCosigns: () -> Vec<SignedCosign>,
     // The keys to confirm and set on the Serai network
-    KeysToConfirm: (set: ValidatorSet) -> KeyPair,
+    KeysToConfirm: (set: ExternalValidatorSet) -> KeyPair,
     // The key was set on the Serai network
-    KeySet: (set: ValidatorSet) -> (),
+    KeySet: (set: ExternalValidatorSet) -> (),
   }
 }
@@ -90,7 +89,7 @@ db_channel! {
     // Cosigns we produced
     SignedCosigns: () -> SignedCosign,
     // Tributaries to clean up upon reboot
-    TributaryCleanup: () -> ValidatorSet,
+    TributaryCleanup: () -> ExternalValidatorSet,
   }
 }
@@ -100,50 +99,50 @@ mod _internal_db {
   db_channel! {
     Coordinator {
       // Tributary transactions to publish from the Processor messages
-      TributaryTransactionsFromProcessorMessages: (set: ValidatorSet) -> Transaction,
+      TributaryTransactionsFromProcessorMessages: (set: ExternalValidatorSet) -> Transaction,
       // Tributary transactions to publish from the DKG confirmation task
-      TributaryTransactionsFromDkgConfirmation: (set: ValidatorSet) -> Transaction,
+      TributaryTransactionsFromDkgConfirmation: (set: ExternalValidatorSet) -> Transaction,
       // Participants to remove
-      RemoveParticipant: (set: ValidatorSet) -> Participant,
+      RemoveParticipant: (set: ExternalValidatorSet) -> Participant,
     }
   }
 }
 
 pub(crate) struct TributaryTransactionsFromProcessorMessages;
 impl TributaryTransactionsFromProcessorMessages {
-  pub(crate) fn send(txn: &mut impl DbTxn, set: ValidatorSet, tx: &Transaction) {
+  pub(crate) fn send(txn: &mut impl DbTxn, set: ExternalValidatorSet, tx: &Transaction) {
     // If this set has yet to be retired, send this transaction
     if RetiredTributary::get(txn, set.network).map(|session| session.0) < Some(set.session.0) {
       _internal_db::TributaryTransactionsFromProcessorMessages::send(txn, set, tx);
     }
   }
-  pub(crate) fn try_recv(txn: &mut impl DbTxn, set: ValidatorSet) -> Option<Transaction> {
+  pub(crate) fn try_recv(txn: &mut impl DbTxn, set: ExternalValidatorSet) -> Option<Transaction> {
    _internal_db::TributaryTransactionsFromProcessorMessages::try_recv(txn, set)
  }
 }
 
 pub(crate) struct TributaryTransactionsFromDkgConfirmation;
 impl TributaryTransactionsFromDkgConfirmation {
-  pub(crate) fn send(txn: &mut impl DbTxn, set: ValidatorSet, tx: &Transaction) {
+  pub(crate) fn send(txn: &mut impl DbTxn, set: ExternalValidatorSet, tx: &Transaction) {
     // If this set has yet to be retired, send this transaction
     if RetiredTributary::get(txn, set.network).map(|session| session.0) < Some(set.session.0) {
       _internal_db::TributaryTransactionsFromDkgConfirmation::send(txn, set, tx);
     }
   }
-  pub(crate) fn try_recv(txn: &mut impl DbTxn, set: ValidatorSet) -> Option<Transaction> {
+  pub(crate) fn try_recv(txn: &mut impl DbTxn, set: ExternalValidatorSet) -> Option<Transaction> {
     _internal_db::TributaryTransactionsFromDkgConfirmation::try_recv(txn, set)
   }
 }
 
 pub(crate) struct RemoveParticipant;
 impl RemoveParticipant {
-  pub(crate) fn send(txn: &mut impl DbTxn, set: ValidatorSet, participant: Participant) {
+  pub(crate) fn send(txn: &mut impl DbTxn, set: ExternalValidatorSet, participant: Participant) {
     // If this set has yet to be retired, send this transaction
     if RetiredTributary::get(txn, set.network).map(|session| session.0) < Some(set.session.0) {
       _internal_db::RemoveParticipant::send(txn, set, &participant);
     }
   }
-  pub(crate) fn try_recv(txn: &mut impl DbTxn, set: ValidatorSet) -> Option<Participant> {
+  pub(crate) fn try_recv(txn: &mut impl DbTxn, set: ExternalValidatorSet) -> Option<Participant> {
     _internal_db::RemoveParticipant::try_recv(txn, set)
   }
 }
```
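The `send` guards above rely on `Option`'s derived ordering: `RetiredTributary::get(...)` is `None` until a first session retires, and `None` compares less than any `Some`, so the gate passes both for never-retired networks and for sessions newer than the retired one. A sketch of just that comparison:

```rust
// Mirror of the gate `retired.map(|session| session.0) < Some(set.session.0)`:
// a set may still receive transactions only if it hasn't been retired yet.
fn still_active(retired: Option<u32>, session: u32) -> bool {
  retired < Some(session)
}

fn main() {
  assert!(still_active(None, 0)); // no session retired yet: everything is live
  assert!(still_active(Some(1), 2)); // session 2 is newer than retired session 1
  assert!(!still_active(Some(2), 2)); // session 2 itself was retired
  assert!(!still_active(Some(3), 2)); // older sessions are retired too
}
```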
```diff
@@ -17,7 +17,7 @@ use serai_db::{DbTxn, Db as DbTrait};
 
 use serai_client::{
   primitives::SeraiAddress,
-  validator_sets::primitives::{ValidatorSet, musig_context, set_keys_message},
+  validator_sets::primitives::{ExternalValidatorSet, musig_context, set_keys_message},
 };
 
 use serai_task::{DoesNotError, ContinuallyRan};
@@ -141,7 +141,7 @@ impl<CD: DbTrait, TD: DbTrait> ConfirmDkgTask<CD, TD> {
     Self { db, set, tributary_db, key, signer: None }
   }
 
-  fn slash(db: &mut CD, set: ValidatorSet, validator: SeraiAddress) {
+  fn slash(db: &mut CD, set: ExternalValidatorSet, validator: SeraiAddress) {
     let mut txn = db.txn();
     TributaryTransactionsFromDkgConfirmation::send(
       &mut txn,
@@ -153,7 +153,7 @@ impl<CD: DbTrait, TD: DbTrait> ConfirmDkgTask<CD, TD> {
 
   fn preprocess(
     db: &mut CD,
-    set: ValidatorSet,
+    set: ExternalValidatorSet,
     attempt: u32,
     key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
     signer: &mut Option<Signer>,
@@ -162,7 +162,9 @@ impl<CD: DbTrait, TD: DbTrait> ConfirmDkgTask<CD, TD> {
     let (machine, preprocess) = AlgorithmMachine::new(
       schnorrkel(),
       // We use a 1-of-1 Musig here as we don't know who will actually be in this Musig yet
-      musig(&musig_context(set), key, &[Ristretto::generator() * key.deref()]).unwrap().into(),
+      musig(&musig_context(set.into()), key, &[Ristretto::generator() * key.deref()])
+        .unwrap()
+        .into(),
     )
     .preprocess(&mut OsRng);
     // We take the preprocess so we can use it in a distinct machine with the actual Musig
@@ -256,8 +258,9 @@ impl<CD: DbTrait, TD: DbTrait> ContinuallyRan for ConfirmDkgTask<CD, TD> {
             })
             .collect::<Vec<_>>();
 
-          let keys =
-            musig(&musig_context(self.set.set), &self.key, &musig_public_keys).unwrap().into();
+          let keys = musig(&musig_context(self.set.set.into()), &self.key, &musig_public_keys)
+            .unwrap()
+            .into();
 
           // Rebuild the machine
           let (machine, preprocess_from_cache) =
```
```diff
@@ -14,8 +14,8 @@ use borsh::BorshDeserialize;
 use tokio::sync::mpsc;
 
 use serai_client::{
-  primitives::{NetworkId, PublicKey, SeraiAddress, Signature},
-  validator_sets::primitives::{ValidatorSet, KeyPair},
+  primitives::{ExternalNetworkId, PublicKey, SeraiAddress, Signature},
+  validator_sets::primitives::{ExternalValidatorSet, KeyPair},
   Serai,
 };
 use message_queue::{Service, client::MessageQueue};
@@ -153,14 +153,13 @@ async fn handle_network(
   mut db: impl serai_db::Db,
   message_queue: Arc<MessageQueue>,
   serai: Arc<Serai>,
-  network: NetworkId,
+  network: ExternalNetworkId,
 ) {
   // Spawn the task to publish batches for this network
   {
     let (publish_batch_task_def, publish_batch_task) = Task::new();
     tokio::spawn(
       PublishBatchTask::new(db.clone(), serai.clone(), network)
-        .unwrap()
         .continually_run(publish_batch_task_def, vec![]),
     );
     // Forget its handle so it always runs in the background
@@ -197,7 +196,7 @@ async fn handle_network(
       match msg {
         messages::ProcessorMessage::KeyGen(msg) => match msg {
           messages::key_gen::ProcessorMessage::Participation { session, participation } => {
-            let set = ValidatorSet { network, session };
+            let set = ExternalValidatorSet { network, session };
             TributaryTransactionsFromProcessorMessages::send(
               &mut txn,
               set,
@@ -211,7 +210,7 @@ async fn handle_network(
           } => {
             KeysToConfirm::set(
               &mut txn,
-              ValidatorSet { network, session },
+              ExternalValidatorSet { network, session },
               &KeyPair(
                 PublicKey::from_raw(substrate_key),
                 network_key
@@ -221,15 +220,15 @@ async fn handle_network(
             );
           }
           messages::key_gen::ProcessorMessage::Blame { session, participant } => {
-            RemoveParticipant::send(&mut txn, ValidatorSet { network, session }, participant);
+            RemoveParticipant::send(&mut txn, ExternalValidatorSet { network, session }, participant);
           }
         },
         messages::ProcessorMessage::Sign(msg) => match msg {
           messages::sign::ProcessorMessage::InvalidParticipant { session, participant } => {
-            RemoveParticipant::send(&mut txn, ValidatorSet { network, session }, participant);
+            RemoveParticipant::send(&mut txn, ExternalValidatorSet { network, session }, participant);
           }
           messages::sign::ProcessorMessage::Preprocesses { id, preprocesses } => {
-            let set = ValidatorSet { network, session: id.session };
+            let set = ExternalValidatorSet { network, session: id.session };
             if id.attempt == 0 {
               // Batches are declared by their intent to be signed
               if let messages::sign::VariantSignId::Batch(hash) = id.id {
@@ -254,7 +253,7 @@ async fn handle_network(
             );
           }
           messages::sign::ProcessorMessage::Shares { id, shares } => {
-            let set = ValidatorSet { network, session: id.session };
+            let set = ExternalValidatorSet { network, session: id.session };
             TributaryTransactionsFromProcessorMessages::send(
               &mut txn,
               set,
@@ -282,7 +281,7 @@ async fn handle_network(
           } => {
             SlashReports::set(
               &mut txn,
-              ValidatorSet { network, session },
+              ExternalValidatorSet { network, session },
               slash_report,
               Signature(signature),
             );
@@ -298,7 +297,7 @@ async fn handle_network(
                 .push(plan.transaction_plan_id);
             }
             for (session, plans) in by_session {
-              let set = ValidatorSet { network, session };
+              let set = ExternalValidatorSet { network, session };
               SubstrateBlockPlans::set(&mut txn, set, block, &plans);
               TributaryTransactionsFromProcessorMessages::send(
                 &mut txn,
@@ -481,10 +480,7 @@ async fn main() {
   );
 
   // Handle each of the networks
-  for network in serai_client::primitives::NETWORKS {
-    if network == NetworkId::Serai {
-      continue;
-    }
+  for network in serai_client::primitives::EXTERNAL_NETWORKS {
     tokio::spawn(handle_network(db.clone(), message_queue.clone(), serai.clone(), network));
   }
```
```diff
@@ -9,7 +9,7 @@ use tokio::sync::mpsc;
 
 use serai_db::{DbTxn, Db as DbTrait};
 
-use serai_client::validator_sets::primitives::{Session, ValidatorSet};
+use serai_client::validator_sets::primitives::{Session, ExternalValidatorSet};
 use message_queue::{Service, Metadata, client::MessageQueue};
 
 use tributary_sdk::Tributary;
@@ -27,8 +27,8 @@ pub(crate) struct SubstrateTask<P: P2p> {
   pub(crate) message_queue: Arc<MessageQueue>,
   pub(crate) p2p: P,
   pub(crate) p2p_add_tributary:
-    mpsc::UnboundedSender<(ValidatorSet, Tributary<Db, Transaction, P>)>,
-  pub(crate) p2p_retire_tributary: mpsc::UnboundedSender<ValidatorSet>,
+    mpsc::UnboundedSender<(ExternalValidatorSet, Tributary<Db, Transaction, P>)>,
+  pub(crate) p2p_retire_tributary: mpsc::UnboundedSender<ExternalValidatorSet>,
 }
 
 impl<P: P2p> ContinuallyRan for SubstrateTask<P> {
@@ -38,7 +38,7 @@ impl<P: P2p> ContinuallyRan for SubstrateTask<P> {
       let mut made_progress = false;
 
       // Handle the Canonical events
-      for network in serai_client::primitives::NETWORKS {
+      for network in serai_client::primitives::EXTERNAL_NETWORKS {
        loop {
          let mut txn = self.db.txn();
          let Some(msg) = serai_coordinator_substrate::Canonical::try_recv(&mut txn, network)
@@ -48,7 +48,7 @@ impl<P: P2p> ContinuallyRan for SubstrateTask<P> {
 
           match msg {
             messages::substrate::CoordinatorMessage::SetKeys { session, .. } => {
-              KeySet::set(&mut txn, ValidatorSet { network, session }, &());
+              KeySet::set(&mut txn, ExternalValidatorSet { network, session }, &());
             }
             messages::substrate::CoordinatorMessage::SlashesReported { session } => {
               let prior_retired = crate::db::RetiredTributary::get(&txn, network);
@@ -58,7 +58,7 @@ impl<P: P2p> ContinuallyRan for SubstrateTask<P> {
               crate::db::RetiredTributary::set(&mut txn, network, &session);
               self
                 .p2p_retire_tributary
-                .send(ValidatorSet { network, session })
+                .send(ExternalValidatorSet { network, session })
                 .expect("p2p retire_tributary channel dropped?");
             }
             messages::substrate::CoordinatorMessage::Block { .. } => {}
@@ -108,7 +108,10 @@ impl<P: P2p> ContinuallyRan for SubstrateTask<P> {
           */
           crate::db::TributaryCleanup::send(
             &mut txn,
-            &ValidatorSet { network: new_set.set.network, session: Session(historic_session) },
+            &ExternalValidatorSet {
+              network: new_set.set.network,
+              session: Session(historic_session),
+            },
           );
         }
```
```diff
@@ -11,7 +11,7 @@ use tokio::sync::mpsc;
 use serai_db::{Get, DbTxn, Db as DbTrait, create_db, db_channel};
 
 use scale::Encode;
-use serai_client::validator_sets::primitives::ValidatorSet;
+use serai_client::validator_sets::primitives::ExternalValidatorSet;
 
 use tributary_sdk::{TransactionKind, TransactionError, ProvidedError, TransactionTrait, Tributary};
 
@@ -33,13 +33,13 @@ use crate::{
 
 create_db! {
   Coordinator {
-    PublishOnRecognition: (set: ValidatorSet, topic: Topic) -> Transaction,
+    PublishOnRecognition: (set: ExternalValidatorSet, topic: Topic) -> Transaction,
   }
 }
 
 db_channel! {
   Coordinator {
-    PendingCosigns: (set: ValidatorSet) -> CosignIntent,
+    PendingCosigns: (set: ExternalValidatorSet) -> CosignIntent,
   }
 }
@@ -48,7 +48,7 @@ db_channel! {
 /// This is not a well-designed function. This is specific to the context in which its called,
 /// within this file. It should only be considered an internal helper for this domain alone.
 async fn provide_transaction<TD: DbTrait, P: P2p>(
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
   tributary: &Tributary<TD, Transaction, P>,
   tx: Transaction,
 ) {
@@ -211,7 +211,7 @@ async fn add_signed_unsigned_transaction<TD: DbTrait, P: P2p>(
 }
 
 async fn add_with_recognition_check<TD: DbTrait, P: P2p>(
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
   tributary_db: &mut TD,
   tributary: &Tributary<TD, Transaction, P>,
   key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
@@ -350,7 +350,7 @@ impl<CD: DbTrait, TD: DbTrait, P: P2p> ContinuallyRan for AddTributaryTransactio
 /// Takes the messages from ScanTributaryTask and publishes them to the message-queue.
 pub(crate) struct TributaryProcessorMessagesTask<TD: DbTrait> {
   tributary_db: TD,
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
   message_queue: Arc<MessageQueue>,
 }
 impl<TD: DbTrait> ContinuallyRan for TributaryProcessorMessagesTask<TD> {
@@ -430,7 +430,7 @@ impl<CD: DbTrait, TD: DbTrait, P: P2p> ContinuallyRan for SignSlashReportTask<CD
 /// Run the scan task whenever the Tributary adds a new block.
 async fn scan_on_new_block<CD: DbTrait, TD: DbTrait, P: P2p>(
   db: CD,
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
   tributary: Tributary<TD, Transaction, P>,
   scan_tributary_task: TaskHandle,
   tasks_to_keep_alive: Vec<TaskHandle>,
@@ -469,7 +469,7 @@ pub(crate) async fn spawn_tributary<P: P2p>(
   db: Db,
   message_queue: Arc<MessageQueue>,
   p2p: P,
-  p2p_add_tributary: &mpsc::UnboundedSender<(ValidatorSet, Tributary<Db, Transaction, P>)>,
+  p2p_add_tributary: &mpsc::UnboundedSender<(ExternalValidatorSet, Tributary<Db, Transaction, P>)>,
   set: NewSetInformation,
   serai_key: Zeroizing<<Ristretto as Ciphersuite>::F>,
 ) {
```
```diff
@@ -3,7 +3,7 @@ use std::sync::Arc;
 
 use futures::stream::{StreamExt, FuturesOrdered};
 
-use serai_client::Serai;
+use serai_client::{validator_sets::primitives::ExternalValidatorSet, Serai};
 
 use messages::substrate::{InInstructionResult, ExecutedBatch, CoordinatorMessage};
 
@@ -152,6 +152,7 @@ impl<D: Db> ContinuallyRan for CanonicalEventStream<D> {
         else {
           panic!("SetRetired event wasn't a SetRetired event: {set_retired:?}");
         };
+        let Ok(set) = ExternalValidatorSet::try_from(*set) else { continue };
         crate::Canonical::send(
           &mut txn,
           set.network,
@@ -159,7 +160,7 @@ impl<D: Db> ContinuallyRan for CanonicalEventStream<D> {
         );
       }
 
-      for network in serai_client::primitives::NETWORKS {
+      for network in serai_client::primitives::EXTERNAL_NETWORKS {
         let mut batch = None;
         for this_batch in &block.batch_events {
           let serai_client::in_instructions::InInstructionsEvent::Batch {
@@ -201,7 +202,7 @@ impl<D: Db> ContinuallyRan for CanonicalEventStream<D> {
           let serai_client::coins::CoinsEvent::BurnWithInstruction { from: _, instruction } =
             &burn
           else {
-            panic!("Burn event wasn't a Burn.in event: {burn:?}");
+            panic!("BurnWithInstruction event wasn't a BurnWithInstruction event: {burn:?}");
           };
           if instruction.balance.coin.network() == network {
             burns.push(instruction.clone());
```
```diff
@@ -4,8 +4,8 @@ use std::sync::Arc;
 use futures::stream::{StreamExt, FuturesOrdered};
 
 use serai_client::{
-  primitives::{NetworkId, SeraiAddress, EmbeddedEllipticCurve},
-  validator_sets::primitives::MAX_KEY_SHARES_PER_SET,
+  primitives::{SeraiAddress, EmbeddedEllipticCurve},
+  validator_sets::primitives::{MAX_KEY_SHARES_PER_SET, ExternalValidatorSet},
   Serai,
 };
 
@@ -130,16 +130,13 @@ impl<D: Db> ContinuallyRan for EphemeralEventStream<D> {
         let serai_client::validator_sets::ValidatorSetsEvent::NewSet { set } = &new_set else {
           panic!("NewSet event wasn't a NewSet event: {new_set:?}");
         };
 
-        // We only coordinate over external networks
-        if set.network == NetworkId::Serai {
-          continue;
-        }
+        let Ok(set) = ExternalValidatorSet::try_from(*set) else { continue };
 
         let serai = self.serai.as_of(block.block_hash);
         let serai = serai.validator_sets();
         let Some(validators) =
-          serai.participants(set.network).await.map_err(|e| format!("{e:?}"))?
+          serai.participants(set.network.into()).await.map_err(|e| format!("{e:?}"))?
         else {
           Err(format!(
             "block #{block_number} declared a new set but didn't have the participants"
@@ -222,11 +219,11 @@ impl<D: Db> ContinuallyRan for EphemeralEventStream<D> {
         }
 
         let mut new_set = NewSetInformation {
-          set: *set,
+          set,
           serai_block: block.block_hash,
           declaration_time: block.time,
-          // TODO: Why do we have this as an explicit field here?
-          // Shouldn't this be inlined into the Processor's key gen code, where it's used?
+          // TODO: This should be inlined into the Processor's key gen code
+          // It's legacy from when we removed participants from the key gen
           threshold: ((total_weight * 2) / 3) + 1,
           validators,
           evrf_public_keys,
@@ -246,7 +243,8 @@ impl<D: Db> ContinuallyRan for EphemeralEventStream<D> {
         else {
           panic!("AcceptedHandover event wasn't a AcceptedHandover event: {accepted_handover:?}");
         };
-        crate::SignSlashReport::send(&mut txn, *set);
+        let Ok(set) = ExternalValidatorSet::try_from(*set) else { continue };
+        crate::SignSlashReport::send(&mut txn, set);
       }
 
       txn.commit();
```
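The `threshold: ((total_weight * 2) / 3) + 1` field above is the usual Byzantine fault tolerant supermajority: strictly more than two-thirds of the key shares must sign. A worked sketch of the arithmetic:

```rust
// Smallest weight strictly greater than 2/3 of the total (integer division
// rounds down, so adding 1 always crosses the two-thirds boundary).
fn threshold(total_weight: u16) -> u16 {
  ((total_weight * 2) / 3) + 1
}

fn main() {
  assert_eq!(threshold(3), 3); // 3 shares: all three must sign
  assert_eq!(threshold(4), 3); // 4 shares: tolerates one faulty share
  assert_eq!(threshold(100), 67);
}
```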
@@ -10,8 +10,8 @@ use borsh::{BorshSerialize, BorshDeserialize};
|
||||
use dkg::Participant;
|
||||
|
||||
use serai_client::{
|
||||
primitives::{NetworkId, SeraiAddress, Signature},
|
||||
validator_sets::primitives::{Session, ValidatorSet, KeyPair, SlashReport},
|
||||
primitives::{ExternalNetworkId, SeraiAddress, Signature},
|
||||
validator_sets::primitives::{Session, ExternalValidatorSet, KeyPair, SlashReport},
|
||||
in_instructions::primitives::SignedBatch,
|
||||
Transaction,
|
||||
};
|
||||
@@ -35,7 +35,7 @@ pub use publish_slash_report::PublishSlashReportTask;
|
||||
#[borsh(init = init_participant_indexes)]
|
||||
pub struct NewSetInformation {
|
||||
/// The set.
|
||||
pub set: ValidatorSet,
|
||||
pub set: ExternalValidatorSet,
|
||||
/// The Serai block which declared it.
|
||||
pub serai_block: [u8; 32],
|
||||
/// The time of the block which declared it, in seconds.
|
||||
@@ -82,24 +82,24 @@ mod _public_db {
db_channel!(
CoordinatorSubstrate {
// Canonical messages to send to the processor
- Canonical: (network: NetworkId) -> messages::substrate::CoordinatorMessage,
+ Canonical: (network: ExternalNetworkId) -> messages::substrate::CoordinatorMessage,

// Relevant new set, from an ephemeral event stream
NewSet: () -> NewSetInformation,
// Potentially relevant sign slash report, from an ephemeral event stream
- SignSlashReport: (set: ValidatorSet) -> (),
+ SignSlashReport: (set: ExternalValidatorSet) -> (),

// Signed batches to publish onto the Serai network
- SignedBatches: (network: NetworkId) -> SignedBatch,
+ SignedBatches: (network: ExternalNetworkId) -> SignedBatch,
}
);

create_db!(
CoordinatorSubstrate {
// Keys to set on the Serai network
- Keys: (network: NetworkId) -> (Session, Vec<u8>),
+ Keys: (network: ExternalNetworkId) -> (Session, Vec<u8>),
// Slash reports to publish onto the Serai network
- SlashReports: (network: NetworkId) -> (Session, Vec<u8>),
+ SlashReports: (network: ExternalNetworkId) -> (Session, Vec<u8>),
}
);
}
@@ -109,7 +109,7 @@ pub struct Canonical;
impl Canonical {
pub(crate) fn send(
txn: &mut impl DbTxn,
- network: NetworkId,
+ network: ExternalNetworkId,
msg: &messages::substrate::CoordinatorMessage,
) {
_public_db::Canonical::send(txn, network, msg);

@@ -117,7 +117,7 @@ impl Canonical {
/// Try to receive a canonical event, returning `None` if there is none to receive.
pub fn try_recv(
txn: &mut impl DbTxn,
- network: NetworkId,
+ network: ExternalNetworkId,
) -> Option<messages::substrate::CoordinatorMessage> {
_public_db::Canonical::try_recv(txn, network)
}
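
Note: a hedged usage sketch of the retyped channel. The Bitcoin variant and the
db/msg values are assumptions for illustration; send remains pub(crate), so this
only applies within the coordinator crate:

fn forward_canonical(db: &mut impl serai_db::Db, msg: &messages::substrate::CoordinatorMessage) {
  let mut txn = db.txn();
  // ExternalNetworkId makes "the Serai network itself" unrepresentable here
  Canonical::send(&mut txn, ExternalNetworkId::Bitcoin, msg);
  txn.commit();
}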
@@ -141,12 +141,12 @@ impl NewSet {
/// notifications for all relevant validator sets will be included.
pub struct SignSlashReport;
impl SignSlashReport {
- pub(crate) fn send(txn: &mut impl DbTxn, set: ValidatorSet) {
+ pub(crate) fn send(txn: &mut impl DbTxn, set: ExternalValidatorSet) {
_public_db::SignSlashReport::send(txn, set, &());
}
/// Try to receive a notification to sign a slash report, returning `None` if there is none to
/// receive.
- pub fn try_recv(txn: &mut impl DbTxn, set: ValidatorSet) -> Option<()> {
+ pub fn try_recv(txn: &mut impl DbTxn, set: ExternalValidatorSet) -> Option<()> {
_public_db::SignSlashReport::try_recv(txn, set)
}
}
@@ -160,7 +160,7 @@ impl Keys {
/// reported at once.
pub fn set(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
key_pair: KeyPair,
signature_participants: bitvec::vec::BitVec<u8, bitvec::order::Lsb0>,
signature: Signature,

@@ -180,7 +180,10 @@ impl Keys {
);
_public_db::Keys::set(txn, set.network, &(set.session, tx.encode()));
}
- pub(crate) fn take(txn: &mut impl DbTxn, network: NetworkId) -> Option<(Session, Transaction)> {
+ pub(crate) fn take(
+ txn: &mut impl DbTxn,
+ network: ExternalNetworkId,
+ ) -> Option<(Session, Transaction)> {
let (session, tx) = _public_db::Keys::take(txn, network)?;
Some((session, <_>::decode(&mut tx.as_slice()).unwrap()))
}
@@ -193,7 +196,7 @@ impl SignedBatches {
pub fn send(txn: &mut impl DbTxn, batch: &SignedBatch) {
_public_db::SignedBatches::send(txn, batch.batch.network, batch);
}
- pub(crate) fn try_recv(txn: &mut impl DbTxn, network: NetworkId) -> Option<SignedBatch> {
+ pub(crate) fn try_recv(txn: &mut impl DbTxn, network: ExternalNetworkId) -> Option<SignedBatch> {
_public_db::SignedBatches::try_recv(txn, network)
}
}
@@ -207,7 +210,7 @@ impl SlashReports {
/// slashes reported at once.
pub fn set(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
slash_report: SlashReport,
signature: Signature,
) {

@@ -225,7 +228,10 @@ impl SlashReports {
);
_public_db::SlashReports::set(txn, set.network, &(set.session, tx.encode()));
}
- pub(crate) fn take(txn: &mut impl DbTxn, network: NetworkId) -> Option<(Session, Transaction)> {
+ pub(crate) fn take(
+ txn: &mut impl DbTxn,
+ network: ExternalNetworkId,
+ ) -> Option<(Session, Transaction)> {
let (session, tx) = _public_db::SlashReports::take(txn, network)?;
Some((session, <_>::decode(&mut tx.as_slice()).unwrap()))
}

@@ -2,7 +2,7 @@ use core::future::Future;
use std::sync::Arc;

#[rustfmt::skip]
- use serai_client::{primitives::NetworkId, in_instructions::primitives::SignedBatch, SeraiError, Serai};
+ use serai_client::{primitives::ExternalNetworkId, in_instructions::primitives::SignedBatch, SeraiError, Serai};

use serai_db::{Get, DbTxn, Db, create_db};
use serai_task::ContinuallyRan;

@@ -11,8 +11,8 @@ use crate::SignedBatches;

create_db!(
CoordinatorSubstrate {
- LastPublishedBatch: (network: NetworkId) -> u32,
- BatchesToPublish: (network: NetworkId, batch: u32) -> SignedBatch,
+ LastPublishedBatch: (network: ExternalNetworkId) -> u32,
+ BatchesToPublish: (network: ExternalNetworkId, batch: u32) -> SignedBatch,
}
);

@@ -20,19 +20,13 @@ create_db!(
pub struct PublishBatchTask<D: Db> {
db: D,
serai: Arc<Serai>,
- network: NetworkId,
+ network: ExternalNetworkId,
}

impl<D: Db> PublishBatchTask<D> {
/// Create a task to publish `SignedBatch`s onto Serai.
- ///
- /// Returns None if `network == NetworkId::Serai`.
- // TODO: ExternalNetworkId
- pub fn new(db: D, serai: Arc<Serai>, network: NetworkId) -> Option<Self> {
- if network == NetworkId::Serai {
- None?
- };
- Some(Self { db, serai, network })
+ pub fn new(db: D, serai: Arc<Serai>, network: ExternalNetworkId) -> Self {
+ Self { db, serai, network }
}
}

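Note: with the network typed as ExternalNetworkId, construction is infallible.
A minimal sketch, assuming the Bitcoin variant for illustration:

fn publish_batch_task<D: serai_db::Db>(db: D, serai: std::sync::Arc<Serai>) -> PublishBatchTask<D> {
  // No Option and no runtime NetworkId::Serai check: the type rules it out
  PublishBatchTask::new(db, serai, ExternalNetworkId::Bitcoin)
}
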
@@ -3,7 +3,7 @@ use std::sync::Arc;

use serai_db::{DbTxn, Db};

- use serai_client::{primitives::NetworkId, validator_sets::primitives::Session, Serai};
+ use serai_client::{primitives::ExternalNetworkId, validator_sets::primitives::Session, Serai};

use serai_task::ContinuallyRan;

@@ -24,7 +24,7 @@ impl<D: Db> PublishSlashReportTask<D> {

impl<D: Db> PublishSlashReportTask<D> {
// Returns if a slash report was successfully published
- async fn publish(&mut self, network: NetworkId) -> Result<bool, String> {
+ async fn publish(&mut self, network: ExternalNetworkId) -> Result<bool, String> {
let mut txn = self.db.txn();
let Some((session, slash_report)) = SlashReports::take(&mut txn, network) else {
// No slash report to publish

@@ -36,7 +36,7 @@ impl<D: Db> PublishSlashReportTask<D> {
let serai = self.serai.as_of_latest_finalized_block().await.map_err(|e| format!("{e:?}"))?;
let serai = serai.validator_sets();
let session_after_slash_report = Session(session.0 + 1);
- let current_session = serai.session(network).await.map_err(|e| format!("{e:?}"))?;
+ let current_session = serai.session(network.into()).await.map_err(|e| format!("{e:?}"))?;
let current_session = current_session.map(|session| session.0);
// Only attempt to publish the slash report for session #n while session #n+1 is still
// active
@@ -84,11 +84,7 @@ impl<D: Db> ContinuallyRan for PublishSlashReportTask<D> {
async move {
let mut made_progress = false;
let mut error = None;
- for network in serai_client::primitives::NETWORKS {
- if network == NetworkId::Serai {
- continue;
- };
-
+ for network in serai_client::primitives::EXTERNAL_NETWORKS {
let network_res = self.publish(network).await;
// We made progress if any network successfully published their slash report
made_progress |= network_res == Ok(true);

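Note: the recurring pattern in this compare replaces runtime `network ==
NetworkId::Serai` guards with a narrower type. A sketch of the assumed shape of
that relationship (the real definitions and variants live in serai-client's
primitives):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum NetworkId { Serai, Bitcoin, Ethereum, Monero }
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum ExternalNetworkId { Bitcoin, Ethereum, Monero }

impl TryFrom<NetworkId> for ExternalNetworkId {
  type Error = ();
  // Fallible in exactly one case: the Serai network itself
  fn try_from(network: NetworkId) -> Result<Self, ()> {
    match network {
      NetworkId::Serai => Err(()),
      NetworkId::Bitcoin => Ok(ExternalNetworkId::Bitcoin),
      NetworkId::Ethereum => Ok(ExternalNetworkId::Ethereum),
      NetworkId::Monero => Ok(ExternalNetworkId::Monero),
    }
  }
}

impl From<ExternalNetworkId> for NetworkId {
  // Infallible in the other direction, hence the `.into()` calls above
  fn from(network: ExternalNetworkId) -> Self {
    match network {
      ExternalNetworkId::Bitcoin => NetworkId::Bitcoin,
      ExternalNetworkId::Ethereum => NetworkId::Ethereum,
      ExternalNetworkId::Monero => NetworkId::Monero,
    }
  }
}
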
@@ -3,7 +3,7 @@ use std::sync::Arc;

use serai_db::{DbTxn, Db};

- use serai_client::{primitives::NetworkId, validator_sets::primitives::ValidatorSet, Serai};
+ use serai_client::{validator_sets::primitives::ExternalValidatorSet, Serai};

use serai_task::ContinuallyRan;

@@ -28,11 +28,7 @@ impl<D: Db> ContinuallyRan for SetKeysTask<D> {
fn run_iteration(&mut self) -> impl Send + Future<Output = Result<bool, Self::Error>> {
async move {
let mut made_progress = false;
- for network in serai_client::primitives::NETWORKS {
- if network == NetworkId::Serai {
- continue;
- };
-
+ for network in serai_client::primitives::EXTERNAL_NETWORKS {
let mut txn = self.db.txn();
let Some((session, keys)) = Keys::take(&mut txn, network) else {
// No keys to set
@@ -44,7 +40,7 @@ impl<D: Db> ContinuallyRan for SetKeysTask<D> {
let serai =
self.serai.as_of_latest_finalized_block().await.map_err(|e| format!("{e:?}"))?;
let serai = serai.validator_sets();
- let current_session = serai.session(network).await.map_err(|e| format!("{e:?}"))?;
+ let current_session = serai.session(network.into()).await.map_err(|e| format!("{e:?}"))?;
let current_session = current_session.map(|session| session.0);
// Only attempt to set these keys if this isn't a retired session
if Some(session.0) < current_session {
@@ -62,7 +58,7 @@ impl<D: Db> ContinuallyRan for SetKeysTask<D> {

// If this session already has had its keys set, move on
if serai
- .keys(ValidatorSet { network, session })
+ .keys(ExternalValidatorSet { network, session })
.await
.map_err(|e| format!("{e:?}"))?
.is_some()

@@ -3,7 +3,7 @@ use std::collections::HashMap;
use scale::Encode;
use borsh::{BorshSerialize, BorshDeserialize};

- use serai_client::{primitives::SeraiAddress, validator_sets::primitives::ValidatorSet};
+ use serai_client::{primitives::SeraiAddress, validator_sets::primitives::ExternalValidatorSet};

use messages::sign::{VariantSignId, SignId};

@@ -97,7 +97,7 @@ impl Topic {
/// The SignId for this topic
///
/// Returns None if Topic isn't Topic::Sign
- pub(crate) fn sign_id(self, set: ValidatorSet) -> Option<messages::sign::SignId> {
+ pub(crate) fn sign_id(self, set: ExternalValidatorSet) -> Option<messages::sign::SignId> {
#[allow(clippy::match_same_arms)]
match self {
Topic::RemoveParticipant { .. } => None,

@@ -115,7 +115,7 @@ impl Topic {
/// Returns None if Topic isn't Topic::DkgConfirmation.
pub(crate) fn dkg_confirmation_sign_id(
self,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
) -> Option<messages::sign::SignId> {
#[allow(clippy::match_same_arms)]
match self {
@@ -227,41 +227,48 @@ pub(crate) enum DataSet<D: Borshy> {
create_db!(
CoordinatorTributary {
// The last handled tributary block's (number, hash)
- LastHandledTributaryBlock: (set: ValidatorSet) -> (u64, [u8; 32]),
+ LastHandledTributaryBlock: (set: ExternalValidatorSet) -> (u64, [u8; 32]),

// The slash points a validator has accrued, with u32::MAX representing a fatal slash.
- SlashPoints: (set: ValidatorSet, validator: SeraiAddress) -> u32,
+ SlashPoints: (set: ExternalValidatorSet, validator: SeraiAddress) -> u32,

// The cosign intent for a Substrate block
- CosignIntents: (set: ValidatorSet, substrate_block_hash: [u8; 32]) -> CosignIntent,
+ CosignIntents: (set: ExternalValidatorSet, substrate_block_hash: [u8; 32]) -> CosignIntent,
// The latest Substrate block to cosign.
- LatestSubstrateBlockToCosign: (set: ValidatorSet) -> [u8; 32],
+ LatestSubstrateBlockToCosign: (set: ExternalValidatorSet) -> [u8; 32],
// The hash of the block we're actively cosigning.
- ActivelyCosigning: (set: ValidatorSet) -> [u8; 32],
+ ActivelyCosigning: (set: ExternalValidatorSet) -> [u8; 32],
// If this block has already been cosigned.
- Cosigned: (set: ValidatorSet, substrate_block_hash: [u8; 32]) -> (),
+ Cosigned: (set: ExternalValidatorSet, substrate_block_hash: [u8; 32]) -> (),

// The plans to recognize upon a `Transaction::SubstrateBlock` being included on-chain.
- SubstrateBlockPlans: (set: ValidatorSet, substrate_block_hash: [u8; 32]) -> Vec<[u8; 32]>,
+ SubstrateBlockPlans: (
+ set: ExternalValidatorSet,
+ substrate_block_hash: [u8; 32]
+ ) -> Vec<[u8; 32]>,

// The weight accumulated for a topic.
- AccumulatedWeight: (set: ValidatorSet, topic: Topic) -> u16,
+ AccumulatedWeight: (set: ExternalValidatorSet, topic: Topic) -> u16,
// The entries accumulated for a topic, by validator.
- Accumulated: <D: Borshy>(set: ValidatorSet, topic: Topic, validator: SeraiAddress) -> D,
+ Accumulated: <D: Borshy>(
+ set: ExternalValidatorSet,
+ topic: Topic,
+ validator: SeraiAddress
+ ) -> D,

// Topics to be recognized as of a certain block number due to the reattempt protocol.
- Reattempt: (set: ValidatorSet, block_number: u64) -> Vec<Topic>,
+ Reattempt: (set: ExternalValidatorSet, block_number: u64) -> Vec<Topic>,
}
);

db_channel!(
CoordinatorTributary {
// Messages to send to the processor
- ProcessorMessages: (set: ValidatorSet) -> messages::CoordinatorMessage,
+ ProcessorMessages: (set: ExternalValidatorSet) -> messages::CoordinatorMessage,
// Messages for the DKG confirmation
- DkgConfirmationMessages: (set: ValidatorSet) -> messages::sign::CoordinatorMessage,
+ DkgConfirmationMessages: (set: ExternalValidatorSet) -> messages::sign::CoordinatorMessage,
// Topics which have been explicitly recognized
- RecognizedTopics: (set: ValidatorSet) -> Topic,
+ RecognizedTopics: (set: ExternalValidatorSet) -> Topic,
}
);

@@ -269,13 +276,13 @@ pub(crate) struct TributaryDb;
impl TributaryDb {
pub(crate) fn last_handled_tributary_block(
getter: &impl Get,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
) -> Option<(u64, [u8; 32])> {
LastHandledTributaryBlock::get(getter, set)
}
pub(crate) fn set_last_handled_tributary_block(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
block_number: u64,
block_hash: [u8; 32],
) {
@@ -284,23 +291,26 @@ impl TributaryDb {

pub(crate) fn latest_substrate_block_to_cosign(
getter: &impl Get,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
) -> Option<[u8; 32]> {
LatestSubstrateBlockToCosign::get(getter, set)
}
pub(crate) fn set_latest_substrate_block_to_cosign(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
) {
LatestSubstrateBlockToCosign::set(txn, set, &substrate_block_hash);
}
- pub(crate) fn actively_cosigning(txn: &mut impl DbTxn, set: ValidatorSet) -> Option<[u8; 32]> {
+ pub(crate) fn actively_cosigning(
+ txn: &mut impl DbTxn,
+ set: ExternalValidatorSet,
+ ) -> Option<[u8; 32]> {
ActivelyCosigning::get(txn, set)
}
pub(crate) fn start_cosigning(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
substrate_block_number: u64,
) {
@@ -320,33 +330,33 @@ impl TributaryDb {
},
);
}
- pub(crate) fn finish_cosigning(txn: &mut impl DbTxn, set: ValidatorSet) {
+ pub(crate) fn finish_cosigning(txn: &mut impl DbTxn, set: ExternalValidatorSet) {
assert!(ActivelyCosigning::take(txn, set).is_some(), "finished cosigning but not cosigning");
}
pub(crate) fn mark_cosigned(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
) {
Cosigned::set(txn, set, substrate_block_hash, &());
}
pub(crate) fn cosigned(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
) -> bool {
Cosigned::get(txn, set, substrate_block_hash).is_some()
}

- pub(crate) fn recognize_topic(txn: &mut impl DbTxn, set: ValidatorSet, topic: Topic) {
+ pub(crate) fn recognize_topic(txn: &mut impl DbTxn, set: ExternalValidatorSet, topic: Topic) {
AccumulatedWeight::set(txn, set, topic, &0);
RecognizedTopics::send(txn, set, &topic);
}
- pub(crate) fn recognized(getter: &impl Get, set: ValidatorSet, topic: Topic) -> bool {
+ pub(crate) fn recognized(getter: &impl Get, set: ExternalValidatorSet, topic: Topic) -> bool {
AccumulatedWeight::get(getter, set, topic).is_some()
}

- pub(crate) fn start_of_block(txn: &mut impl DbTxn, set: ValidatorSet, block_number: u64) {
+ pub(crate) fn start_of_block(txn: &mut impl DbTxn, set: ExternalValidatorSet, block_number: u64) {
for topic in Reattempt::take(txn, set, block_number).unwrap_or(vec![]) {
/*
TODO: Slash all people who preprocessed but didn't share, and add a delay to their
@@ -376,7 +386,7 @@ impl TributaryDb {

pub(crate) fn fatal_slash(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
validator: SeraiAddress,
reason: &str,
) {

@@ -386,7 +396,7 @@ impl TributaryDb {

pub(crate) fn is_fatally_slashed(
getter: &impl Get,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
validator: SeraiAddress,
) -> bool {
SlashPoints::get(getter, set, validator).unwrap_or(0) == u32::MAX
@@ -395,7 +405,7 @@ impl TributaryDb {
#[allow(clippy::too_many_arguments)]
pub(crate) fn accumulate<D: Borshy>(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
validators: &[SeraiAddress],
total_weight: u16,
block_number: u64,

@@ -511,7 +521,7 @@ impl TributaryDb {

pub(crate) fn send_message(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
message: impl Into<messages::CoordinatorMessage>,
) {
ProcessorMessages::send(txn, set, &message.into());

@@ -10,7 +10,7 @@ use dkg::Participant;

use serai_client::{
primitives::SeraiAddress,
- validator_sets::primitives::{ValidatorSet, Slash},
+ validator_sets::primitives::{ExternalValidatorSet, Slash},
};

use serai_db::*;
@@ -41,7 +41,10 @@ pub use db::Topic;
pub struct ProcessorMessages;
impl ProcessorMessages {
/// Try to receive a message to send to a Processor.
- pub fn try_recv(txn: &mut impl DbTxn, set: ValidatorSet) -> Option<messages::CoordinatorMessage> {
+ pub fn try_recv(
+ txn: &mut impl DbTxn,
+ set: ExternalValidatorSet,
+ ) -> Option<messages::CoordinatorMessage> {
db::ProcessorMessages::try_recv(txn, set)
}
}

@@ -58,7 +61,7 @@ impl DkgConfirmationMessages {
/// across validator sets, with no guarantees of uniqueness across contexts.
pub fn try_recv(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
) -> Option<messages::sign::CoordinatorMessage> {
db::DkgConfirmationMessages::try_recv(txn, set)
}
@@ -70,12 +73,12 @@ impl CosignIntents {
/// Provide a CosignIntent for this Tributary.
///
/// This must be done before the associated `Transaction::Cosign` is provided.
- pub fn provide(txn: &mut impl DbTxn, set: ValidatorSet, intent: &CosignIntent) {
+ pub fn provide(txn: &mut impl DbTxn, set: ExternalValidatorSet, intent: &CosignIntent) {
db::CosignIntents::set(txn, set, intent.block_hash, intent);
}
fn take(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
) -> Option<CosignIntent> {
db::CosignIntents::take(txn, set, substrate_block_hash)
@@ -88,13 +91,13 @@ impl RecognizedTopics {
/// If this topic has been recognized by this Tributary.
///
/// This will either be by explicit recognition or participation.
- pub fn recognized(getter: &impl Get, set: ValidatorSet, topic: Topic) -> bool {
+ pub fn recognized(getter: &impl Get, set: ExternalValidatorSet, topic: Topic) -> bool {
TributaryDb::recognized(getter, set, topic)
}
/// The next topic requiring recognition which has been recognized by this Tributary.
pub fn try_recv_topic_requiring_recognition(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
) -> Option<Topic> {
db::RecognizedTopics::try_recv(txn, set)
}
@@ -109,7 +112,7 @@ impl SubstrateBlockPlans {
/// This must be done before the associated `Transaction::Cosign` is provided.
pub fn set(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
plans: &Vec<[u8; 32]>,
) {

@@ -117,7 +120,7 @@ impl SubstrateBlockPlans {
}
fn take(
txn: &mut impl DbTxn,
- set: ValidatorSet,
+ set: ExternalValidatorSet,
substrate_block_hash: [u8; 32],
) -> Option<Vec<[u8; 32]>> {
db::SubstrateBlockPlans::take(txn, set, substrate_block_hash)

@@ -28,6 +28,12 @@ macro_rules! dalek_curve {
$Point::generator()
}

+ fn reduce_512(mut scalar: [u8; 64]) -> Self::F {
+ let res = Scalar::from_bytes_mod_order_wide(&scalar);
+ scalar.zeroize();
+ res
+ }
+
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
Scalar::from_hash(Sha512::new_with_prefix(&[dst, data].concat()))
}

@@ -66,6 +66,12 @@ impl Ciphersuite for Ed448 {
Point::generator()
}

+ fn reduce_512(mut scalar: [u8; 64]) -> Self::F {
+ let res = Self::hash_to_F(b"Ciphersuite-reduce_512", &scalar);
+ scalar.zeroize();
+ res
+ }
+
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_ref().try_into().unwrap())
}

@@ -6,7 +6,7 @@ use group::ff::PrimeField;

use elliptic_curve::{
generic_array::GenericArray,
- bigint::{NonZero, CheckedAdd, Encoding, U384},
+ bigint::{NonZero, CheckedAdd, Encoding, U384, U512},
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
};

@@ -31,6 +31,22 @@ macro_rules! kp_curve {
$lib::ProjectivePoint::GENERATOR
}

+ fn reduce_512(scalar: [u8; 64]) -> Self::F {
+ let mut modulus = [0; 64];
+ modulus[32 ..].copy_from_slice(&(Self::F::ZERO - Self::F::ONE).to_bytes());
+ let modulus = U512::from_be_slice(&modulus).checked_add(&U512::ONE).unwrap();
+
+ let mut wide =
+ U512::from_be_bytes(scalar).rem(&NonZero::new(modulus).unwrap()).to_be_bytes();
+
+ let mut array = *GenericArray::from_slice(&wide[32 ..]);
+ let res = $lib::Scalar::from_repr(array).unwrap();
+
+ wide.zeroize();
+ array.zeroize();
+ res
+ }
+
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
// While one of these two libraries does support directly hashing to the Scalar field, the
// other doesn't. While that's probably an oversight, this is a universally working method

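Note: a toy illustration of why one wide reduction suffices: the wider the
uniform input, the flatter `x % n` becomes. For n near 2^256 and a 512-bit
input the bias is on the order of 2^-256; shrunk here to checkable sizes:

fn bucket_spread(input_bits: u32, n: u64) -> (u64, u64) {
  let mut counts = vec![0u64; n as usize];
  for x in 0 .. (1u64 << input_bits) {
    counts[(x % n) as usize] += 1;
  }
  (*counts.iter().max().unwrap(), *counts.iter().min().unwrap())
}

fn main() {
  assert_eq!(bucket_spread(8, 200), (2, 1));      // 8-bit input: heavily biased
  assert_eq!(bucket_spread(16, 200), (328, 327)); // 16-bit input: near-uniform
}
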
@@ -62,6 +62,12 @@ pub trait Ciphersuite:
// While group does provide this in its API, privacy coins may want to use a custom basepoint
fn generator() -> Self::G;

+ /// Reduce 512 bits into a uniform scalar.
+ ///
+ /// If 512 bits is insufficient to perform a reduction into a uniform scalar, the ciphersuite
+ /// will perform a hash to sample the necessary bits.
+ fn reduce_512(scalar: [u8; 64]) -> Self::F;
+
/// Hash the provided domain-separation tag and message to a scalar. Ciphersuites MAY naively
/// prefix the tag to the message, enabling transpotion between the two. Accordingly, this
/// function should NOT be used in any scheme where one tag is a valid substring of another
@@ -99,6 +105,9 @@ pub trait Ciphersuite:
}

/// Read a canonical point from something implementing std::io::Read.
+ ///
+ /// The provided implementation is safe so long as `GroupEncoding::to_bytes` always returns a
+ /// canonical serialization.
#[cfg(any(feature = "alloc", feature = "std"))]
#[allow(non_snake_case)]
fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {

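Note: a hedged usage sketch of the new trait method, assuming this crate's
`ristretto` feature; per the dalek implementation above, this is a wide
reduction of the 64 input bytes:

fn scalar_from_wide_bytes(wide: [u8; 64]) -> <ciphersuite::Ristretto as ciphersuite::Ciphersuite>::F {
  <ciphersuite::Ristretto as ciphersuite::Ciphersuite>::reduce_512(wide)
}
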
@@ -244,7 +244,16 @@ impl FieldElement {
res *= res;
}
}
- res *= table[usize::from(bits)];
+
+ let mut scale_by = FieldElement::ONE;
+ #[allow(clippy::needless_range_loop)]
+ for i in 0 .. 16 {
+ #[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+ {
+ scale_by = <_>::conditional_select(&scale_by, &table[i], bits.ct_eq(&(i as u8)));
+ }
+ }
+ res *= scale_by;
bits = 0;
}
}
@@ -208,7 +208,16 @@ impl Scalar {
res *= res;
}
}
- res *= table[usize::from(bits)];
+
+ let mut scale_by = Scalar::ONE;
+ #[allow(clippy::needless_range_loop)]
+ for i in 0 .. 16 {
+ #[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+ {
+ scale_by = <_>::conditional_select(&scale_by, &table[i], bits.ct_eq(&(i as u8)));
+ }
+ }
+ res *= scale_by;
bits = 0;
}
}

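Note: this hunk and its siblings (the FieldElement impl above and the Ed448
field/point macros further down) all apply the same fix: replace a
secret-indexed table read with a full scan plus constant-time selection. A
generic sketch of the pattern using the subtle crate:

use subtle::{ConditionallySelectable, ConstantTimeEq};

// Reads table[secret_index] while touching every slot exactly once, so the
// memory access pattern is independent of the secret.
fn ct_table_lookup<T: ConditionallySelectable + Default>(table: &[T; 16], secret_index: u8) -> T {
  let mut res = T::default();
  for i in 0 .. 16u8 {
    res = T::conditional_select(&res, &table[usize::from(i)], i.ct_eq(&secret_index));
  }
  res
}
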
@@ -54,7 +54,7 @@ rand = { version = "0.8", default-features = false, features = ["std"] }
ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] }
generalized-bulletproofs = { path = "../evrf/generalized-bulletproofs", features = ["tests"] }
ec-divisors = { path = "../evrf/divisors", features = ["pasta"] }
- pasta_curves = "0.5"
+ pasta_curves = { git = "https://github.com/kayabaNerve/pasta_curves", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616" }

[features]
std = [

@@ -85,7 +85,7 @@ use ciphersuite::{
};
use multiexp::multiexp_vartime;

- use generalized_bulletproofs::arithmetic_circuit_proof::*;
+ use generalized_bulletproofs::{Generators, arithmetic_circuit_proof::*};
use ec_divisors::DivisorCurve;

use crate::{Participant, ThresholdParams, Interpolation, ThresholdCore, ThresholdKeys};
@@ -277,6 +277,7 @@ impl<C: EvrfCurve> EvrfDkg<C> {
if evrf_public_keys.iter().any(|key| bool::from(key.is_identity())) {
Err(EvrfError::PublicKeyWasIdentity)?;
};
+ // This also checks the private key is not 0
let evrf_public_key = <C::EmbeddedCurve as Ciphersuite>::generator() * evrf_private_key.deref();
if !evrf_public_keys.iter().any(|key| *key == evrf_public_key) {
Err(EvrfError::NotAParticipant)?;
@@ -359,7 +360,7 @@ impl<C: EvrfCurve> EvrfDkg<C> {

let transcript = Self::initial_transcript(context, evrf_public_keys, t);

- let mut evrf_verifier = generators.0.batch_verifier();
+ let mut evrf_verifier = Generators::batch_verifier();
for (i, participation) in participations {
let evrf_public_key = evrf_public_keys[usize::from(u16::from(*i)) - 1];

@@ -395,7 +396,7 @@ impl<C: EvrfCurve> EvrfDkg<C> {
if faulty.contains(i) {
continue;
}
- let mut evrf_verifier = generators.0.batch_verifier();
+ let mut evrf_verifier = Generators::batch_verifier();
Evrf::<C>::verify(
rng,
&generators.0,

@@ -129,15 +129,11 @@ impl<C: EvrfCurve> Evrf<C> {
/// Read a Variable from a theoretical vector commitment tape
fn read_one_from_tape(generators_to_use: usize, start: &mut usize) -> Variable {
// Each commitment has twice as many variables as generators in use
- let commitment = *start / (2 * generators_to_use);
+ let commitment = *start / generators_to_use;
// The index will be less than the amount of generators in use, as half are left and half are
// right
let index = *start % generators_to_use;
- let res = if (*start / generators_to_use) % 2 == 0 {
- Variable::CG { commitment, index }
- } else {
- Variable::CH { commitment, index }
- };
+ let res = Variable::CG { commitment, index };
*start += 1;
res
}
@@ -202,8 +198,8 @@ impl<C: EvrfCurve> Evrf<C> {
padded_pow_of_2 <<= 1;
}
// This may as small as 16, which would create an excessive amount of vector commitments
- // We set a floor of 1024 rows for bandwidth reasons
- padded_pow_of_2.max(1024)
+ // We set a floor of 2048 rows for bandwidth reasons
+ padded_pow_of_2.max(2048)
};
(expected_muls, generators_to_use)
}
@@ -213,7 +209,7 @@ impl<C: EvrfCurve> Evrf<C> {
evrf_public_key: (C::F, C::F),
coefficients: usize,
ecdh_commitments: &[[(C::F, C::F); 2]],
- generator_tables: &[GeneratorTable<C::F, C::EmbeddedCurveParameters>],
+ generator_tables: &[&GeneratorTable<C::F, C::EmbeddedCurveParameters>],
circuit: &mut Circuit<C>,
transcript: &mut impl Transcript,
) {
@@ -376,8 +372,10 @@ impl<C: EvrfCurve> Evrf<C> {
let evrf_public_key;
let mut actual_coefficients = Vec::with_capacity(coefficients);
{
+ // This is checked at a higher level
let dlog =
- ScalarDecomposition::<<C::EmbeddedCurve as Ciphersuite>::F>::new(**evrf_private_key);
+ ScalarDecomposition::<<C::EmbeddedCurve as Ciphersuite>::F>::new(**evrf_private_key)
+ .expect("eVRF private key was zero");
let points = Self::transcript_to_points(transcript, coefficients);

// Start by pushing the discrete logarithm onto the tape
|
||||
}
|
||||
}
|
||||
let dlog =
|
||||
ScalarDecomposition::<<C::EmbeddedCurve as Ciphersuite>::F>::new(ecdh_private_key);
|
||||
ScalarDecomposition::<<C::EmbeddedCurve as Ciphersuite>::F>::new(ecdh_private_key)
|
||||
.expect("ECDH private key was zero");
|
||||
let ecdh_commitment = <C::EmbeddedCurve as Ciphersuite>::generator() * ecdh_private_key;
|
||||
ecdh_commitments.push(ecdh_commitment);
|
||||
ecdh_commitments_xy.last_mut().unwrap()[j] =
|
||||
@@ -471,15 +470,10 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
Self::muls_and_generators_to_use(coefficients, ecdh_public_keys.len());
|
||||
|
||||
let mut vector_commitments =
|
||||
Vec::with_capacity(vector_commitment_tape.len().div_ceil(2 * generators_to_use));
|
||||
for chunk in vector_commitment_tape.chunks(2 * generators_to_use) {
|
||||
Vec::with_capacity(vector_commitment_tape.len().div_ceil(generators_to_use));
|
||||
for chunk in vector_commitment_tape.chunks(generators_to_use) {
|
||||
let g_values = chunk[.. generators_to_use.min(chunk.len())].to_vec().into();
|
||||
let h_values = chunk[generators_to_use.min(chunk.len()) ..].to_vec().into();
|
||||
vector_commitments.push(PedersenVectorCommitment {
|
||||
g_values,
|
||||
h_values,
|
||||
mask: C::F::random(&mut *rng),
|
||||
});
|
||||
vector_commitments.push(PedersenVectorCommitment { g_values, mask: C::F::random(&mut *rng) });
|
||||
}
|
||||
|
||||
vector_commitment_tape.zeroize();
|
||||
@@ -499,7 +493,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
.iter()
|
||||
.map(|commitment| {
|
||||
commitment
|
||||
.commit(generators.g_bold_slice(), generators.h_bold_slice(), generators.h())
|
||||
.commit(generators.g_bold_slice(), generators.h())
|
||||
.ok_or(AcError::NotEnoughGenerators)
|
||||
})
|
||||
.collect::<Result<_, _>>()?,
|
||||
@@ -518,7 +512,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
evrf_public_key,
|
||||
coefficients,
|
||||
&ecdh_commitments_xy,
|
||||
&generator_tables,
|
||||
&generator_tables.iter().collect::<Vec<_>>(),
|
||||
&mut circuit,
|
||||
&mut transcript,
|
||||
);
|
||||
@@ -543,7 +537,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
let mut agg_weights = Vec::with_capacity(commitments.len());
|
||||
agg_weights.push(C::F::ONE);
|
||||
while agg_weights.len() < commitments.len() {
|
||||
agg_weights.push(transcript.challenge::<C::F>());
|
||||
agg_weights.push(transcript.challenge::<C>());
|
||||
}
|
||||
let mut x = commitments
|
||||
.iter()
|
||||
@@ -554,7 +548,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
// Do a Schnorr PoK for the randomness of the aggregated Pedersen commitment
|
||||
let mut r = C::F::random(&mut *rng);
|
||||
transcript.push_point(generators.h() * r);
|
||||
let c = transcript.challenge::<C::F>();
|
||||
let c = transcript.challenge::<C>();
|
||||
transcript.push_scalar(r + (c * x));
|
||||
r.zeroize();
|
||||
x.zeroize();
|
||||
@@ -615,7 +609,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
let coeffs_vc_variables = dlog_len + ((1 + (2 * coefficients)) * dlog_proof_len);
|
||||
let ecdhs_vc_variables = ((2 * ecdh_public_keys.len()) * dlog_len) +
|
||||
((2 * 2 * ecdh_public_keys.len()) * dlog_proof_len);
|
||||
let vcs = (coeffs_vc_variables + ecdhs_vc_variables).div_ceil(2 * generators_to_use);
|
||||
let vcs = (coeffs_vc_variables + ecdhs_vc_variables).div_ceil(generators_to_use);
|
||||
|
||||
let all_commitments =
|
||||
transcript.read_commitments(vcs, coefficients + ecdh_public_keys.len()).map_err(|_| ())?;
|
||||
@@ -642,7 +636,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
<C::EmbeddedCurve as Ciphersuite>::G::to_xy(evrf_public_key).ok_or(())?,
|
||||
coefficients,
|
||||
&ecdh_keys_xy,
|
||||
&generator_tables,
|
||||
&generator_tables.iter().collect::<Vec<_>>(),
|
||||
&mut circuit,
|
||||
&mut transcript,
|
||||
);
|
||||
@@ -665,7 +659,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
let mut agg_weights = Vec::with_capacity(commitments.len());
|
||||
agg_weights.push(C::F::ONE);
|
||||
while agg_weights.len() < commitments.len() {
|
||||
agg_weights.push(transcript.challenge::<C::F>());
|
||||
agg_weights.push(transcript.challenge::<C>());
|
||||
}
|
||||
|
||||
let sum_points =
|
||||
@@ -677,7 +671,7 @@ impl<C: EvrfCurve> Evrf<C> {
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
let R = transcript.read_point::<C>().map_err(|_| ())?;
|
||||
let c = transcript.challenge::<C::F>();
|
||||
let c = transcript.challenge::<C>();
|
||||
let s = transcript.read_scalar::<C>().map_err(|_| ())?;
|
||||
|
||||
// Doesn't batch verify this as we can't access the internals of the GBP batch verifier
|
||||
|
||||
@@ -15,7 +15,7 @@ use ciphersuite::{
};
use pasta_curves::{Ep, Eq, Fp, Fq};

- use generalized_bulletproofs::tests::generators;
+ use generalized_bulletproofs::{Generators, tests::generators};
use generalized_bulletproofs_ec_gadgets::DiscreteLogParameters;

use crate::evrf::proof::*;
@@ -35,6 +35,9 @@ impl Ciphersuite for Pallas {
// This is solely test code so it's fine
Self::F::from_uniform_bytes(&Self::H::digest([dst, msg].concat()).into())
}
+ fn reduce_512(scalar: [u8; 64]) -> Self::F {
+ Self::F::from_uniform_bytes(&scalar)
+ }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
@@ -52,6 +55,9 @@ impl Ciphersuite for Vesta {
// This is solely test code so it's fine
Self::F::from_uniform_bytes(&Self::H::digest([dst, msg].concat()).into())
}
+ fn reduce_512(scalar: [u8; 64]) -> Self::F {
+ Self::F::from_uniform_bytes(&scalar)
+ }
}

pub struct VestaParams;
@@ -68,7 +74,7 @@ impl EvrfCurve for Pallas {
}

fn evrf_proof_test<C: EvrfCurve>() {
- let generators = generators(1024);
+ let generators = generators(2048);
let vesta_private_key = Zeroizing::new(<C::EmbeddedCurve as Ciphersuite>::F::random(&mut OsRng));
let ecdh_public_keys = [
<C::EmbeddedCurve as Ciphersuite>::G::random(&mut OsRng),
@@ -81,7 +87,7 @@ fn evrf_proof_test<C: EvrfCurve>() {
println!("Proving time: {:?}", time.elapsed());

let time = Instant::now();
- let mut verifier = generators.batch_verifier();
+ let mut verifier = Generators::batch_verifier();
Evrf::<C>::verify(
&mut OsRng,
&generators,

@@ -28,6 +28,10 @@ impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {
C::G::generator() * <C as Ciphersuite>::hash_to_F(b"DKG Promotion Test", b"generator")
}

+ fn reduce_512(scalar: [u8; 64]) -> Self::F {
+ <C as Ciphersuite>::reduce_512(scalar)
+ }
+
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
<C as Ciphersuite>::hash_to_F(dst, data)
}

@@ -161,7 +161,16 @@ macro_rules! field {
res *= res;
}
}
- res *= table[usize::from(bits)];
+
+ let mut scale_by = $FieldName(Residue::ONE);
+ #[allow(clippy::needless_range_loop)]
+ for i in 0 .. 16 {
+ #[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+ {
+ scale_by = <_>::conditional_select(&scale_by, &table[i], bits.ct_eq(&(i as u8)));
+ }
+ }
+ res *= scale_by;
bits = 0;
}
}

@@ -242,7 +242,16 @@ impl Mul<Scalar> for Point {
res = res.double();
}
}
- res += table[usize::from(bits)];
+
+ let mut add_by = Point::identity();
+ #[allow(clippy::needless_range_loop)]
+ for i in 0 .. 16 {
+ #[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+ {
+ add_by = <_>::conditional_select(&add_by, &table[i], bits.ct_eq(&(i as u8)));
+ }
+ }
+ res += add_by;
bits = 0;
}
}

@@ -3,19 +3,25 @@ name = "generalized-bulletproofs-circuit-abstraction"
version = "0.1.0"
description = "An abstraction for arithmetic circuits over Generalized Bulletproofs"
license = "MIT"
- repository = "https://github.com/serai-dex/serai/tree/develop/crypto/evrf/circuit-abstraction"
+ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/fcmps/circuit-abstraction"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["bulletproofs", "circuit"]
edition = "2021"
- rust-version = "1.80"
+ rust-version = "1.69"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
+ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
+
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }

- ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false, features = ["std"] }
+ ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false }

- generalized-bulletproofs = { path = "../generalized-bulletproofs" }
+ generalized-bulletproofs = { path = "../generalized-bulletproofs", default-features = false }
+
+ [features]
+ std = ["std-shims/std", "zeroize/std", "ciphersuite/std", "generalized-bulletproofs/std"]
+ default = ["std"]

@@ -1,14 +1,14 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
+ #![cfg_attr(not(feature = "std"), no_std)]
#![deny(missing_docs)]
#![allow(non_snake_case)]

+ use std_shims::{vec, vec::Vec};
+
use zeroize::{Zeroize, ZeroizeOnDrop};

- use ciphersuite::{
- group::ff::{Field, PrimeField},
- Ciphersuite,
- };
+ use ciphersuite::{group::ff::Field, Ciphersuite};

use generalized_bulletproofs::{
ScalarVector, PedersenCommitment, PedersenVectorCommitment, ProofGenerators,
@@ -26,16 +26,28 @@ pub trait Transcript {
///
/// It is the caller's responsibility to have properly transcripted all variables prior to
/// sampling this challenge.
- fn challenge<F: PrimeField>(&mut self) -> F;
+ fn challenge<C: Ciphersuite>(&mut self) -> C::F;
+
+ /// Sample a challenge as a byte array.
+ ///
+ /// It is the caller's responsibility to have properly transcripted all variables prior to
+ /// sampling this challenge.
+ fn challenge_bytes(&mut self) -> [u8; 64];
}
impl Transcript for ProverTranscript {
- fn challenge<F: PrimeField>(&mut self) -> F {
- self.challenge()
+ fn challenge<C: Ciphersuite>(&mut self) -> C::F {
+ self.challenge::<C>()
}
+ fn challenge_bytes(&mut self) -> [u8; 64] {
+ self.challenge_bytes()
+ }
}
impl Transcript for VerifierTranscript<'_> {
- fn challenge<F: PrimeField>(&mut self) -> F {
- self.challenge()
+ fn challenge<C: Ciphersuite>(&mut self) -> C::F {
+ self.challenge::<C>()
}
+ fn challenge_bytes(&mut self) -> [u8; 64] {
+ self.challenge_bytes()
+ }
}

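Note: moving challenge from `F: PrimeField` to `C: Ciphersuite` presumably lets
implementations derive scalars via the ciphersuite's own reduction. A hedged
sketch of how the two new methods can compose (not the crate's actual internals):

fn challenge_via_reduce_512<C: Ciphersuite>(transcript: &mut impl Transcript) -> C::F {
  C::reduce_512(transcript.challenge_bytes())
}
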
@@ -64,7 +76,6 @@ impl<C: Ciphersuite> Circuit<C> {
}

/// Create an instance to prove satisfaction of a circuit with.
- // TODO: Take the transcript here
#[allow(clippy::type_complexity)]
pub fn prove(
vector_commitments: Vec<PedersenVectorCommitment<C>>,
@@ -78,14 +89,13 @@ impl<C: Ciphersuite> Circuit<C> {
}

/// Create an instance to verify a proof with.
- // TODO: Take the transcript here
pub fn verify() -> Self {
Self { muls: 0, constraints: vec![], prover: None }
}

/// Evaluate a linear combination.
///
- /// Yields WL aL + WR aR + WO aO + WCG CG + WCH CH + WV V + c.
+ /// Yields WL aL + WR aR + WO aO + WCG CG + WV V + c.
///
/// May panic if the linear combination references non-existent terms.
///
@@ -107,11 +117,6 @@ impl<C: Ciphersuite> Circuit<C> {
res += C.g_values[*j] * weight;
}
}
- for (WCH, C) in lincomb.WCH().iter().zip(&prover.C) {
- for (j, weight) in WCH {
- res += C.h_values[*j] * weight;
- }
- }
for (index, weight) in lincomb.WV() {
res += prover.V[*index].value * weight;
}
@@ -176,13 +181,13 @@ impl<C: Ciphersuite> Circuit<C> {
// We can't deconstruct the witness as it implements Drop (per ZeroizeOnDrop)
// Accordingly, we take the values within it and move forward with those
let mut aL = vec![];
- std::mem::swap(&mut prover.aL, &mut aL);
+ core::mem::swap(&mut prover.aL, &mut aL);
let mut aR = vec![];
- std::mem::swap(&mut prover.aR, &mut aR);
+ core::mem::swap(&mut prover.aR, &mut aR);
let mut C = vec![];
- std::mem::swap(&mut prover.C, &mut C);
+ core::mem::swap(&mut prover.C, &mut C);
let mut V = vec![];
- std::mem::swap(&mut prover.V, &mut V);
+ core::mem::swap(&mut prover.V, &mut V);
ArithmeticCircuitWitness::new(ScalarVector::from(aL), ScalarVector::from(aR), C, V)
})
.transpose()?;

@@ -3,35 +3,39 @@ name = "ec-divisors"
version = "0.1.0"
description = "A library for calculating elliptic curve divisors"
license = "MIT"
- repository = "https://github.com/serai-dex/serai/tree/develop/crypto/evrf/divisors"
+ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/divisors"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["ciphersuite", "ff", "group"]
edition = "2021"
- rust-version = "1.71"
+ rust-version = "1.69"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
- rand_core = { version = "0.6", default-features = false }
- zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
+ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }

- subtle = { version = "2", default-features = false, features = ["std"] }
- ff = { version = "0.13", default-features = false, features = ["std", "bits"] }
+ rand_core = { version = "0.6", default-features = false }
+ zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
+
+ subtle = { version = "2", default-features = false }
+ ff = { version = "0.13", default-features = false, features = ["bits"] }
group = { version = "0.13", default-features = false }

- hex = { version = "0.4", optional = true }
- dalek-ff-group = { path = "../../dalek-ff-group", features = ["std"], optional = true }
- pasta_curves = { version = "0.5", default-features = false, features = ["bits", "alloc"], optional = true }
+ hex = { version = "0.4", default-features = false, optional = true }
+ dalek-ff-group = { path = "../../dalek-ff-group", default-features = false, optional = true }
+ pasta_curves = { version = "0.5", git = "https://github.com/kayabaNerve/pasta_curves.git", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616", default-features = false, features = ["bits", "alloc"], optional = true }

[dev-dependencies]
rand_core = { version = "0.6", features = ["getrandom"] }

hex = "0.4"
dalek-ff-group = { path = "../../dalek-ff-group", features = ["std"] }
- pasta_curves = { version = "0.5", default-features = false, features = ["bits", "alloc"] }
+ pasta_curves = { version = "0.5", git = "https://github.com/kayabaNerve/pasta_curves.git", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616", default-features = false, features = ["bits", "alloc"] }

[features]
- ed25519 = ["hex", "dalek-ff-group"]
+ std = ["std-shims/std", "zeroize/std", "subtle/std", "ff/std", "dalek-ff-group?/std"]
+ ed25519 = ["hex/alloc", "dalek-ff-group"]
pasta = ["pasta_curves"]
+ default = ["std"]

@@ -1,8 +1,11 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
+ #![cfg_attr(not(feature = "std"), no_std)]
#![deny(missing_docs)]
#![allow(non_snake_case)]

+ use std_shims::{vec, vec::Vec};
+
use subtle::{Choice, ConstantTimeEq, ConstantTimeGreater, ConditionallySelectable};
use zeroize::{Zeroize, ZeroizeOnDrop};

@@ -18,7 +21,7 @@ pub use poly::Poly;
mod tests;

/// A curve usable with this library.
- pub trait DivisorCurve: Group + ConstantTimeEq + ConditionallySelectable {
+ pub trait DivisorCurve: Group + ConstantTimeEq + ConditionallySelectable + Zeroize {
/// An element of the field this curve is defined over.
type FieldElement: Zeroize + PrimeField + ConditionallySelectable;

@@ -54,6 +57,8 @@ pub trait DivisorCurve: Group + ConstantTimeEq + ConditionallySelectable {
/// Convert a point to its x and y coordinates.
///
/// Returns None if passed the point at infinity.
+ ///
+ /// This function may run in time variable to if the point is the identity.
fn to_xy(point: Self) -> Option<(Self::FieldElement, Self::FieldElement)>;
}

@@ -271,8 +276,16 @@ pub struct ScalarDecomposition<F: Zeroize + PrimeFieldBits> {
}

impl<F: Zeroize + PrimeFieldBits> ScalarDecomposition<F> {
- /// Decompose a scalar.
- pub fn new(scalar: F) -> Self {
+ /// Decompose a non-zero scalar.
+ ///
+ /// Returns `None` if the scalar is zero.
+ ///
+ /// This function is constant time if the scalar is non-zero.
+ pub fn new(scalar: F) -> Option<Self> {
+ if bool::from(scalar.is_zero()) {
+ None?;
+ }
+
/*
We need the sum of the coefficients to equal F::NUM_BITS. The scalar's bits will be less than
F::NUM_BITS. Accordingly, we need to increment the sum of the coefficients without
|
||||
}
|
||||
debug_assert!(bool::from(decomposition.iter().sum::<u64>().ct_eq(&num_bits)));
|
||||
|
||||
ScalarDecomposition { scalar, decomposition }
|
||||
Some(ScalarDecomposition { scalar, decomposition })
|
||||
}
|
||||
|
||||
/// The scalar.
|
||||
pub fn scalar(&self) -> &F {
|
||||
&self.scalar
|
||||
}
|
||||
|
||||
/// The decomposition of the scalar.
|
||||
@@ -414,7 +432,7 @@ impl<F: Zeroize + PrimeFieldBits> ScalarDecomposition<F> {
|
||||
///
|
||||
/// This function executes in constant time with regards to the scalar.
|
||||
///
|
||||
/// This function MAY panic if this scalar is zero.
|
||||
/// This function MAY panic if the generator is the point at infinity.
|
||||
pub fn scalar_mul_divisor<C: Zeroize + DivisorCurve<Scalar = F>>(
|
||||
&self,
|
||||
mut generator: C,
|
||||
@@ -430,37 +448,19 @@ impl<F: Zeroize + PrimeFieldBits> ScalarDecomposition<F> {
divisor_points[0] = -generator * self.scalar;

// Write the decomposition
- let mut write_to: u32 = 1;
+ let mut write_above: u64 = 0;
for coefficient in &self.decomposition {
- let mut coefficient = *coefficient;
- // Iterate over the maximum amount of iters for this value to be constant time regardless of
- // any branch prediction algorithms
- for _ in 0 .. <C::Scalar as PrimeField>::NUM_BITS {
- // Write the generator to the slot we're supposed to
- /*
- Without this loop, we'd increment this dependent on the distribution within the
- decomposition. If the distribution is bottom-heavy, we won't access the tail of
- `divisor_points` for a while, risking it being ejected out of the cache (causing a cache
- miss which may not occur with a top-heavy distribution which quickly moves to the tail).
-
- This is O(log2(NUM_BITS) ** 3) though, as this the third loop, which is horrific.
- */
- for i in 1 ..= <C::Scalar as PrimeField>::NUM_BITS {
- divisor_points[i as usize] =
- <_>::conditional_select(&divisor_points[i as usize], &generator, i.ct_eq(&write_to));
- }
- // If the coefficient isn't zero, increment write_to (so we don't overwrite this generator
- // when it should be there)
- let coefficient_not_zero = !coefficient.ct_eq(&0);
- write_to = <_>::conditional_select(&write_to, &(write_to + 1), coefficient_not_zero);
- // Subtract one from the coefficient, if it's not zero and won't underflow
- coefficient =
- <_>::conditional_select(&coefficient, &coefficient.wrapping_sub(1), coefficient_not_zero);
+ // Write the generator to every slot except the slots we have already written to.
+ for i in 1 ..= (<C::Scalar as PrimeField>::NUM_BITS as u64) {
+ divisor_points[i as usize].conditional_assign(&generator, i.ct_gt(&write_above));
}
+
+ // Increase the next write start by the coefficient.
+ write_above += coefficient;
generator = generator.double();
}

- // Create a divisor out of all points except the last point which is solely scratch
+ // Create a divisor out of the points
let res = new_divisor(&divisor_points).unwrap();
divisor_points.zeroize();
res
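Note: the rewrite above drops the innermost loop by writing every slot past a
running offset with conditional_assign, keyed on ct_gt instead of ct_eq. A
self-contained sketch of that pattern over u64 slots:

use subtle::{ConditionallySelectable, ConstantTimeGreater};

// Writes `value` into every slot whose index exceeds write_above, visiting all
// slots so the access pattern is independent of the secret offset.
fn ct_tail_fill(slots: &mut [u64], write_above: u64, value: u64) {
  for i in 1 .. slots.len() as u64 {
    slots[i as usize].conditional_assign(&value, i.ct_gt(&write_above));
  }
}
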
@@ -511,6 +511,7 @@ mod pasta {

#[cfg(any(test, feature = "ed25519"))]
mod ed25519 {
+ use subtle::{Choice, ConditionallySelectable};
use group::{
ff::{Field, PrimeField},
Group, GroupEncoding,
@@ -558,9 +559,13 @@ mod ed25519 {
((D * edwards_y_sq) + Self::FieldElement::ONE).invert().unwrap())
.sqrt()
.unwrap();
- if u8::from(bool::from(edwards_x.is_odd())) != x_is_odd {
- edwards_x = -edwards_x;
- }

+ // Negate the x coordinate if the sign doesn't match
+ edwards_x = <_>::conditional_select(
+ &edwards_x,
+ &-edwards_x,
+ edwards_x.is_odd() ^ Choice::from(x_is_odd),
+ );

// Calculate the x and y coordinates for Wei25519
let edwards_y_plus_one = Self::FieldElement::ONE + edwards_y;

@@ -1,4 +1,5 @@
use core::ops::{Add, Neg, Sub, Mul, Rem};
+ use std_shims::{vec, vec::Vec};

use subtle::{Choice, ConstantTimeEq, ConstantTimeGreater, ConditionallySelectable};
use zeroize::{Zeroize, ZeroizeOnDrop};
@@ -257,7 +258,7 @@ impl<F: From<u64> + Zeroize + PrimeField> Poly<F> {
self.zero_coefficient = F::ZERO;

// Move the x coefficients
- std::mem::swap(&mut self.yx_coefficients[power_of_y - 1], &mut self.x_coefficients);
+ core::mem::swap(&mut self.yx_coefficients[power_of_y - 1], &mut self.x_coefficients);
self.x_coefficients = vec![];

self
@@ -564,7 +565,7 @@ impl<F: From<u64> + Zeroize + PrimeField> Poly<F> {
quotient = conditional_select_poly(
quotient,
// If the dividing coefficient was for y**0 x**0, we return the poly scaled by its inverse
- self.clone() * denominator_dividing_coefficient_inv,
+ self * denominator_dividing_coefficient_inv,
denominator_dividing_coefficient.ct_eq(&CoefficientIndex { y_pow: 0, x_pow: 0 }),
);
remainder = conditional_select_poly(

@@ -3,19 +3,25 @@ name = "generalized-bulletproofs-ec-gadgets"
version = "0.1.0"
description = "Gadgets for working with an embedded Elliptic Curve in a Generalized Bulletproofs circuit"
license = "MIT"
- repository = "https://github.com/serai-dex/serai/tree/develop/crypto/evrf/ec-gadgets"
+ repository = "https://github.com/serai-dex/serai/tree/develop/crypto/fcmps/ec-gadgets"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["bulletproofs", "circuit", "divisors"]
edition = "2021"
- rust-version = "1.80"
+ rust-version = "1.69"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
+ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
+
generic-array = { version = "1", default-features = false, features = ["alloc"] }

- ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false, features = ["std"] }
+ ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false }

- generalized-bulletproofs-circuit-abstraction = { path = "../circuit-abstraction" }
+ generalized-bulletproofs-circuit-abstraction = { path = "../circuit-abstraction", default-features = false }
+
+ [features]
+ std = ["std-shims/std", "ciphersuite/std", "generalized-bulletproofs-circuit-abstraction/std"]
+ default = ["std"]

@@ -1,4 +1,5 @@
use core::fmt;
+ use std_shims::{vec, vec::Vec};

use ciphersuite::{
group::ff::{Field, PrimeField, BatchInverter},
@@ -10,11 +11,6 @@ use generalized_bulletproofs_circuit_abstraction::*;
use crate::*;

/// Parameters for a discrete logarithm proof.
- ///
- /// This isn't required to be implemented by the Field/Group/Ciphersuite, solely a struct, to
- /// enable parameterization of discrete log proofs to the bitlength of the discrete logarithm.
- /// While that may be F::NUM_BITS, a discrete log proof a for a full scalar, it could also be 64,
- /// a discrete log proof for a u64 (such as if opening a Pedersen commitment in-circuit).
pub trait DiscreteLogParameters {
/// The amount of bits used to represent a scalar.
type ScalarBits: ArrayLength;
@@ -30,8 +26,8 @@ pub trait DiscreteLogParameters {
|
||||
|
||||
/// The amount of y x**i coefficients in a divisor.
|
||||
///
|
||||
/// This is the amount of points in a divisor (the amount of bits in a scalar, plus one) plus
|
||||
/// one, divided by two, minus two.
|
||||
/// This is the amount of points in a divisor (the amount of bits in a scalar, plus one) divided
|
||||
/// by two, minus two.
|
||||
type YxCoefficients: ArrayLength;
|
||||
}
|
||||
|
||||
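To sanity-check the revised arithmetic in that doc comment: a divisor for an s-bit scalar interpolates s + 1 points, and the revised count is that divided by two, minus two. An illustrative check (not crate code):

```rust
fn yx_coefficients(scalar_bits: usize) -> usize {
  // The amount of points in a divisor is the amount of bits in a scalar, plus one
  let points = scalar_bits + 1;
  // ... divided by two, minus two, per the revised doc comment
  (points / 2) - 2
}

fn main() {
  // e.g. a 255-bit scalar: 256 points, so 126 y x**i coefficients
  assert_eq!(yx_coefficients(255), 126);
}
```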
@@ -106,8 +102,6 @@ pub struct Divisor<Parameters: DiscreteLogParameters> {
   /// exceeding trivial complexity.
   pub y: Variable,
   /// The coefficients for the `y**1 x**i` terms of the polynomial.
-  // This subtraction enforces the divisor to have at least 4 points which is acceptable.
-  // TODO: Double check these constants
   pub yx: GenericArray<Variable, Parameters::YxCoefficients>,
   /// The coefficients for the `x**i` terms of the polynomial, skipping x**1.
   ///
@@ -324,7 +318,7 @@ pub trait EcDlogGadgets<C: Ciphersuite> {
     &self,
     transcript: &mut T,
     curve: &CurveSpec<C::F>,
-    generators: &[GeneratorTable<C::F, Parameters>],
+    generators: &[&GeneratorTable<C::F, Parameters>],
   ) -> (DiscreteLogChallenge<C::F, Parameters>, Vec<ChallengedGenerator<C::F, Parameters>>);

   /// Prove this point has the specified discrete logarithm over the specified generator.
@@ -355,12 +349,14 @@ impl<C: Ciphersuite> EcDlogGadgets<C> for Circuit<C> {
     &self,
     transcript: &mut T,
     curve: &CurveSpec<C::F>,
-    generators: &[GeneratorTable<C::F, Parameters>],
+    generators: &[&GeneratorTable<C::F, Parameters>],
   ) -> (DiscreteLogChallenge<C::F, Parameters>, Vec<ChallengedGenerator<C::F, Parameters>>) {
     // Get the challenge points
+    // TODO: Implement a proper hash to curve
+    let sign_of_points = transcript.challenge_bytes();
+    let sign_of_point_0 = (sign_of_points[0] & 1) == 1;
+    let sign_of_point_1 = ((sign_of_points[0] >> 1) & 1) == 1;
     let (c0_x, c0_y) = loop {
-      let c0_x: C::F = transcript.challenge();
+      let c0_x = transcript.challenge::<C>();
       let Some(c0_y) =
         Option::<C::F>::from(((c0_x.square() * c0_x) + (curve.a * c0_x) + curve.b).sqrt())
       else {
@@ -368,17 +364,16 @@ impl<C: Ciphersuite> EcDlogGadgets<C> for Circuit<C> {
       };
-      // Takes the even y coordinate as to not be dependent on whatever root the above sqrt
-      // happens to returns
-      // TODO: Randomly select which to take
-      break (c0_x, if bool::from(c0_y.is_odd()) { -c0_y } else { c0_y });
+      break (c0_x, if bool::from(c0_y.is_odd()) != sign_of_point_0 { -c0_y } else { c0_y });
     };
     let (c1_x, c1_y) = loop {
-      let c1_x: C::F = transcript.challenge();
+      let c1_x = transcript.challenge::<C>();
       let Some(c1_y) =
         Option::<C::F>::from(((c1_x.square() * c1_x) + (curve.a * c1_x) + curve.b).sqrt())
       else {
        continue;
       };
-      break (c1_x, if bool::from(c1_y.is_odd()) { -c1_y } else { c1_y });
+      break (c1_x, if bool::from(c1_y.is_odd()) != sign_of_point_1 { -c1_y } else { c1_y });
     };

     // mmadd-1998-cmo
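The new `sign_of_point_*` bits make the chosen square root transcript-dependent instead of always taking the even-parity root. The selection rule, as a standalone sketch over any `ff::PrimeField` (a hypothetical helper, not this crate's code):

```rust
use ff::PrimeField;

// The two roots of y^2 differ only in sign, so flip to the other root
// whenever the recovered root's parity disagrees with the challenge bit.
fn select_root<F: PrimeField>(y: F, sign_bit: bool) -> F {
  if bool::from(y.is_odd()) != sign_bit {
    -y
  } else {
    y
  }
}
```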
@@ -483,7 +478,7 @@ impl<C: Ciphersuite> EcDlogGadgets<C> for Circuit<C> {
     let arg_iter = arg_iter.chain(dlog.iter());
     for variable in arg_iter {
       debug_assert!(
-        matches!(variable, Variable::CG { .. } | Variable::CH { .. } | Variable::V(_)),
+        matches!(variable, Variable::CG { .. } | Variable::V(_)),
         "discrete log proofs requires all arguments belong to commitments",
       );
     }
@@ -1,5 +1,6 @@
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc = include_str!("../README.md")]
+#![cfg_attr(not(feature = "std"), no_std)]
 #![deny(missing_docs)]
 #![allow(non_snake_case)]
@@ -17,20 +17,22 @@ rustdoc-args = ["--cfg", "docsrs"]
 rustversion = "1"
 hex-literal = { version = "0.4", default-features = false }

-rand_core = { version = "0.6", default-features = false, features = ["std"] }
+std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false, optional = true }
+
+rand_core = { version = "0.6", default-features = false }

-zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
-subtle = { version = "^2.4", default-features = false, features = ["std"] }
+zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
+subtle = { version = "^2.4", default-features = false }

 generic-array = { version = "1", default-features = false }
 crypto-bigint = { version = "0.5", default-features = false, features = ["zeroize"] }

 dalek-ff-group = { path = "../../dalek-ff-group", version = "0.4", default-features = false }

-blake2 = { version = "0.10", default-features = false, features = ["std"] }
-ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false, features = ["std"] }
-ec-divisors = { path = "../divisors" }
-generalized-bulletproofs-ec-gadgets = { path = "../ec-gadgets" }
+blake2 = { version = "0.10", default-features = false }
+ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false }
+ec-divisors = { path = "../divisors", default-features = false }
+generalized-bulletproofs-ec-gadgets = { path = "../ec-gadgets", default-features = false }

 [dev-dependencies]
 hex = "0.4"
@@ -38,3 +40,8 @@ hex = "0.4"
 rand_core = { version = "0.6", features = ["std"] }

 ff-group-tests = { path = "../../ff-group-tests" }
+
+[features]
+alloc = ["std-shims", "zeroize/alloc", "ciphersuite/alloc"]
+std = ["std-shims/std", "rand_core/std", "zeroize/std", "subtle/std", "blake2/std", "ciphersuite/std", "ec-divisors/std", "generalized-bulletproofs-ec-gadgets/std"]
+default = ["std"]
@@ -7,7 +7,7 @@ This curve was found via
 for finding curves (specifically, curve cycles), modified to search for curves
 whose field is the Ed25519 scalar field (not the Ed25519 field).

-```
+```ignore
 p = 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed
 q = 0x0fffffffffffffffffffffffffffffffe53f4debb78ff96877063f0306eef96b
 D = -420435
@@ -1,5 +1,9 @@
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc = include_str!("../README.md")]
+#![cfg_attr(not(feature = "std"), no_std)]
+
+#[cfg(any(feature = "alloc", feature = "std"))]
+use std_shims::io::{self, Read};

 use generic_array::typenum::{Sum, Diff, Quot, U, U1, U2};
 use ciphersuite::group::{ff::PrimeField, Group};
@@ -33,10 +37,29 @@ impl ciphersuite::Ciphersuite for Embedwards25519 {
     Point::generator()
   }

+  fn reduce_512(scalar: [u8; 64]) -> Self::F {
+    Scalar::wide_reduce(scalar)
+  }
+
   fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
     use blake2::Digest;
     Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_slice().try_into().unwrap())
   }
+
+  // We override the provided impl, which compares against the reserialization, because
+  // we already require canonicity
+  #[cfg(any(feature = "alloc", feature = "std"))]
+  #[allow(non_snake_case)]
+  fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {
+    use ciphersuite::group::GroupEncoding;
+
+    let mut encoding = <Self::G as GroupEncoding>::Repr::default();
+    reader.read_exact(encoding.as_mut())?;
+
+    let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
+      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
+    Ok(point)
+  }
 }

 impl generalized_bulletproofs_ec_gadgets::DiscreteLogParameters for Embedwards25519 {
@@ -46,7 +46,8 @@ impl ConstantTimeEq for Point {
     let y1 = self.y * other.z;
     let y2 = other.y * self.z;

-    (self.x.is_zero() & other.x.is_zero()) | (x1.ct_eq(&x2) & y1.ct_eq(&y2))
+    // Both identity or equivalent over their denominators
+    (self.z.is_zero() & other.z.is_zero()) | (x1.ct_eq(&x2) & y1.ct_eq(&y2))
   }
 }
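The corrected check tests Z, not X, for the identity: in projective coordinates (X : Y : Z) the identity is exactly the Z = 0 case (a valid point may well have X = 0), while affine equality X1/Z1 = X2/Z2 is tested by cross-multiplication to avoid field inversions. A generic sketch of that test, assuming `ff`'s `Field` trait rather than this crate's `FieldElement`:

```rust
use ff::Field;
use subtle::{Choice, ConstantTimeEq};

// X1/Z1 == X2/Z2 iff X1*Z2 == X2*Z1 (likewise for Y), so no inversion is
// needed; the identity is the unique class with Z = 0.
fn projective_eq<F: Field>(p: (F, F, F), q: (F, F, F)) -> Choice {
  let (x1, y1, z1) = p;
  let (x2, y2, z2) = q;
  let both_identity = z1.is_zero() & z2.is_zero();
  let same_affine = (x1 * z2).ct_eq(&(x2 * z1)) & (y1 * z2).ct_eq(&(y2 * z1));
  both_identity | same_affine
}
```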
@@ -198,6 +199,7 @@ impl Group for Point {
     Point { x: FieldElement::ZERO, y: FieldElement::ONE, z: FieldElement::ZERO }
   }
   fn generator() -> Self {
+    // Point with the lowest valid x-coordinate
     Point {
       x: FieldElement::from_repr(hex_literal::hex!(
         "0100000000000000000000000000000000000000000000000000000000000000"
@@ -335,8 +337,10 @@ impl GroupEncoding for Point {
-    // If this the identity, set y to 1
+    // If this the identity, set y to 1 and z to 0 (instead of 1)
     let y =
       CtOption::conditional_select(&y, &CtOption::new(FieldElement::ONE, 1.into()), is_identity);
+    let z = <_>::conditional_select(&FieldElement::ONE, &FieldElement::ZERO, is_identity);
     // Create the point if we have a y solution
-    let point = y.map(|y| Point { x, y, z: FieldElement::ONE });
+    let point = y.map(|y| Point { x, y, z });

     let not_negative_zero = !(is_identity & sign);
     // Only return the point if it isn't -0
@@ -3,25 +3,27 @@ name = "generalized-bulletproofs"
 version = "0.1.0"
 description = "Generalized Bulletproofs"
 license = "MIT"
-repository = "https://github.com/serai-dex/serai/tree/develop/crypto/evrf/generalized-bulletproofs"
+repository = "https://github.com/serai-dex/serai/tree/develop/crypto/generalized-bulletproofs"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = ["ciphersuite", "ff", "group"]
 edition = "2021"
-rust-version = "1.80"
+rust-version = "1.69"

 [package.metadata.docs.rs]
 all-features = true
 rustdoc-args = ["--cfg", "docsrs"]

 [dependencies]
-rand_core = { version = "0.6", default-features = false, features = ["std"] }
+std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }

-zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
+rand_core = { version = "0.6", default-features = false }

-blake2 = { version = "0.10", default-features = false, features = ["std"] }
+zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }

-multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["std", "batch"] }
-ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false, features = ["std"] }
+blake2 = { version = "0.10", default-features = false }
+
+multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["batch"] }
+ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false }

 [dev-dependencies]
 rand_core = { version = "0.6", features = ["getrandom"] }
@@ -31,4 +33,6 @@ transcript = { package = "flexible-transcript", path = "../../transcript", featu
 ciphersuite = { path = "../../ciphersuite", features = ["ristretto"] }

 [features]
-tests = []
+std = ["std-shims/std", "rand_core/std", "zeroize/std", "blake2/std", "multiexp/std", "ciphersuite/std"]
+tests = ["std"]
+default = ["std"]
@@ -1,3 +1,5 @@
+use std_shims::{vec, vec::Vec};
+
 use rand_core::{RngCore, CryptoRng};

 use zeroize::{Zeroize, ZeroizeOnDrop};
@@ -20,10 +22,10 @@ pub use crate::lincomb::{Variable, LinComb};
 /// `aL * aR = aO, WL * aL + WR * aR + WO * aO = WV * V + c`.
 ///
 /// Generalized Bulletproofs modifies this to
-/// `aL * aR = aO, WL * aL + WR * aR + WO * aO + WCG * C_G + WCH * C_H = WV * V + c`.
+/// `aL * aR = aO, WL * aL + WR * aR + WO * aO + WCG * C_G = WV * V + c`.
 ///
 /// We implement the latter, yet represented (for simplicity) as
-/// `aL * aR = aO, WL * aL + WR * aR + WO * aO + WCG * C_G + WCH * C_H + WV * V + c = 0`.
+/// `aL * aR = aO, WL * aL + WR * aR + WO * aO + WCG * C_G + WV * V + c = 0`.
 #[derive(Clone, Debug)]
 pub struct ArithmeticCircuitStatement<'a, C: Ciphersuite> {
   generators: ProofGenerators<'a, C>,
@@ -202,16 +204,10 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
       if c.g_values.len() > n {
         Err(AcError::NotEnoughGenerators)?;
       }
-      if c.h_values.len() > n {
-        Err(AcError::NotEnoughGenerators)?;
-      }
       // The Pedersen vector commitments internally have n terms
       while c.g_values.len() < n {
         c.g_values.0.push(C::F::ZERO);
       }
-      while c.h_values.len() < n {
-        c.h_values.0.push(C::F::ZERO);
-      }
     }

     // Check the witness's consistency with the statement
@@ -227,12 +223,7 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
       }
     }
     for (commitment, opening) in self.C.0.iter().zip(witness.c.iter()) {
-      if Some(*commitment) !=
-        opening.commit(
-          self.generators.g_bold_slice(),
-          self.generators.h_bold_slice(),
-          self.generators.h(),
-        )
+      if Some(*commitment) != opening.commit(self.generators.g_bold_slice(), self.generators.h())
       {
         Err(AcError::InconsistentWitness)?;
       }
@@ -250,11 +241,6 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
             weights.iter().map(|(j, weight)| *weight * c.g_values[*j])
           }),
         )
-        .chain(
-          constraint.WCH.iter().zip(&witness.c).flat_map(|(weights, c)| {
-            weights.iter().map(|(j, weight)| *weight * c.h_values[*j])
-          }),
-        )
         .chain(constraint.WV.iter().map(|(i, weight)| *weight * witness.v[*i].value))
         .chain(core::iter::once(constraint.c))
         .sum::<C::F>();
@@ -306,8 +292,8 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     transcript.push_point(AI);
     transcript.push_point(AO);
     transcript.push_point(S);
-    let y = transcript.challenge();
-    let z = transcript.challenge();
+    let y = transcript.challenge::<C>();
+    let z = transcript.challenge::<C>();
     let YzChallenges { y_inv, z } = self.yz_challenges(y, z);
     let y = ScalarVector::powers(y, n);
@@ -318,7 +304,7 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     // polynomial).

     // ni = n'
-    let ni = 2 * (c + 1);
+    let ni = 2 + (2 * (c / 2));
     // These indexes are from the Generalized Bulletproofs paper
     #[rustfmt::skip]
     let ilr = ni / 2; // 1 if c = 0
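A quick numeric check of the two `n'` formulas above (illustrative arithmetic only, not crate code):

```rust
fn main() {
  for c in 0 .. 6 {
    let old_ni = 2 * (c + 1); // 2, 4, 6, 8, 10, 12
    let new_ni = 2 + (2 * (c / 2)); // 2, 2, 4, 4, 6, 6
    println!("c = {c}: old n' = {old_ni}, new n' = {new_ni}");
  }
}
```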
@@ -379,32 +365,25 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     // r decreasing from n' (skipping jlr)

     let mut cg_weights = Vec::with_capacity(witness.c.len());
-    let mut ch_weights = Vec::with_capacity(witness.c.len());
     for i in 0 .. witness.c.len() {
       let mut cg = ScalarVector::new(n);
-      let mut ch = ScalarVector::new(n);
       for (constraint, z) in self.constraints.iter().zip(&z.0) {
         if let Some(WCG) = constraint.WCG.get(i) {
           accumulate_vector(&mut cg, WCG, *z);
         }
-        if let Some(WCH) = constraint.WCH.get(i) {
-          accumulate_vector(&mut ch, WCH, *z);
-        }
       }
       cg_weights.push(cg);
-      ch_weights.push(ch);
     }

-    for (i, (c, (cg_weights, ch_weights))) in
-      witness.c.iter().zip(cg_weights.into_iter().zip(ch_weights)).enumerate()
-    {
-      let i = i + 1;
+    for (mut i, (c, cg_weights)) in witness.c.iter().zip(cg_weights).enumerate() {
+      if i >= ilr {
+        i += 1;
+      }
+      // Because i has skipped ilr, j will skip jlr
       let j = ni - i;

       l[i] = c.g_values.clone();
-      l[j] = ch_weights * &y_inv;
       r[j] = cg_weights;
-      r[i] = (c.h_values.clone() * &y) + &r[i];
     }

     // Multiply them to obtain t
@@ -437,7 +416,7 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
       transcript.push_point(multiexp(&[(*t, self.generators.g()), (*tau, self.generators.h())]));
     }

-    let x: ScalarVector<C::F> = ScalarVector::powers(transcript.challenge(), t.len());
+    let x: ScalarVector<C::F> = ScalarVector::powers(transcript.challenge::<C>(), t.len());

     let poly_eval = |poly: &[ScalarVector<C::F>], x: &ScalarVector<_>| -> ScalarVector<_> {
       let mut res = ScalarVector::<C::F>::new(poly[0].0.len());
@@ -477,8 +456,11 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
       let mut u = (alpha * x[ilr]) + (beta * x[io]) + (rho * x[is]);

       // Incorporate the commitment masks multiplied by the associated power of x
-      for (i, commitment) in witness.c.iter().enumerate() {
-        let i = i + 1;
+      for (mut i, commitment) in witness.c.iter().enumerate() {
+        // If this index is ni / 2, skip it
+        if i >= (ni / 2) {
+          i += 1;
+        }
         u += x[i] * commitment.mask;
       }
       u
@@ -498,7 +480,7 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     transcript.push_scalar(tau_x);
     transcript.push_scalar(u);
     transcript.push_scalar(t_caret);
-    let ip_x = transcript.challenge();
+    let ip_x = transcript.challenge::<C>();
     P_terms.push((ip_x * t_caret, self.generators.g()));
     IpStatement::new(
       self.generators,
@@ -513,16 +495,27 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
   }

   /// Verify a proof for this statement.
+  ///
+  /// This solely queues the statement for batch verification. The resulting BatchVerifier MUST
+  /// still be verified.
+  ///
+  /// If this proof returns an error, the BatchVerifier MUST be assumed corrupted and discarded.
   pub fn verify<R: RngCore + CryptoRng>(
     self,
     rng: &mut R,
     verifier: &mut BatchVerifier<C>,
     transcript: &mut VerifierTranscript,
   ) -> Result<(), AcError> {
+    if verifier.g_bold.len() < self.generators.len() {
+      verifier.g_bold.resize(self.generators.len(), C::F::ZERO);
+      verifier.h_bold.resize(self.generators.len(), C::F::ZERO);
+      verifier.h_sum.resize(self.generators.len(), C::F::ZERO);
+    }
+
     let n = self.n();
     let c = self.c();

-    let ni = 2 * (c + 1);
+    let ni = 2 + (2 * (c / 2));

     let ilr = ni / 2;
     let io = ni;
@@ -535,8 +528,8 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     let AI = transcript.read_point::<C>().map_err(|_| AcError::IncompleteProof)?;
     let AO = transcript.read_point::<C>().map_err(|_| AcError::IncompleteProof)?;
     let S = transcript.read_point::<C>().map_err(|_| AcError::IncompleteProof)?;
-    let y = transcript.challenge();
-    let z = transcript.challenge();
+    let y = transcript.challenge::<C>();
+    let z = transcript.challenge::<C>();
     let YzChallenges { y_inv, z } = self.yz_challenges(y, z);

     let mut l_weights = ScalarVector::new(n);
@@ -559,7 +552,7 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     for _ in 0 .. (t_poly_len - ni - 1) {
       T_after_ni.push(transcript.read_point::<C>().map_err(|_| AcError::IncompleteProof)?);
     }
-    let x: ScalarVector<C::F> = ScalarVector::powers(transcript.challenge(), t_poly_len);
+    let x: ScalarVector<C::F> = ScalarVector::powers(transcript.challenge::<C>(), t_poly_len);

     let tau_x = transcript.read_scalar::<C>().map_err(|_| AcError::IncompleteProof)?;
     let u = transcript.read_scalar::<C>().map_err(|_| AcError::IncompleteProof)?;
@@ -624,34 +617,25 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {
     h_bold_scalars = h_bold_scalars + &(o_weights * verifier_weight);

     let mut cg_weights = Vec::with_capacity(self.C.len());
-    let mut ch_weights = Vec::with_capacity(self.C.len());
     for i in 0 .. self.C.len() {
       let mut cg = ScalarVector::new(n);
-      let mut ch = ScalarVector::new(n);
       for (constraint, z) in self.constraints.iter().zip(&z.0) {
         if let Some(WCG) = constraint.WCG.get(i) {
           accumulate_vector(&mut cg, WCG, *z);
         }
-        if let Some(WCH) = constraint.WCH.get(i) {
-          accumulate_vector(&mut ch, WCH, *z);
-        }
       }
       cg_weights.push(cg);
-      ch_weights.push(ch);
     }

     // Push the terms for C, which increment from 0, and the terms for WC, which decrement from
     // n'
-    for (i, (C, (WCG, WCH))) in
-      self.C.0.into_iter().zip(cg_weights.into_iter().zip(ch_weights)).enumerate()
-    {
-      let i = i + 1;
+    for (mut i, (C, WCG)) in self.C.0.into_iter().zip(cg_weights).enumerate() {
+      if i >= (ni / 2) {
+        i += 1;
+      }
       let j = ni - i;
       verifier.additional.push((x[i], C));
       h_bold_scalars = h_bold_scalars + &(WCG * x[j]);
-      for (i, scalar) in (WCH * &y_inv * x[j]).0.into_iter().enumerate() {
-        verifier.g_bold[i] += scalar;
-      }
     }

     // All terms for h_bold here have actually been for h_bold', h_bold * y_inv
@@ -666,7 +650,7 @@ impl<'a, C: Ciphersuite> ArithmeticCircuitStatement<'a, C> {

     // Prove for lines 88, 92 with an Inner-Product statement
     // This inlines Protocol 1, as our IpStatement implements Protocol 2
-    let ip_x = transcript.challenge();
+    let ip_x = transcript.challenge::<C>();
     // P is amended with this additional term
     verifier.g += verifier_weight * ip_x * t_caret;
     IpStatement::new(self.generators, y_inv, ip_x, P::Verifier { verifier_weight })
@@ -1,3 +1,5 @@
+use std_shims::{vec, vec::Vec};
+
 use multiexp::multiexp_vartime;
 use ciphersuite::{group::ff::Field, Ciphersuite};
@@ -186,7 +188,7 @@ impl<'a, C: Ciphersuite> IpStatement<'a, C> {
     // Now that we've calculate L, R, transcript them to receive x (26-27)
     transcript.push_point(L);
     transcript.push_point(R);
-    let x: C::F = transcript.challenge();
+    let x: C::F = transcript.challenge::<C>();
     let x_inv = x.invert().unwrap();

     // The prover and verifier now calculate the following (28-31)
@@ -269,11 +271,19 @@ impl<'a, C: Ciphersuite> IpStatement<'a, C> {
   /// This will return Err if there is an error. This will return Ok if the proof was successfully
   /// queued for batch verification. The caller is required to verify the batch in order to ensure
   /// the proof is actually correct.
+  ///
+  /// If this proof returns an error, the BatchVerifier MUST be assumed corrupted and discarded.
   pub(crate) fn verify(
     self,
     verifier: &mut BatchVerifier<C>,
     transcript: &mut VerifierTranscript,
   ) -> Result<(), IpError> {
+    if verifier.g_bold.len() < self.generators.len() {
+      verifier.g_bold.resize(self.generators.len(), C::F::ZERO);
+      verifier.h_bold.resize(self.generators.len(), C::F::ZERO);
+      verifier.h_sum.resize(self.generators.len(), C::F::ZERO);
+    }
+
     let IpStatement { generators, h_bold_weights, u, P } = self;

     // Calculate the discrete log w.r.t. 2 for the amount of generators present
@@ -296,7 +306,7 @@ impl<'a, C: Ciphersuite> IpStatement<'a, C> {
     for _ in 0 .. lr_len {
       L.push(transcript.read_point::<C>().map_err(|_| IpError::IncompleteProof)?);
       R.push(transcript.read_point::<C>().map_err(|_| IpError::IncompleteProof)?);
-      xs.push(transcript.challenge());
+      xs.push(transcript.challenge::<C>());
     }

     // We calculate their inverse in batch
@@ -1,10 +1,11 @@
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc = include_str!("../README.md")]
+#![cfg_attr(not(feature = "std"), no_std)]
 #![deny(missing_docs)]
 #![allow(non_snake_case)]

 use core::fmt;
-use std::collections::HashSet;
+use std_shims::{vec, vec::Vec, collections::HashSet};

 use zeroize::Zeroize;
@@ -70,14 +71,26 @@ pub struct Generators<C: Ciphersuite> {
 #[must_use]
 #[derive(Clone)]
 pub struct BatchVerifier<C: Ciphersuite> {
-  g: C::F,
-  h: C::F,
+  /// The summed scalar for the G generator.
+  pub g: C::F,
+  /// The summed scalar for the G generator.
+  pub h: C::F,

-  g_bold: Vec<C::F>,
-  h_bold: Vec<C::F>,
-  h_sum: Vec<C::F>,
+  /// The summed scalars for the G_bold generators.
+  pub g_bold: Vec<C::F>,
+  /// The summed scalars for the H_bold generators.
+  pub h_bold: Vec<C::F>,
+  /// The summed scalars for the sums of all H generators prior to the index.
+  ///
+  /// This is not populated with the full set of summed H generators. This is only populated with
+  /// the powers of 2. Accordingly, an index i specifies a scalar for the sum of all H generators
+  /// from H**2**0 ..= H**2**i.
+  pub h_sum: Vec<C::F>,

-  additional: Vec<(C::F, C::G)>,
+  /// Additional (non-fixed) points to include in the multiexp.
+  ///
+  /// This is used for proof-specific elements.
+  pub additional: Vec<(C::F, C::G)>,
 }

 impl<C: Ciphersuite> fmt::Debug for Generators<C> {
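If, per the new doc comment, `h_sum` is keyed by powers of two, a proof over n = 2**k generators would map to entry k. A hypothetical helper illustrating that indexing (not crate API):

```rust
fn h_sum_index(n: usize) -> usize {
  assert!(n.is_power_of_two(), "generator counts are assumed to be powers of two");
  // e.g. n = 1 -> 0, n = 2 -> 1, n = 32 -> 5
  n.trailing_zeros() as usize
}
```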
@@ -171,15 +184,15 @@ impl<C: Ciphersuite> Generators<C> {
     Ok(Generators { g, h, g_bold, h_bold, h_sum })
   }

-  /// Create a BatchVerifier for proofs which use these generators.
-  pub fn batch_verifier(&self) -> BatchVerifier<C> {
+  /// Create a BatchVerifier for proofs which use a consistent set of generators.
+  pub fn batch_verifier() -> BatchVerifier<C> {
     BatchVerifier {
       g: C::F::ZERO,
       h: C::F::ZERO,

-      g_bold: vec![C::F::ZERO; self.g_bold.len()],
-      h_bold: vec![C::F::ZERO; self.h_bold.len()],
-      h_sum: vec![C::F::ZERO; self.h_sum.len()],
+      g_bold: vec![],
+      h_bold: vec![],
+      h_sum: vec![],

       additional: Vec::with_capacity(128),
     }
@@ -298,8 +311,6 @@ impl<C: Ciphersuite> PedersenCommitment<C> {
 pub struct PedersenVectorCommitment<C: Ciphersuite> {
   /// The values committed to across the `g` (bold) generators.
   pub g_values: ScalarVector<C::F>,
-  /// The values committed to across the `h` (bold) generators.
-  pub h_values: ScalarVector<C::F>,
   /// The mask blinding the values committed to.
   pub mask: C::F,
 }
@@ -309,8 +320,8 @@ impl<C: Ciphersuite> PedersenVectorCommitment<C> {
   ///
   /// This function returns None if the amount of generators is less than the amount of values
   /// within the relevant vector.
-  pub fn commit(&self, g_bold: &[C::G], h_bold: &[C::G], h: C::G) -> Option<C::G> {
-    if (g_bold.len() < self.g_values.len()) || (h_bold.len() < self.h_values.len()) {
+  pub fn commit(&self, g_bold: &[C::G], h: C::G) -> Option<C::G> {
+    if g_bold.len() < self.g_values.len() {
       None?;
     };
@@ -318,9 +329,6 @@ impl<C: Ciphersuite> PedersenVectorCommitment<C> {
     for pair in self.g_values.0.iter().cloned().zip(g_bold.iter().cloned()) {
       terms.push(pair);
     }
-    for pair in self.h_values.0.iter().cloned().zip(h_bold.iter().cloned()) {
-      terms.push(pair);
-    }
     let res = multiexp(&terms);
     terms.zeroize();
     Some(res)
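With `h_values` gone, a Pedersen vector commitment reduces to C = Σ v_i · G_i + mask · H. A free-standing sketch of that equation, mirroring the `multiexp` usage in the hunk above (illustrative, with hypothetical parameter names):

```rust
use ciphersuite::Ciphersuite;
use multiexp::multiexp;

// C = sum(v_i * G_i) + mask * H, or None if there are too few generators.
fn vector_commit<C: Ciphersuite>(
  values: &[C::F],
  g_bold: &[C::G],
  mask: C::F,
  h: C::G,
) -> Option<C::G> {
  if g_bold.len() < values.len() {
    return None;
  }
  let mut terms: Vec<(C::F, C::G)> =
    values.iter().copied().zip(g_bold.iter().copied()).collect();
  terms.push((mask, h));
  Some(multiexp(&terms))
}
```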
@@ -1,4 +1,5 @@
 use core::ops::{Add, Sub, Mul};
+use std_shims::{vec, vec::Vec};

 use zeroize::Zeroize;
@@ -23,13 +24,6 @@ pub enum Variable {
     /// The index of the variable.
     index: usize,
   },
-  /// A variable within a Pedersen vector commitment, committed to with a generator from `h` (bold).
-  CH {
-    /// The commitment being indexed.
-    commitment: usize,
-    /// The index of the variable.
-    index: usize,
-  },
   /// A variable within a Pedersen commitment.
   V(usize),
 }
@@ -41,7 +35,7 @@ impl Zeroize for Variable {

 /// A linear combination.
 ///
-/// Specifically, `WL aL + WR aR + WO aO + WCG C_G + WCH C_H + WV V + c`.
+/// Specifically, `WL aL + WR aR + WO aO + WCG C_G + WV V + c`.
 #[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
 #[must_use]
 pub struct LinComb<F: PrimeField> {
@@ -55,7 +49,6 @@ pub struct LinComb<F: PrimeField> {
   pub(crate) WO: Vec<(usize, F)>,
   // Sparse representation once within a commitment
   pub(crate) WCG: Vec<Vec<(usize, F)>>,
-  pub(crate) WCH: Vec<Vec<(usize, F)>>,
   // Sparse representation of WV
   pub(crate) WV: Vec<(usize, F)>,
   pub(crate) c: F,
@@ -81,15 +74,9 @@ impl<F: PrimeField> Add<&LinComb<F>> for LinComb<F> {
     while self.WCG.len() < constraint.WCG.len() {
       self.WCG.push(vec![]);
     }
-    while self.WCH.len() < constraint.WCH.len() {
-      self.WCH.push(vec![]);
-    }
     for (sWC, cWC) in self.WCG.iter_mut().zip(&constraint.WCG) {
       sWC.extend(cWC);
     }
-    for (sWC, cWC) in self.WCH.iter_mut().zip(&constraint.WCH) {
-      sWC.extend(cWC);
-    }
     self.WV.extend(&constraint.WV);
     self.c += constraint.c;
     self
@@ -110,15 +97,9 @@ impl<F: PrimeField> Sub<&LinComb<F>> for LinComb<F> {
     while self.WCG.len() < constraint.WCG.len() {
       self.WCG.push(vec![]);
     }
-    while self.WCH.len() < constraint.WCH.len() {
-      self.WCH.push(vec![]);
-    }
     for (sWC, cWC) in self.WCG.iter_mut().zip(&constraint.WCG) {
       sWC.extend(cWC.iter().map(|(i, weight)| (*i, -*weight)));
     }
-    for (sWC, cWC) in self.WCH.iter_mut().zip(&constraint.WCH) {
-      sWC.extend(cWC.iter().map(|(i, weight)| (*i, -*weight)));
-    }
     self.WV.extend(constraint.WV.iter().map(|(i, weight)| (*i, -*weight)));
     self.c -= constraint.c;
     self
@@ -143,11 +124,6 @@ impl<F: PrimeField> Mul<F> for LinComb<F> {
         *weight *= scalar;
       }
     }
-    for WC in self.WCH.iter_mut() {
-      for (_, weight) in WC {
-        *weight *= scalar;
-      }
-    }
     for (_, weight) in self.WV.iter_mut() {
       *weight *= scalar;
     }
@@ -167,7 +143,6 @@ impl<F: PrimeField> LinComb<F> {
       WR: vec![],
       WO: vec![],
       WCG: vec![],
-      WCH: vec![],
       WV: vec![],
       c: F::ZERO,
     }
@@ -196,14 +171,6 @@ impl<F: PrimeField> LinComb<F> {
         }
         self.WCG[i].push((j, scalar))
       }
-      Variable::CH { commitment: i, index: j } => {
-        self.highest_c_index = self.highest_c_index.max(Some(i));
-        self.highest_a_index = self.highest_a_index.max(Some(j));
-        while self.WCH.len() <= i {
-          self.WCH.push(vec![]);
-        }
-        self.WCH[i].push((j, scalar))
-      }
       Variable::V(i) => {
         self.highest_v_index = self.highest_v_index.max(Some(i));
         self.WV.push((i, scalar));
@@ -238,11 +205,6 @@ impl<F: PrimeField> LinComb<F> {
     &self.WCG
   }

-  /// View the current weights for CH.
-  pub fn WCH(&self) -> &[Vec<(usize, F)>] {
-    &self.WCH
-  }
-
   /// View the current weights for V.
   pub fn WV(&self) -> &[(usize, F)] {
     &self.WV
@@ -1,4 +1,5 @@
 use core::ops::{Index, IndexMut};
+use std_shims::vec::Vec;

 use zeroize::Zeroize;
@@ -1,4 +1,5 @@
 use core::ops::{Index, IndexMut, Add, Sub, Mul};
+use std_shims::{vec, vec::Vec};

 use zeroize::Zeroize;
@@ -3,7 +3,7 @@ use rand_core::{RngCore, OsRng};
 use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};

 use crate::{
-  ScalarVector, PedersenCommitment, PedersenVectorCommitment,
+  ScalarVector, PedersenCommitment, PedersenVectorCommitment, Generators,
   transcript::*,
   arithmetic_circuit_proof::{
     Variable, LinComb, ArithmeticCircuitStatement, ArithmeticCircuitWitness,
@@ -43,7 +43,7 @@ fn test_zero_arithmetic_circuit() {
     statement.clone().prove(&mut OsRng, &mut transcript, witness).unwrap();
     transcript.complete()
   };
-  let mut verifier = generators.batch_verifier();
+  let mut verifier = Generators::batch_verifier();

   let mut transcript = VerifierTranscript::new([0; 32], &proof);
   let verifier_commmitments = transcript.read_commitments(0, 1);
@@ -59,14 +59,8 @@ fn test_vector_commitment_arithmetic_circuit() {

   let v1 = <Ristretto as Ciphersuite>::F::random(&mut OsRng);
   let v2 = <Ristretto as Ciphersuite>::F::random(&mut OsRng);
-  let v3 = <Ristretto as Ciphersuite>::F::random(&mut OsRng);
-  let v4 = <Ristretto as Ciphersuite>::F::random(&mut OsRng);
   let gamma = <Ristretto as Ciphersuite>::F::random(&mut OsRng);
-  let commitment = (reduced.g_bold(0) * v1) +
-    (reduced.g_bold(1) * v2) +
-    (reduced.h_bold(0) * v3) +
-    (reduced.h_bold(1) * v4) +
-    (generators.h() * gamma);
+  let commitment = (reduced.g_bold(0) * v1) + (reduced.g_bold(1) * v2) + (generators.h() * gamma);
   let V = vec![];
   let C = vec![commitment];
@@ -83,20 +77,14 @@ fn test_vector_commitment_arithmetic_circuit() {
     vec![LinComb::empty()
       .term(<Ristretto as Ciphersuite>::F::ONE, Variable::CG { commitment: 0, index: 0 })
       .term(<Ristretto as Ciphersuite>::F::from(2u64), Variable::CG { commitment: 0, index: 1 })
-      .term(<Ristretto as Ciphersuite>::F::from(3u64), Variable::CH { commitment: 0, index: 0 })
-      .term(<Ristretto as Ciphersuite>::F::from(4u64), Variable::CH { commitment: 0, index: 1 })
-      .constant(-(v1 + (v2 + v2) + (v3 + v3 + v3) + (v4 + v4 + v4 + v4)))],
+      .constant(-(v1 + (v2 + v2)))],
     commitments.clone(),
   )
   .unwrap();
   let witness = ArithmeticCircuitWitness::<Ristretto>::new(
     aL,
     aR,
-    vec![PedersenVectorCommitment {
-      g_values: ScalarVector(vec![v1, v2]),
-      h_values: ScalarVector(vec![v3, v4]),
-      mask: gamma,
-    }],
+    vec![PedersenVectorCommitment { g_values: ScalarVector(vec![v1, v2]), mask: gamma }],
     vec![],
   )
   .unwrap();
@@ -105,7 +93,7 @@ fn test_vector_commitment_arithmetic_circuit() {
     statement.clone().prove(&mut OsRng, &mut transcript, witness).unwrap();
     transcript.complete()
   };
-  let mut verifier = generators.batch_verifier();
+  let mut verifier = Generators::batch_verifier();

   let mut transcript = VerifierTranscript::new([0; 32], &proof);
   let verifier_commmitments = transcript.read_commitments(1, 0);
@@ -139,13 +127,8 @@ fn fuzz_test_arithmetic_circuit() {
       while g_values.0.len() < ((OsRng.next_u64() % 8) + 1).try_into().unwrap() {
         g_values.0.push(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
       }
-      let mut h_values = ScalarVector(vec![]);
-      while h_values.0.len() < ((OsRng.next_u64() % 8) + 1).try_into().unwrap() {
-        h_values.0.push(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
-      }
       C.push(PedersenVectorCommitment {
         g_values,
-        h_values,
         mask: <Ristretto as Ciphersuite>::F::random(&mut OsRng),
       });
     }
@@ -193,13 +176,6 @@ fn fuzz_test_arithmetic_circuit() {
         constraint = constraint.term(weight, Variable::CG { commitment, index });
         eval += weight * C.g_values[index];
       }
-
-      for _ in 0 .. (OsRng.next_u64() % 4) {
-        let index = usize::try_from(OsRng.next_u64()).unwrap() % C.h_values.len();
-        let weight = <Ristretto as Ciphersuite>::F::random(&mut OsRng);
-        constraint = constraint.term(weight, Variable::CH { commitment, index });
-        eval += weight * C.h_values[index];
-      }
     }

     if !V.is_empty() {
@@ -218,11 +194,7 @@ fn fuzz_test_arithmetic_circuit() {

   let mut transcript = Transcript::new([0; 32]);
   let commitments = transcript.write_commitments(
-    C.iter()
-      .map(|C| {
-        C.commit(generators.g_bold_slice(), generators.h_bold_slice(), generators.h()).unwrap()
-      })
-      .collect(),
+    C.iter().map(|C| C.commit(generators.g_bold_slice(), generators.h()).unwrap()).collect(),
     V.iter().map(|V| V.commit(generators.g(), generators.h())).collect(),
   );
@@ -239,7 +211,7 @@ fn fuzz_test_arithmetic_circuit() {
     statement.clone().prove(&mut OsRng, &mut transcript, witness).unwrap();
     transcript.complete()
   };
-  let mut verifier = generators.batch_verifier();
+  let mut verifier = Generators::batch_verifier();

   let mut transcript = VerifierTranscript::new([0; 32], &proof);
   let verifier_commmitments = transcript.read_commitments(C.len(), V.len());
@@ -8,7 +8,7 @@ use ciphersuite::{
 };

 use crate::{
-  ScalarVector, PointVector,
+  ScalarVector, PointVector, Generators,
   transcript::*,
   inner_product::{P, IpStatement, IpWitness},
   tests::generators,
@@ -41,7 +41,7 @@ fn test_zero_inner_product() {
     transcript.complete()
   };

-  let mut verifier = generators.batch_verifier();
+  let mut verifier = Generators::batch_verifier();
   IpStatement::<Ristretto>::new(
     reduced,
     ScalarVector(vec![<Ristretto as Ciphersuite>::F::ONE; 1]),
@@ -58,7 +58,7 @@ fn test_zero_inner_product() {
 fn test_inner_product() {
   // P = sum(g_bold * a, h_bold * b)
   let generators = generators::<Ristretto>(32);
-  let mut verifier = generators.batch_verifier();
+  let mut verifier = Generators::batch_verifier();
   for i in [1, 2, 4, 8, 16, 32] {
     let generators = generators.reduce(i).unwrap();
     let g = generators.g();
@@ -1,9 +1,12 @@
-use std::io;
+use std_shims::{vec::Vec, io};

 use blake2::{Digest, Blake2b512};

 use ciphersuite::{
-  group::{ff::PrimeField, GroupEncoding},
+  group::{
+    ff::{Field, PrimeField},
+    GroupEncoding,
+  },
   Ciphersuite,
 };

@@ -13,27 +16,11 @@ const SCALAR: u8 = 0;
 const POINT: u8 = 1;
 const CHALLENGE: u8 = 2;

-fn challenge<F: PrimeField>(digest: &mut Blake2b512) -> F {
-  // Panic if this is such a wide field, we won't successfully perform a reduction into an unbiased
-  // scalar
-  debug_assert!((F::NUM_BITS + 128) < 512);
-
+fn challenge<C: Ciphersuite>(digest: &mut Blake2b512) -> C::F {
   digest.update([CHALLENGE]);
-  let chl = digest.clone().finalize();
+  let chl = digest.clone().finalize().into();

-  let mut res = F::ZERO;
-  for (i, mut byte) in chl.iter().cloned().enumerate() {
-    for j in 0 .. 8 {
-      let lsb = byte & 1;
-      let mut bit = F::from(u64::from(lsb));
-      for _ in 0 .. ((i * 8) + j) {
-        bit = bit.double();
-      }
-      res += bit;
-
-      byte >>= 1;
-    }
-  }
+  let res = C::reduce_512(chl);

   // Negligible probability
   if bool::from(res.is_zero()) {
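The deleted loop rebuilt a field element bit by bit from the digest; `reduce_512` instead reduces the whole 64-byte digest modulo the scalar field, keeping the same negligible-bias argument far more cheaply. For intuition, dalek's equivalent wide reduction, shown purely as an illustration of the technique (this is dalek's API, not this crate's):

```rust
use curve25519_dalek::scalar::Scalar;

// 64 uniform bytes reduced mod the group order yield a near-uniform scalar,
// with bias on the order of 2^-128 for a ~253-bit order.
fn scalar_from_wide_hash(digest: [u8; 64]) -> Scalar {
  Scalar::from_bytes_mod_order_wide(&digest)
}
```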
@@ -83,6 +70,8 @@ impl Transcript {
   }

   /// Push a scalar onto the transcript.
+  ///
+  /// The order and layout of this must be constant to the context.
   pub fn push_scalar(&mut self, scalar: impl PrimeField) {
     self.digest.update([SCALAR]);
     let bytes = scalar.to_repr();
@@ -91,6 +80,8 @@ impl Transcript {
   }

   /// Push a point onto the transcript.
+  ///
+  /// The order and layout of this must be constant to the context.
   pub fn push_point(&mut self, point: impl GroupEncoding) {
     self.digest.update([POINT]);
     let bytes = point.to_bytes();
@@ -104,9 +95,11 @@ impl Transcript {
     C: Vec<C::G>,
     V: Vec<C::G>,
   ) -> Commitments<C> {
+    self.digest.update(u32::try_from(C.len()).unwrap().to_le_bytes());
     for C in &C {
       self.push_point(*C);
     }
+    self.digest.update(u32::try_from(V.len()).unwrap().to_le_bytes());
     for V in &V {
       self.push_point(*V);
     }
@@ -114,8 +107,14 @@ impl Transcript {
   }

   /// Sample a challenge.
-  pub fn challenge<F: PrimeField>(&mut self) -> F {
-    challenge(&mut self.digest)
+  pub fn challenge<C: Ciphersuite>(&mut self) -> C::F {
+    challenge::<C>(&mut self.digest)
   }

+  /// Sample a challenge as a byte array.
+  pub fn challenge_bytes(&mut self) -> [u8; 64] {
+    self.digest.update([CHALLENGE]);
+    self.digest.clone().finalize().into()
+  }
+
   /// Complete a transcript, yielding the fully serialized proof.
@@ -139,20 +138,36 @@ impl<'a> VerifierTranscript<'a> {
   }

   /// Read a scalar from the transcript.
+  ///
+  /// The order and layout of this must be constant to the context.
   pub fn read_scalar<C: Ciphersuite>(&mut self) -> io::Result<C::F> {
-    let scalar = C::read_F(&mut self.transcript)?;
+    // Read the scalar onto the transcript using the serialization present in the transcript
     self.digest.update([SCALAR]);
-    let bytes = scalar.to_repr();
-    self.digest.update(bytes);
+    let scalar_len = <C::F as PrimeField>::Repr::default().as_ref().len();
+    if self.transcript.len() < scalar_len {
+      Err(io::Error::new(io::ErrorKind::Other, "not enough bytes to read_scalar"))?;
+    }
+    self.digest.update(&self.transcript[.. scalar_len]);
+
+    // Read the actual scalar, where `read_F` ensures its canonically serialized
+    let scalar = C::read_F(&mut self.transcript)?;
     Ok(scalar)
   }

   /// Read a point from the transcript.
+  ///
+  /// The order and layout of this must be constant to the context.
   pub fn read_point<C: Ciphersuite>(&mut self) -> io::Result<C::G> {
-    let point = C::read_G(&mut self.transcript)?;
+    // Read the point onto the transcript using the serialization present in the transcript
     self.digest.update([POINT]);
-    let bytes = point.to_bytes();
-    self.digest.update(bytes);
+    let point_len = <C::G as GroupEncoding>::Repr::default().as_ref().len();
+    if self.transcript.len() < point_len {
+      Err(io::Error::new(io::ErrorKind::Other, "not enough bytes to read_point"))?;
+    }
+    self.digest.update(&self.transcript[.. point_len]);
+
+    // Read the actual point, where `read_G` ensures its canonically serialized
+    let point = C::read_G(&mut self.transcript)?;
    Ok(point)
   }
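The bounds checks above size the digest update from each type's encoded representation. An illustrative computation of those lengths, using the same derivation as the readers (for Ristretto, both scalars and points encode to 32 bytes):

```rust
use ciphersuite::{
  group::{ff::PrimeField, GroupEncoding},
  Ciphersuite, Ristretto,
};

// The same length derivation used by read_scalar/read_point above.
fn encoded_lens<C: Ciphersuite>() -> (usize, usize) {
  let scalar_len = <C::F as PrimeField>::Repr::default().as_ref().len();
  let point_len = <C::G as GroupEncoding>::Repr::default().as_ref().len();
  (scalar_len, point_len)
}

fn main() {
  assert_eq!(encoded_lens::<Ristretto>(), (32, 32));
}
```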
@@ -165,10 +180,12 @@ impl<'a> VerifierTranscript<'a> {
     C: usize,
     V: usize,
   ) -> io::Result<Commitments<C>> {
+    self.digest.update(u32::try_from(C).unwrap().to_le_bytes());
     let mut C_vec = Vec::with_capacity(C);
     for _ in 0 .. C {
       C_vec.push(self.read_point::<C>()?);
     }
+    self.digest.update(u32::try_from(V).unwrap().to_le_bytes());
     let mut V_vec = Vec::with_capacity(V);
     for _ in 0 .. V {
       V_vec.push(self.read_point::<C>()?);
@@ -177,11 +194,17 @@ impl<'a> VerifierTranscript<'a> {
   }

   /// Sample a challenge.
-  pub fn challenge<F: PrimeField>(&mut self) -> F {
-    challenge(&mut self.digest)
+  pub fn challenge<C: Ciphersuite>(&mut self) -> C::F {
+    challenge::<C>(&mut self.digest)
   }

-  /// Complete the transcript, returning the advanced slice.
+  /// Sample a challenge as a byte array.
+  pub fn challenge_bytes(&mut self) -> [u8; 64] {
+    self.digest.update([CHALLENGE]);
+    self.digest.clone().finalize().into()
+  }
+
+  /// Complete the transcript transcript, yielding what remains.
   pub fn complete(self) -> &'a [u8] {
     self.transcript
   }
@@ -17,20 +17,22 @@ rustdoc-args = ["--cfg", "docsrs"]
 rustversion = "1"
 hex-literal = { version = "0.4", default-features = false }

-rand_core = { version = "0.6", default-features = false, features = ["std"] }
+std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false, optional = true }
+
+rand_core = { version = "0.6", default-features = false }

-zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
-subtle = { version = "^2.4", default-features = false, features = ["std"] }
+zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
+subtle = { version = "^2.4", default-features = false }

 generic-array = { version = "0.14", default-features = false }
 crypto-bigint = { version = "0.5", default-features = false, features = ["zeroize"] }

 k256 = { version = "0.13", default-features = false, features = ["arithmetic"] }

-blake2 = { version = "0.10", default-features = false, features = ["std"] }
-ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false, features = ["std"] }
-ec-divisors = { path = "../divisors" }
-generalized-bulletproofs-ec-gadgets = { path = "../ec-gadgets" }
+blake2 = { version = "0.10", default-features = false }
+ciphersuite = { path = "../../ciphersuite", version = "0.4", default-features = false }
+ec-divisors = { path = "../divisors", default-features = false }
+generalized-bulletproofs-ec-gadgets = { path = "../ec-gadgets", default-features = false }

 [dev-dependencies]
 hex = "0.4"
@@ -38,3 +40,8 @@ hex = "0.4"
 rand_core = { version = "0.6", features = ["std"] }

 ff-group-tests = { path = "../../ff-group-tests" }
+
+[features]
+alloc = ["std-shims", "zeroize/alloc", "ciphersuite/alloc"]
+std = ["std-shims/std", "rand_core/std", "zeroize/std", "subtle/std", "blake2/std", "ciphersuite/std", "ec-divisors/std", "generalized-bulletproofs-ec-gadgets/std"]
+default = ["std"]
@@ -1,5 +1,9 @@
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc = include_str!("../README.md")]
+#![cfg_attr(not(feature = "std"), no_std)]
+
+#[cfg(any(feature = "alloc", feature = "std"))]
+use std_shims::io::{self, Read};

 use generic_array::typenum::{Sum, Diff, Quot, U, U1, U2};
 use ciphersuite::group::{ff::PrimeField, Group};
@@ -33,10 +37,29 @@ impl ciphersuite::Ciphersuite for Secq256k1 {
     Point::generator()
   }

+  fn reduce_512(scalar: [u8; 64]) -> Self::F {
+    Scalar::wide_reduce(scalar)
+  }
+
   fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
     use blake2::Digest;
     Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_slice().try_into().unwrap())
   }
+
+  // We override the provided impl, which compares against the reserialization, because
+  // we already require canonicity
+  #[cfg(any(feature = "alloc", feature = "std"))]
+  #[allow(non_snake_case)]
+  fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {
+    use ciphersuite::group::GroupEncoding;
+
+    let mut encoding = <Self::G as GroupEncoding>::Repr::default();
+    reader.read_exact(encoding.as_mut())?;
+
+    let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
+      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
+    Ok(point)
+  }
 }

 impl generalized_bulletproofs_ec_gadgets::DiscreteLogParameters for Secq256k1 {
@@ -40,7 +40,8 @@ impl ConstantTimeEq for Point {
     let y1 = self.y * other.z;
     let y2 = other.y * self.z;

-    (self.x.is_zero() & other.x.is_zero()) | (x1.ct_eq(&x2) & y1.ct_eq(&y2))
+    // Identity or equivalent
+    (self.z.is_zero() & other.z.is_zero()) | (x1.ct_eq(&x2) & y1.ct_eq(&y2))
   }
 }
@@ -192,6 +193,7 @@ impl Group for Point {
     Point { x: FieldElement::ZERO, y: FieldElement::ONE, z: FieldElement::ZERO }
   }
   fn generator() -> Self {
+    // Point with the lowest valid x-coordinate
     Point {
       x: FieldElement::from_repr(
         hex_literal::hex!("0000000000000000000000000000000000000000000000000000000000000001")
@@ -334,8 +336,10 @@ impl GroupEncoding for Point {
-    // If this the identity, set y to 1
+    // If this the identity, set y to 1 and z to 0 (instead of 1)
     let y =
       CtOption::conditional_select(&y, &CtOption::new(FieldElement::ONE, 1.into()), is_identity);
+    let z = <_>::conditional_select(&FieldElement::ONE, &FieldElement::ZERO, is_identity);
     // Create the point if we have a y solution
-    let point = y.map(|y| Point { x, y, z: FieldElement::ONE });
+    let point = y.map(|y| Point { x, y, z });

     let not_negative_zero = !(is_identity & sign);
     // Only return the point if it isn't -0 and the sign byte wasn't malleated
@@ -30,4 +30,4 @@ p256 = { version = "^0.13.1", default-features = false, features = ["std", "arit

 bls12_381 = "0.8"

-pasta_curves = "0.5"
+pasta_curves = { git = "https://github.com/kayabaNerve/pasta_curves", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616" }
@@ -154,18 +154,20 @@ pub fn test_group<R: RngCore, G: Group>(rng: &mut R) {

 /// Test encoding and decoding of group elements.
 pub fn test_encoding<G: PrimeGroup>() {
-  let test = |point: G, msg| {
+  let test = |point: G, msg| -> G {
     let bytes = point.to_bytes();
     let mut repr = G::Repr::default();
     repr.as_mut().copy_from_slice(bytes.as_ref());
-    assert_eq!(point, G::from_bytes(&repr).unwrap(), "{msg} couldn't be encoded and decoded");
+    let decoded = G::from_bytes(&repr).unwrap();
+    assert_eq!(point, decoded, "{msg} couldn't be encoded and decoded");
     assert_eq!(
       point,
       G::from_bytes_unchecked(&repr).unwrap(),
       "{msg} couldn't be encoded and decoded",
     );
+    decoded
   };
-  test(G::identity(), "identity");
+  assert!(bool::from(test(G::identity(), "identity").is_identity()));
   test(G::generator(), "generator");
   test(G::generator() + G::generator(), "(generator * 2)");
 }
@@ -121,7 +121,10 @@ license-files = [
 multiple-versions = "warn"
 wildcards = "warn"
 highlight = "all"
-deny = [ { name = "serde_derive", version = ">=1.0.172, <1.0.185" } ]
+deny = [
+  { name = "serde_derive", version = ">=1.0.172, <1.0.185" },
+  { name = "hashbrown", version = ">=0.15" },
+]

 [sources]
 unknown-registry = "deny"
@@ -132,5 +135,4 @@ allow-git = [
   "https://github.com/serai-dex/substrate-bip39",
   "https://github.com/serai-dex/substrate",
+  "https://github.com/kayabaNerve/pasta_curves",
-  "https://github.com/alloy-rs/core",
 ]
@@ -5,20 +5,20 @@ GEM
       public_suffix (>= 2.0.2, < 7.0)
     bigdecimal (3.1.8)
     colorator (1.1.0)
-    concurrent-ruby (1.3.3)
+    concurrent-ruby (1.3.4)
     em-websocket (0.5.3)
       eventmachine (>= 0.12.9)
       http_parser.rb (~> 0)
     eventmachine (1.2.7)
     ffi (1.17.0-x86_64-linux-gnu)
     forwardable-extended (2.6.0)
-    google-protobuf (4.27.3-x86_64-linux)
+    google-protobuf (4.28.2-x86_64-linux)
       bigdecimal
       rake (>= 13)
     http_parser.rb (0.8.0)
-    i18n (1.14.5)
+    i18n (1.14.6)
       concurrent-ruby (~> 1.0)
-    jekyll (4.3.3)
+    jekyll (4.3.4)
       addressable (~> 2.4)
       colorator (~> 1.0)
       em-websocket (~> 0.5)
@@ -63,17 +63,15 @@ GEM
     rb-fsevent (0.11.2)
     rb-inotify (0.11.1)
       ffi (~> 1.0)
-    rexml (3.3.4)
-      strscan
-    rouge (4.3.0)
+    rexml (3.3.7)
+    rouge (4.4.0)
     safe_yaml (1.0.5)
-    sass-embedded (1.77.8-x86_64-linux-gnu)
-      google-protobuf (~> 4.26)
-    strscan (3.1.0)
+    sass-embedded (1.79.3-x86_64-linux-gnu)
+      google-protobuf (~> 4.27)
     terminal-table (3.0.2)
       unicode-display_width (>= 1.1.1, < 3)
-    unicode-display_width (2.5.0)
-    webrick (1.8.1)
+    unicode-display_width (2.6.0)
+    webrick (1.8.2)

 PLATFORMS
   x86_64-linux
@@ -6,7 +6,7 @@ pub(crate) use std::{
 pub(crate) use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
 pub(crate) use schnorr_signatures::SchnorrSignature;

-pub(crate) use serai_primitives::NetworkId;
+pub(crate) use serai_primitives::ExternalNetworkId;

 pub(crate) use tokio::{
   io::{AsyncReadExt, AsyncWriteExt},
@@ -197,10 +197,7 @@ async fn main() {
     KEYS.write().unwrap().insert(service, key);
     let mut queues = QUEUES.write().unwrap();
     if service == Service::Coordinator {
-      for network in serai_primitives::NETWORKS {
-        if network == NetworkId::Serai {
-          continue;
-        }
+      for network in serai_primitives::EXTERNAL_NETWORKS {
         queues.insert(
           (service, Service::Processor(network)),
           RwLock::new(Queue(db.clone(), service, Service::Processor(network))),
@@ -214,17 +211,13 @@ async fn main() {
     }
   };

-  // Make queues for each NetworkId, other than Serai
-  for network in serai_primitives::NETWORKS {
-    if network == NetworkId::Serai {
-      continue;
-    }
+  // Make queues for each ExternalNetworkId
+  for network in serai_primitives::EXTERNAL_NETWORKS {
     // Use a match so we error if the list of NetworkIds changes
     let Some(key) = read_key(match network {
-      NetworkId::Serai => unreachable!(),
-      NetworkId::Bitcoin => "BITCOIN_KEY",
-      NetworkId::Ethereum => "ETHEREUM_KEY",
-      NetworkId::Monero => "MONERO_KEY",
+      ExternalNetworkId::Bitcoin => "BITCOIN_KEY",
+      ExternalNetworkId::Ethereum => "ETHEREUM_KEY",
+      ExternalNetworkId::Monero => "MONERO_KEY",
     }) else {
       continue;
     };
@@ -3,11 +3,11 @@ use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};

 use borsh::{BorshSerialize, BorshDeserialize};

-use serai_primitives::NetworkId;
+use serai_primitives::ExternalNetworkId;

 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
 pub enum Service {
-  Processor(NetworkId),
+  Processor(ExternalNetworkId),
   Coordinator,
 }
@@ -1,46 +0,0 @@
[package]
name = "polyseed"
version = "0.1.0"
description = "Rust implementation of Polyseed"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/polyseed"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.80"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }

thiserror = { version = "2", default-features = false }

subtle = { version = "^2.4", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }

sha3 = { version = "0.10", default-features = false }
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }

[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }

[features]
std = [
  "std-shims/std",

  "thiserror/std",

  "subtle/std",
  "zeroize/std",
  "rand_core/std",

  "sha3/std",
  "pbkdf2/std",
]
default = ["std"]
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2024 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,11 +0,0 @@
# Polyseed

Rust implementation of [Polyseed](https://github.com/tevador/polyseed).

This library is usable under no-std when the `std` feature (on by default) is
disabled.

### Cargo Features

- `std` (on by default): Enables `std` (and with it, more efficient internal
  implementations).
@@ -1,472 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]

use core::fmt;
use std_shims::{sync::LazyLock, string::String, collections::HashMap};
#[cfg(feature = "std")]
use std::time::{SystemTime, UNIX_EPOCH};

use subtle::ConstantTimeEq;
use zeroize::{Zeroize, Zeroizing, ZeroizeOnDrop};
use rand_core::{RngCore, CryptoRng};

use sha3::Sha3_256;
use pbkdf2::pbkdf2_hmac;

#[cfg(test)]
mod tests;

// Features
const FEATURE_BITS: u8 = 5;
#[allow(dead_code)]
const INTERNAL_FEATURES: u8 = 2;
const USER_FEATURES: u8 = 3;

const USER_FEATURES_MASK: u8 = (1 << USER_FEATURES) - 1;
const ENCRYPTED_MASK: u8 = 1 << 4;
const RESERVED_FEATURES_MASK: u8 = ((1 << FEATURE_BITS) - 1) ^ ENCRYPTED_MASK;

fn user_features(features: u8) -> u8 {
  features & USER_FEATURES_MASK
}

fn polyseed_features_supported(features: u8) -> bool {
  (features & RESERVED_FEATURES_MASK) == 0
}

// Dates
const DATE_BITS: u8 = 10;
const DATE_MASK: u16 = (1u16 << DATE_BITS) - 1;
const POLYSEED_EPOCH: u64 = 1635768000; // 1st November 2021 12:00 UTC
const TIME_STEP: u64 = 2629746; // 30.436875 days = 1/12 of the Gregorian year

// After ~85 years, this will roll over.
fn birthday_encode(time: u64) -> u16 {
  u16::try_from((time.saturating_sub(POLYSEED_EPOCH) / TIME_STEP) & u64::from(DATE_MASK))
    .expect("value masked by 2**10 - 1 didn't fit into a u16")
}

fn birthday_decode(birthday: u16) -> u64 {
  POLYSEED_EPOCH + (u64::from(birthday) * TIME_STEP)
}
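// A worked example (hypothetical test, not part of the original file) of the
// birthday round trip: encoding floors to a ~30.44-day window, so decoding
// returns the start of the window containing the original timestamp.
#[cfg(test)]
fn _birthday_sketch() {
  let now = 1638446400; // 2nd December 2021 12:00 UTC, a month after POLYSEED_EPOCH
  let decoded = birthday_decode(birthday_encode(now));
  assert!(decoded <= now);
  assert!((now - decoded) < TIME_STEP);
}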
// Polyseed parameters
const SECRET_BITS: usize = 150;

const BITS_PER_BYTE: usize = 8;
const SECRET_SIZE: usize = SECRET_BITS.div_ceil(BITS_PER_BYTE); // 19
const CLEAR_BITS: usize = (SECRET_SIZE * BITS_PER_BYTE) - SECRET_BITS; // 2

// Polyseed calls this CLEAR_MASK and has a very complicated formula for this fundamental
// equivalency
#[allow(clippy::cast_possible_truncation)]
const LAST_BYTE_SECRET_BITS_MASK: u8 = ((1 << (BITS_PER_BYTE - CLEAR_BITS)) - 1) as u8;

const SECRET_BITS_PER_WORD: usize = 10;

// The amount of words in a seed.
const POLYSEED_LENGTH: usize = 16;
// Amount of characters each word must have if trimmed
pub(crate) const PREFIX_LEN: usize = 4;

const POLY_NUM_CHECK_DIGITS: usize = 1;
const DATA_WORDS: usize = POLYSEED_LENGTH - POLY_NUM_CHECK_DIGITS;

// Polynomial
const GF_BITS: usize = 11;
const POLYSEED_MUL2_TABLE: [u16; 8] = [5, 7, 1, 3, 13, 15, 9, 11];

type Poly = [u16; POLYSEED_LENGTH];

fn elem_mul2(x: u16) -> u16 {
  if x < 1024 {
    return 2 * x;
  }
  POLYSEED_MUL2_TABLE[usize::from(x % 8)] + (16 * ((x - 1024) / 8))
}

fn poly_eval(poly: &Poly) -> u16 {
  // Horner's method at x = 2
  let mut result = poly[POLYSEED_LENGTH - 1];
  for i in (0 .. (POLYSEED_LENGTH - 1)).rev() {
    result = elem_mul2(result) ^ poly[i];
  }
  result
}
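// A sketch (hypothetical test, not part of the original file) of the
// self-check property from_string relies on: Horner's method XORs
// coefficient 0 in last, so storing eval(poly with poly[0] = 0) into poly[0]
// forces eval(poly) == 0.
#[cfg(test)]
fn _checksum_sketch() {
  let mut poly: Poly = [0, 5, 1337, 42, 7, 900, 3, 1, 1600, 11, 0, 256, 77, 2047, 123, 999];
  let checksum = poly_eval(&poly); // computed with the check digit zeroed
  poly[0] = checksum;
  assert_eq!(poly_eval(&poly), 0);
}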
// Key gen parameters
const POLYSEED_SALT: &[u8] = b"POLYSEED key";
const POLYSEED_KEYGEN_ITERATIONS: u32 = 10000;

// Polyseed technically supports multiple coins, and the value for Monero is 0
// See: https://github.com/tevador/polyseed/blob/dfb05d8edb682b0e8f743b1b70c9131712ff4157
// /include/polyseed.h#L57
const COIN: u16 = 0;

/// An error when working with a Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Debug, thiserror::Error)]
pub enum PolyseedError {
  /// The seed was invalid.
  #[error("invalid seed")]
  InvalidSeed,
  /// The entropy was invalid.
  #[error("invalid entropy")]
  InvalidEntropy,
  /// The checksum did not match the data.
  #[error("invalid checksum")]
  InvalidChecksum,
  /// Unsupported feature bits were set.
  #[error("unsupported features")]
  UnsupportedFeatures,
}
/// Language options for Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Zeroize)]
pub enum Language {
  /// English language option.
  English,
  /// Spanish language option.
  Spanish,
  /// French language option.
  French,
  /// Italian language option.
  Italian,
  /// Japanese language option.
  Japanese,
  /// Korean language option.
  Korean,
  /// Czech language option.
  Czech,
  /// Portuguese language option.
  Portuguese,
  /// Simplified Chinese language option.
  ChineseSimplified,
  /// Traditional Chinese language option.
  ChineseTraditional,
}

struct WordList {
  words: &'static [&'static str],
  has_prefix: bool,
  has_accent: bool,
}

impl WordList {
  fn new(words: &'static [&'static str], has_prefix: bool, has_accent: bool) -> WordList {
    let res = WordList { words, has_prefix, has_accent };
    // This is needed for a later unwrap to not fail
    assert!(words.len() < usize::from(u16::MAX));
    res
  }
}

static LANGUAGES: LazyLock<HashMap<Language, WordList>> = LazyLock::new(|| {
  HashMap::from([
    (Language::Czech, WordList::new(include!("./words/cs.rs"), true, false)),
    (Language::French, WordList::new(include!("./words/fr.rs"), true, true)),
    (Language::Korean, WordList::new(include!("./words/ko.rs"), false, false)),
    (Language::English, WordList::new(include!("./words/en.rs"), true, false)),
    (Language::Italian, WordList::new(include!("./words/it.rs"), true, false)),
    (Language::Spanish, WordList::new(include!("./words/es.rs"), true, true)),
    (Language::Japanese, WordList::new(include!("./words/ja.rs"), false, false)),
    (Language::Portuguese, WordList::new(include!("./words/pt.rs"), true, false)),
    (
      Language::ChineseSimplified,
      WordList::new(include!("./words/zh_simplified.rs"), false, false),
    ),
    (
      Language::ChineseTraditional,
      WordList::new(include!("./words/zh_traditional.rs"), false, false),
    ),
  ])
});
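// An illustration (hypothetical test, with a made-up two-word list, not the
// crate's own matcher) of what `has_prefix` buys: such word lists are unique
// in their first PREFIX_LEN characters, so a seed trimmed to 4-character
// prefixes still decodes unambiguously.
#[cfg(test)]
fn _prefix_sketch() {
  let words = ["raven", "ravine"]; // "rave" vs "ravi": distinct prefixes
  let matches = |lang_word: &str, word: &str| {
    lang_word.chars().take(PREFIX_LEN).eq(word.chars().take(PREFIX_LEN))
  };
  assert!(matches(words[0], "rave"));
  assert!(!matches(words[1], "rave"));
}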
/// A Polyseed.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Polyseed {
  language: Language,
  features: u8,
  birthday: u16,
  entropy: Zeroizing<[u8; 32]>,
  checksum: u16,
}

impl fmt::Debug for Polyseed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.debug_struct("Polyseed").finish_non_exhaustive()
  }
}

fn valid_entropy(entropy: &Zeroizing<[u8; 32]>) -> bool {
  // Last byte of the entropy should only use certain bits
  let mut res =
    entropy[SECRET_SIZE - 1].ct_eq(&(entropy[SECRET_SIZE - 1] & LAST_BYTE_SECRET_BITS_MASK));
  // Last 13 bytes of the buffer should be unused
  for b in SECRET_SIZE .. entropy.len() {
    res &= entropy[b].ct_eq(&0);
  }
  res.into()
}
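// A sketch (hypothetical test, not part of the original file) of the rule
// valid_entropy enforces: `ct_eq` avoids branching on secret bytes, and the
// final-byte check is simply that only the low 6 bits
// (LAST_BYTE_SECRET_BITS_MASK = 0b0011_1111) may be set.
#[cfg(test)]
fn _entropy_rule_sketch() {
  assert_eq!(LAST_BYTE_SECRET_BITS_MASK, 0b0011_1111);
  let mut entropy = Zeroizing::new([0u8; 32]);
  entropy[SECRET_SIZE - 1] = 0b0100_0000; // sets a cleared bit
  assert!(!valid_entropy(&entropy));
  entropy[SECRET_SIZE - 1] = 0b0011_1111;
  assert!(valid_entropy(&entropy));
}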
impl Polyseed {
  // TODO: Clean this
  fn to_poly(&self) -> Poly {
    let mut extra_bits = u32::from(FEATURE_BITS + DATE_BITS);
    let extra_val = (u16::from(self.features) << DATE_BITS) | self.birthday;

    let mut entropy_idx = 0;
    let mut secret_bits = BITS_PER_BYTE;
    let mut seed_rem_bits = SECRET_BITS - BITS_PER_BYTE;

    let mut poly = [0; POLYSEED_LENGTH];
    for i in 0 .. DATA_WORDS {
      extra_bits -= 1;

      let mut word_bits = 0;
      let mut word_val = 0;
      while word_bits < SECRET_BITS_PER_WORD {
        if secret_bits == 0 {
          entropy_idx += 1;
          secret_bits = seed_rem_bits.min(BITS_PER_BYTE);
          seed_rem_bits -= secret_bits;
        }
        let chunk_bits = secret_bits.min(SECRET_BITS_PER_WORD - word_bits);
        secret_bits -= chunk_bits;
        word_bits += chunk_bits;
        word_val <<= chunk_bits;
        word_val |=
          (u16::from(self.entropy[entropy_idx]) >> secret_bits) & ((1u16 << chunk_bits) - 1);
      }

      word_val <<= 1;
      word_val |= (extra_val >> extra_bits) & 1;
      poly[POLY_NUM_CHECK_DIGITS + i] = word_val;
    }

    poly
  }
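  // A minimal sketch (hypothetical helper, not part of the original file) of
  // the capacity arithmetic to_poly relies on: 16 words of GF_BITS = 11 bits,
  // one reserved for the check digit, each data word carrying 10 secret bits
  // plus one bit of (features || birthday).
  #[cfg(test)]
  fn _layout_sketch() {
    let data_bits = (POLYSEED_LENGTH - POLY_NUM_CHECK_DIGITS) * GF_BITS; // 165
    let secret_bits = (POLYSEED_LENGTH - POLY_NUM_CHECK_DIGITS) * SECRET_BITS_PER_WORD; // 150
    assert_eq!(secret_bits, SECRET_BITS);
    // The 15 remaining bits hold the 5 feature bits and the 10-bit birthday
    assert_eq!(data_bits - secret_bits, usize::from(FEATURE_BITS + DATE_BITS));
  }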
  fn from_internal(
    language: Language,
    masked_features: u8,
    encoded_birthday: u16,
    entropy: Zeroizing<[u8; 32]>,
  ) -> Result<Polyseed, PolyseedError> {
    if !polyseed_features_supported(masked_features) {
      Err(PolyseedError::UnsupportedFeatures)?;
    }

    if !valid_entropy(&entropy) {
      Err(PolyseedError::InvalidEntropy)?;
    }

    let mut res = Polyseed {
      language,
      birthday: encoded_birthday,
      features: masked_features,
      entropy,
      checksum: 0,
    };
    res.checksum = poly_eval(&res.to_poly());
    Ok(res)
  }

  /// Create a new `Polyseed` with specific internals.
  ///
  /// `birthday` is defined in seconds since the epoch.
  pub fn from(
    language: Language,
    features: u8,
    birthday: u64,
    entropy: Zeroizing<[u8; 32]>,
  ) -> Result<Polyseed, PolyseedError> {
    Self::from_internal(language, user_features(features), birthday_encode(birthday), entropy)
  }

  /// Create a new `Polyseed`.
  ///
  /// This uses the system's time for the birthday, if available, else 0.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, language: Language) -> Polyseed {
    // Get the birthday
    #[cfg(feature = "std")]
    let birthday =
      SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or(core::time::Duration::ZERO).as_secs();
    #[cfg(not(feature = "std"))]
    let birthday = 0;

    // Derive entropy
    let mut entropy = Zeroizing::new([0; 32]);
    rng.fill_bytes(entropy.as_mut());
    entropy[SECRET_SIZE ..].fill(0);
    entropy[SECRET_SIZE - 1] &= LAST_BYTE_SECRET_BITS_MASK;

    Self::from(language, 0, birthday, entropy).unwrap()
  }
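  // A minimal usage sketch (hypothetical helper, not part of the original
  // file), mirroring the crate's own tests: generate a seed, round-trip it
  // through its string form, and derive the wallet key.
  #[cfg(test)]
  fn _usage_sketch() {
    use rand_core::OsRng;
    let seed = Polyseed::new(&mut OsRng, Language::English);
    let recovered = Polyseed::from_string(Language::English, seed.to_string()).unwrap();
    assert_eq!(seed, recovered);
    // 32 key bytes via PBKDF2-HMAC-SHA3-256 over the entropy
    let _key = recovered.key();
  }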
  /// Create a new `Polyseed` from a String.
  #[allow(clippy::needless_pass_by_value)]
  pub fn from_string(lang: Language, seed: Zeroizing<String>) -> Result<Polyseed, PolyseedError> {
    // Decode the seed into its polynomial coefficients
    let mut poly = [0; POLYSEED_LENGTH];

    // Validate words are in the lang word list
    let lang_word_list: &WordList = &LANGUAGES[&lang];
    for (i, word) in seed.split_whitespace().enumerate() {
      // Find the word's index
      fn check_if_matches<S: AsRef<str>, I: Iterator<Item = S>>(
        has_prefix: bool,
        mut lang_words: I,
        word: &str,
      ) -> Option<usize> {
        if has_prefix {
          // Get the position of the word within the iterator
          // Doesn't use starts_with, as some words are substrings of others, leading to false
          // positives
          let mut get_position = || {
            lang_words.position(|lang_word| {
              let mut lang_word = lang_word.as_ref().chars();
              let mut word = word.chars();

              let mut res = true;
              for _ in 0 .. PREFIX_LEN {
                res &= lang_word.next() == word.next();
              }
              res
            })
          };
          let res = get_position();
          // If another word has this prefix, don't call it a match
          if get_position().is_some() {
            return None;
          }
          res
        } else {
          lang_words.position(|lang_word| lang_word.as_ref() == word)
        }
      }

      let Some(coeff) = (if lang_word_list.has_accent {
        let ascii = |word: &str| word.chars().filter(char::is_ascii).collect::<String>();
        check_if_matches(
          lang_word_list.has_prefix,
          lang_word_list.words.iter().map(|lang_word| ascii(lang_word)),
          &ascii(word),
        )
      } else {
        check_if_matches(lang_word_list.has_prefix, lang_word_list.words.iter(), word)
      }) else {
        Err(PolyseedError::InvalidSeed)?
      };

      // WordList asserts the word list length is less than u16::MAX
      poly[i] = u16::try_from(coeff).expect("coeff exceeded u16");
    }

    // XOR out the coin
    poly[POLY_NUM_CHECK_DIGITS] ^= COIN;

    // Validate the checksum
    if poly_eval(&poly) != 0 {
      Err(PolyseedError::InvalidChecksum)?;
    }

    // Convert the polynomial into entropy
    let mut entropy = Zeroizing::new([0; 32]);

    let mut extra = 0;

    let mut entropy_idx = 0;
    let mut entropy_bits = 0;

    let checksum = poly[0];
    for mut word_val in poly.into_iter().skip(POLY_NUM_CHECK_DIGITS) {
      // Parse the bottom bit, which is one of the bits of extra
      // This iterates for less than 16 iters, meaning this won't drop any bits
      extra <<= 1;
      extra |= word_val & 1;
      word_val >>= 1;

      // 10 bits per word creates a [8, 2], [6, 4], [4, 6], [2, 8] cycle
      // 15 % 4 is 3, leaving 2 bits off, and 152 (19 * 8) - 2 is 150, the amount of bits in the
      // secret
      let mut word_bits = GF_BITS - 1;
      while word_bits > 0 {
        if entropy_bits == BITS_PER_BYTE {
          entropy_idx += 1;
          entropy_bits = 0;
        }
        let chunk_bits = word_bits.min(BITS_PER_BYTE - entropy_bits);
        word_bits -= chunk_bits;
        let chunk_mask = (1u16 << chunk_bits) - 1;
        if chunk_bits < BITS_PER_BYTE {
          entropy[entropy_idx] <<= chunk_bits;
        }
        entropy[entropy_idx] |=
          u8::try_from((word_val >> word_bits) & chunk_mask).expect("chunk exceeded u8");
        entropy_bits += chunk_bits;
      }
    }

    let birthday = extra & DATE_MASK;
    // extra is contained to u16, and DATE_BITS > 8
    let features =
      u8::try_from(extra >> DATE_BITS).expect("couldn't convert extra >> DATE_BITS to u8");

    let res = Self::from_internal(lang, features, birthday, entropy);
    if let Ok(res) = res.as_ref() {
      debug_assert_eq!(res.checksum, checksum);
    }
    res
  }
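  // A small sketch (hypothetical helper, not part of the original file) of
  // the final step of from_string: the 15 recovered "extra" bits split into
  // 5 feature bits and a 10-bit birthday, matching to_poly's
  // (features << DATE_BITS) | birthday packing.
  #[cfg(test)]
  fn _extra_bits_sketch() {
    let features: u16 = 0b00001;
    let birthday: u16 = 0b11_0100_0001; // 833
    let extra = (features << DATE_BITS) | birthday;
    assert_eq!(extra & DATE_MASK, birthday);
    assert_eq!(extra >> DATE_BITS, features);
  }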
  /// When this seed was created, defined in seconds since the epoch.
  pub fn birthday(&self) -> u64 {
    birthday_decode(self.birthday)
  }

  /// This seed's features.
  pub fn features(&self) -> u8 {
    self.features
  }

  /// This seed's entropy.
  pub fn entropy(&self) -> &Zeroizing<[u8; 32]> {
    &self.entropy
  }

  /// The key derived from this seed.
  pub fn key(&self) -> Zeroizing<[u8; 32]> {
    let mut key = Zeroizing::new([0; 32]);
    pbkdf2_hmac::<Sha3_256>(
      self.entropy.as_slice(),
      POLYSEED_SALT,
      POLYSEED_KEYGEN_ITERATIONS,
      key.as_mut(),
    );
    key
  }

  /// The String representation of this seed.
  pub fn to_string(&self) -> Zeroizing<String> {
    // Encode the polynomial with the existing checksum
    let mut poly = self.to_poly();
    poly[0] = self.checksum;

    // Embed the coin
    poly[POLY_NUM_CHECK_DIGITS] ^= COIN;

    // Output words
    let mut seed = Zeroizing::new(String::new());
    let words = &LANGUAGES[&self.language].words;
    for i in 0 .. poly.len() {
      seed.push_str(words[usize::from(poly[i])]);
      if i < poly.len() - 1 {
        seed.push(' ');
      }
    }

    seed
  }
}
@@ -1,218 +0,0 @@
use zeroize::Zeroizing;
use rand_core::OsRng;

use crate::*;

#[test]
fn test_polyseed() {
  struct Vector {
    language: Language,
    seed: String,
    entropy: String,
    birthday: u64,
    has_prefix: bool,
    has_accent: bool,
  }

  let vectors = [
    Vector {
      language: Language::English,
      seed: "raven tail swear infant grief assist regular lamp \
             duck valid someone little harsh puppy airport language"
        .into(),
      entropy: "dd76e7359a0ded37cd0ff0f3c829a5ae01673300000000000000000000000000".into(),
      birthday: 1638446400,
      has_prefix: true,
      has_accent: false,
    },
    Vector {
      language: Language::Spanish,
      seed: "eje fin parte célebre tabú pestaña lienzo puma \
             prisión hora regalo lengua existir lápiz lote sonoro"
        .into(),
      entropy: "5a2b02df7db21fcbe6ec6df137d54c7b20fd2b00000000000000000000000000".into(),
      birthday: 3118651200,
      has_prefix: true,
      has_accent: true,
    },
    Vector {
      language: Language::French,
      seed: "valable arracher décaler jeudi amusant dresser mener épaissir risible \
             prouesse réserve ampleur ajuster muter caméra enchère"
        .into(),
      entropy: "11cfd870324b26657342c37360c424a14a050b00000000000000000000000000".into(),
      birthday: 1679314966,
      has_prefix: true,
      has_accent: true,
    },
    Vector {
      language: Language::Italian,
      seed: "caduco midollo copione meninge isotopo illogico riflesso tartaruga fermento \
             olandese normale tristezza episodio voragine forbito achille"
        .into(),
      entropy: "7ecc57c9b4652d4e31428f62bec91cfd55500600000000000000000000000000".into(),
      birthday: 1679316358,
      has_prefix: true,
      has_accent: false,
    },
    Vector {
      language: Language::Portuguese,
      seed: "caverna custear azedo adeus senador apertada sedoso omitir \
             sujeito aurora videira molho cartaz gesso dentista tapar"
        .into(),
      entropy: "45473063711376cae38f1b3eba18c874124e1d00000000000000000000000000".into(),
      birthday: 1679316657,
      has_prefix: true,
      has_accent: false,
    },
    Vector {
      language: Language::Czech,
      seed: "usmrtit nora dotaz komunita zavalit funkce mzda sotva akce \
             vesta kabel herna stodola uvolnit ustrnout email"
        .into(),
      entropy: "7ac8a4efd62d9c3c4c02e350d32326df37821c00000000000000000000000000".into(),
      birthday: 1679316898,
      has_prefix: true,
      has_accent: false,
    },
    Vector {
      language: Language::Korean,
      seed: "전망 선풍기 국제 무궁화 설사 기름 이론적 해안 절망 예선 \
             지우개 보관 절망 말기 시각 귀신"
        .into(),
      entropy: "684663fda420298f42ed94b2c512ed38ddf12b00000000000000000000000000".into(),
      birthday: 1679317073,
      has_prefix: false,
      has_accent: false,
    },
    Vector {
      language: Language::Japanese,
      seed: "うちあわせ ちつじょ つごう しはい けんこう とおる てみやげ はんとし たんとう \
             といれ おさない おさえる むかう ぬぐう なふだ せまる"
        .into(),
      entropy: "94e6665518a6286c6e3ba508a2279eb62b771f00000000000000000000000000".into(),
      birthday: 1679318722,
      has_prefix: false,
      has_accent: false,
    },
    Vector {
      language: Language::ChineseTraditional,
      seed: "亂 挖 斤 柄 代 圈 枝 轄 魯 論 函 開 勘 番 榮 壁".into(),
      entropy: "b1594f585987ab0fd5a31da1f0d377dae5283f00000000000000000000000000".into(),
      birthday: 1679426433,
      has_prefix: false,
      has_accent: false,
    },
    Vector {
      language: Language::ChineseSimplified,
      seed: "啊 百 族 府 票 划 伪 仓 叶 虾 借 溜 晨 左 等 鬼".into(),
      entropy: "21cdd366f337b89b8d1bc1df9fe73047c22b0300000000000000000000000000".into(),
      birthday: 1679426817,
      has_prefix: false,
      has_accent: false,
    },
    // The following seed requires the language specification in order to calculate
    // a single valid checksum
    Vector {
      language: Language::Spanish,
      seed: "impo sort usua cabi venu nobl oliv clim \
             cont barr marc auto prod vaca torn fati"
        .into(),
      entropy: "dbfce25fe09b68a340e01c62417eeef43ad51800000000000000000000000000".into(),
      birthday: 1701511650,
      has_prefix: true,
      has_accent: true,
    },
  ];

  for vector in vectors {
    let add_whitespace = |mut seed: String| {
      seed.push(' ');
      seed
    };

    let seed_without_accents = |seed: &str| {
      seed
        .split_whitespace()
        .map(|w| w.chars().filter(char::is_ascii).collect::<String>())
        .collect::<Vec<_>>()
        .join(" ")
    };

    let trim_seed = |seed: &str| {
      let seed_to_trim =
        if vector.has_accent { seed_without_accents(seed) } else { seed.to_string() };
      seed_to_trim
        .split_whitespace()
        .map(|w| {
          let mut ascii = 0;
          let mut to_take = w.len();
          for (i, char) in w.chars().enumerate() {
            if char.is_ascii() {
              ascii += 1;
            }
            if ascii == PREFIX_LEN {
              // +1 to include this character, which puts us at the prefix length
              to_take = i + 1;
              break;
            }
          }
          w.chars().take(to_take).collect::<String>()
        })
        .collect::<Vec<_>>()
        .join(" ")
    };

    // String -> Seed
    println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
    let seed = Polyseed::from_string(vector.language, Zeroizing::new(vector.seed.clone())).unwrap();
    let trim = trim_seed(&vector.seed);
    let add_whitespace = add_whitespace(vector.seed.clone());
    let seed_without_accents = seed_without_accents(&vector.seed);

    // Make sure a version with added whitespace still works
    let whitespaced_seed =
      Polyseed::from_string(vector.language, Zeroizing::new(add_whitespace)).unwrap();
    assert_eq!(seed, whitespaced_seed);
    // Check trimmed versions work
    if vector.has_prefix {
      let trimmed_seed = Polyseed::from_string(vector.language, Zeroizing::new(trim)).unwrap();
      assert_eq!(seed, trimmed_seed);
    }
    // Check versions without accents work
    if vector.has_accent {
      let seed_without_accents =
        Polyseed::from_string(vector.language, Zeroizing::new(seed_without_accents)).unwrap();
      assert_eq!(seed, seed_without_accents);
    }

    let entropy = Zeroizing::new(hex::decode(vector.entropy).unwrap().try_into().unwrap());
    assert_eq!(*seed.entropy(), entropy);
    assert!(seed.birthday().abs_diff(vector.birthday) < TIME_STEP);

    // Entropy -> Seed
    let from_entropy = Polyseed::from(vector.language, 0, seed.birthday(), entropy).unwrap();
    assert_eq!(seed.to_string(), from_entropy.to_string());

    // Check against ourselves
    {
      let seed = Polyseed::new(&mut OsRng, vector.language);
      println!("{}. seed: {}", line!(), *seed.to_string());
      assert_eq!(seed, Polyseed::from_string(vector.language, seed.to_string()).unwrap());
      assert_eq!(
        seed,
        Polyseed::from(vector.language, 0, seed.birthday(), seed.entropy().clone()).unwrap()
      );
    }
  }
}

#[test]
fn test_invalid_polyseed() {
  // This seed includes unsupported feature bits and should error on decode
  let seed = "include domain claim resemble urban hire lunch bird \
              crucial fire best wife ring warm ignore model"
    .into();
  let res = Polyseed::from_string(Language::English, Zeroizing::new(seed));
  assert_eq!(res, Err(PolyseedError::UnsupportedFeatures));
}
File diffs suppressed because they are too large (10 files).
@@ -1,41 +0,0 @@
[package]
name = "monero-seed"
version = "0.1.0"
description = "Rust implementation of Monero's seed algorithm"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/seed"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.80"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }

thiserror = { version = "2", default-features = false }

zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }

curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }

[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
monero-primitives = { path = "../../primitives", default-features = false, features = ["std"] }

[features]
std = [
  "std-shims/std",

  "thiserror/std",

  "zeroize/std",
  "rand_core/std",
]
default = ["std"]
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2024 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,11 +0,0 @@
# Monero Seeds

Rust implementation of Monero's seed algorithm.

This library is usable under no-std when the `std` feature (on by default) is
disabled.

### Cargo Features

- `std` (on by default): Enables `std` (and with it, more efficient internal
  implementations).
@@ -1,352 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]

use core::{ops::Deref, fmt};
use std_shims::{
  sync::LazyLock,
  vec,
  vec::Vec,
  string::{String, ToString},
  collections::HashMap,
};

use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};

use curve25519_dalek::scalar::Scalar;

#[cfg(test)]
mod tests;

// The amount of words in a seed without a checksum.
const SEED_LENGTH: usize = 24;
// The amount of words in a seed with a checksum.
const SEED_LENGTH_WITH_CHECKSUM: usize = 25;

/// An error when working with a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug, thiserror::Error)]
pub enum SeedError {
  /// The seed was invalid.
  #[error("invalid seed")]
  InvalidSeed,
  /// The checksum did not match the data.
  #[error("invalid checksum")]
  InvalidChecksum,
  /// The deprecated English language option was used with a checksum.
  ///
  /// The deprecated English language option did not include a checksum.
  #[error("deprecated English language option included a checksum")]
  DeprecatedEnglishWithChecksum,
}

/// Language options.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Zeroize)]
pub enum Language {
  /// Chinese language option.
  Chinese,
  /// English language option.
  English,
  /// Dutch language option.
  Dutch,
  /// French language option.
  French,
  /// Spanish language option.
  Spanish,
  /// German language option.
  German,
  /// Italian language option.
  Italian,
  /// Portuguese language option.
  Portuguese,
  /// Japanese language option.
  Japanese,
  /// Russian language option.
  Russian,
  /// Esperanto language option.
  Esperanto,
  /// Lojban language option.
  Lojban,
  /// The original, and deprecated, English language.
  DeprecatedEnglish,
}
fn trim(word: &str, len: usize) -> Zeroizing<String> {
  Zeroizing::new(word.chars().take(len).collect())
}

struct WordList {
  word_list: &'static [&'static str],
  word_map: HashMap<&'static str, usize>,
  trimmed_word_map: HashMap<String, usize>,
  unique_prefix_length: usize,
}

impl WordList {
  fn new(word_list: &'static [&'static str], prefix_length: usize) -> WordList {
    let mut lang = WordList {
      word_list,
      word_map: HashMap::new(),
      trimmed_word_map: HashMap::new(),
      unique_prefix_length: prefix_length,
    };

    for (i, word) in lang.word_list.iter().enumerate() {
      lang.word_map.insert(word, i);
      lang.trimmed_word_map.insert(trim(word, lang.unique_prefix_length).deref().clone(), i);
    }

    lang
  }
}

static LANGUAGES: LazyLock<HashMap<Language, WordList>> = LazyLock::new(|| {
  HashMap::from([
    (Language::Chinese, WordList::new(include!("./words/zh.rs"), 1)),
    (Language::English, WordList::new(include!("./words/en.rs"), 3)),
    (Language::Dutch, WordList::new(include!("./words/nl.rs"), 4)),
    (Language::French, WordList::new(include!("./words/fr.rs"), 4)),
    (Language::Spanish, WordList::new(include!("./words/es.rs"), 4)),
    (Language::German, WordList::new(include!("./words/de.rs"), 4)),
    (Language::Italian, WordList::new(include!("./words/it.rs"), 4)),
    (Language::Portuguese, WordList::new(include!("./words/pt.rs"), 4)),
    (Language::Japanese, WordList::new(include!("./words/ja.rs"), 3)),
    (Language::Russian, WordList::new(include!("./words/ru.rs"), 4)),
    (Language::Esperanto, WordList::new(include!("./words/eo.rs"), 4)),
    (Language::Lojban, WordList::new(include!("./words/jbo.rs"), 4)),
    (Language::DeprecatedEnglish, WordList::new(include!("./words/ang.rs"), 4)),
  ])
});
fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
  let mut trimmed_words = Zeroizing::new(String::new());
  for w in words {
    *trimmed_words += &trim(w, lang.unique_prefix_length);
  }

  const fn crc32_table() -> [u32; 256] {
    let poly = 0xedb88320u32;

    let mut res = [0; 256];
    let mut i = 0;
    while i < 256 {
      let mut entry = i;
      let mut b = 0;
      while b < 8 {
        let trigger = entry & 1;
        entry >>= 1;
        if trigger == 1 {
          entry ^= poly;
        }
        b += 1;
      }
      res[i as usize] = entry;
      i += 1;
    }

    res
  }
  const CRC32_TABLE: [u32; 256] = crc32_table();

  let trimmed_words = trimmed_words.as_bytes();
  let mut checksum = u32::MAX;
  for i in 0 .. trimmed_words.len() {
    checksum = CRC32_TABLE[usize::from(u8::try_from(checksum % 256).unwrap() ^ trimmed_words[i])] ^
      (checksum >> 8);
  }

  usize::try_from(!checksum).unwrap() % words.len()
}
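// A sanity-check sketch (hypothetical test, not part of the original file):
// the table-driven loop above is standard CRC-32 (polynomial 0xedb88320,
// init 0xffffffff, final complement), whose well-known check value for
// b"123456789" is 0xcbf43926.
#[cfg(test)]
fn _crc32_sketch() {
  let crc32 = |data: &[u8]| {
    let mut table = [0u32; 256];
    for i in 0 .. 256u32 {
      let mut entry = i;
      for _ in 0 .. 8 {
        entry = if (entry & 1) == 1 { (entry >> 1) ^ 0xedb88320 } else { entry >> 1 };
      }
      table[usize::try_from(i).unwrap()] = entry;
    }
    let mut checksum = u32::MAX;
    for &b in data {
      checksum = table[usize::from(u8::try_from(checksum % 256).unwrap() ^ b)] ^ (checksum >> 8);
    }
    !checksum
  };
  assert_eq!(crc32(b"123456789"), 0xcbf43926);
}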
// Convert a private key to a seed
#[allow(clippy::needless_pass_by_value)]
fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> Seed {
  let bytes = Zeroizing::new(key.to_bytes());

  // Get the language's words
  let words = &LANGUAGES[&lang].word_list;
  let list_len = u64::try_from(words.len()).unwrap();

  // To store the found words & add the checksum word later.
  let mut seed = Vec::with_capacity(25);

  // Convert to words
  // 4 bytes -> 3 words. 8 digits base 16 -> 3 digits base 1626
  let mut segment = [0; 4];
  let mut indices = [0; 4];
  for i in 0 .. 8 {
    // Convert the next 4 bytes to a u32 & get the word indices
    let start = i * 4;
    segment.copy_from_slice(&bytes[start .. (start + 4)]);
    // Actually convert to a u64 so we can add without overflowing
    indices[0] = u64::from(u32::from_le_bytes(segment));
    indices[1] = indices[0];
    indices[0] /= list_len;
    indices[2] = indices[0] + indices[1];
    indices[0] /= list_len;
    indices[3] = indices[0] + indices[2];

    // Append the words to the seed
    for i in indices.iter().skip(1) {
      let word = usize::try_from(i % list_len).unwrap();
      seed.push(Zeroizing::new(words[word].to_string()));
    }
  }
  segment.zeroize();
  indices.zeroize();

  // Create a checksum word for all languages except deprecated English
  if lang != Language::DeprecatedEnglish {
    let checksum = seed[checksum_index(&seed, &LANGUAGES[&lang])].clone();
    seed.push(checksum);
  }

  let mut res = Zeroizing::new(String::new());
  for (i, word) in seed.iter().enumerate() {
    if i != 0 {
      *res += " ";
    }
    *res += word;
  }
  Seed(lang, res)
}
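// A round-trip sketch (hypothetical test, not part of the original file) of
// the base-1626 mapping above: each u32 becomes three word indices, every
// index offset by its predecessor modulo the list length, which
// seed_to_bytes' `inner` closure later undoes.
#[cfg(test)]
fn _base1626_sketch() {
  const L: u64 = 1626; // the size of Monero's word lists
  let encode = |val: u32| {
    let val = u64::from(val);
    let w1 = val % L;
    let w2 = ((val / L) + w1) % L;
    let w3 = ((val / (L * L)) + w2) % L;
    [w1, w2, w3]
  };
  let decode = |[w1, w2, w3]: [u64; 3]| {
    let x2 = (L - w1 + w2) % L; // recovers (val / L) % L
    let x3 = (L - w2 + w3) % L; // recovers val / L^2, which fits since 2^32 / L^2 < L
    u32::try_from(w1 + (x2 * L) + (x3 * L * L)).unwrap()
  };
  assert_eq!(decode(encode(0xdead_beef)), 0xdead_beef);
}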
// Convert a seed to bytes
fn seed_to_bytes(lang: Language, words: &str) -> Result<Zeroizing<[u8; 32]>, SeedError> {
  // Get the seed's words
  let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
  if (words.len() != SEED_LENGTH) && (words.len() != SEED_LENGTH_WITH_CHECKSUM) {
    panic!("invalid seed passed to seed_to_bytes");
  }

  let has_checksum = words.len() == SEED_LENGTH_WITH_CHECKSUM;
  if has_checksum && (lang == Language::DeprecatedEnglish) {
    Err(SeedError::DeprecatedEnglishWithChecksum)?;
  }

  // Validate words are in the language word list
  let lang_word_list: &WordList = &LANGUAGES[&lang];
  let matched_indices = (|| {
    let has_checksum = words.len() == SEED_LENGTH_WITH_CHECKSUM;
    let mut matched_indices = Zeroizing::new(vec![]);

    // Iterate through all the words and see if they're all present
    for word in &words {
      let trimmed = trim(word, lang_word_list.unique_prefix_length);
      let word = if has_checksum { &trimmed } else { word };

      if let Some(index) = if has_checksum {
        lang_word_list.trimmed_word_map.get(word.deref())
      } else {
        lang_word_list.word_map.get(&word.as_str())
      } {
        matched_indices.push(*index);
      } else {
        Err(SeedError::InvalidSeed)?;
      }
    }

    if has_checksum {
      // Exclude the last word when calculating a checksum.
      let last_word = words.last().unwrap().clone();
      let checksum = words[checksum_index(&words[.. words.len() - 1], lang_word_list)].clone();

      // Check the trimmed checksum and trimmed last word line up
      if trim(&checksum, lang_word_list.unique_prefix_length) !=
        trim(&last_word, lang_word_list.unique_prefix_length)
      {
        Err(SeedError::InvalidChecksum)?;
      }
    }

    Ok(matched_indices)
  })()?;

  // Convert to bytes
  let mut res = Zeroizing::new([0; 32]);
  let mut indices = Zeroizing::new([0; 4]);
  for i in 0 .. 8 {
    // Read 3 indices at a time
    let i3 = i * 3;
    indices[1] = matched_indices[i3];
    indices[2] = matched_indices[i3 + 1];
    indices[3] = matched_indices[i3 + 2];

    let inner = |i| {
      let mut base = (lang_word_list.word_list.len() - indices[i] + indices[i + 1]) %
        lang_word_list.word_list.len();
      // Shift the index over
      for _ in 0 .. i {
        base *= lang_word_list.word_list.len();
      }
      base
    };
    // Set the last index
    indices[0] = indices[1] + inner(1) + inner(2);
    if (indices[0] % lang_word_list.word_list.len()) != indices[1] {
      Err(SeedError::InvalidSeed)?;
    }

    let pos = i * 4;
    let mut bytes = u32::try_from(indices[0]).unwrap().to_le_bytes();
    res[pos .. (pos + 4)].copy_from_slice(&bytes);
    bytes.zeroize();
  }

  Ok(res)
}
/// A Monero seed.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct Seed(Language, Zeroizing<String>);

impl fmt::Debug for Seed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.debug_struct("Seed").finish_non_exhaustive()
  }
}

impl Seed {
  /// Create a new seed.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Seed {
    let mut scalar_bytes = Zeroizing::new([0; 64]);
    rng.fill_bytes(scalar_bytes.as_mut());
    key_to_seed(lang, Zeroizing::new(Scalar::from_bytes_mod_order_wide(scalar_bytes.deref())))
  }

  /// Parse a seed from a string.
  #[allow(clippy::needless_pass_by_value)]
  pub fn from_string(lang: Language, words: Zeroizing<String>) -> Result<Seed, SeedError> {
    let entropy = seed_to_bytes(lang, &words)?;

    // Make sure this is a valid scalar
    let scalar = Scalar::from_canonical_bytes(*entropy);
    if scalar.is_none().into() {
      Err(SeedError::InvalidSeed)?;
    }
    let mut scalar = scalar.unwrap();
    scalar.zeroize();

    // Call from_entropy so a trimmed seed becomes a full seed
    Ok(Self::from_entropy(lang, entropy).unwrap())
  }

  /// Create a seed from entropy.
  #[allow(clippy::needless_pass_by_value)]
  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Seed> {
    Option::from(Scalar::from_canonical_bytes(*entropy))
      .map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
  }

  /// Convert a seed to a string.
  pub fn to_string(&self) -> Zeroizing<String> {
    self.1.clone()
  }

  /// Return the entropy underlying this seed.
  pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
    seed_to_bytes(self.0, &self.1).unwrap()
  }
}
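A minimal usage sketch (hypothetical, mirroring the tests below, not part of the original file) of the `Seed` API above:

use rand_core::OsRng;

fn _seed_usage_sketch() {
  let seed = Seed::new(&mut OsRng, Language::English);
  let recovered = Seed::from_string(Language::English, seed.to_string()).unwrap();
  assert_eq!(seed, recovered);
  // The entropy is the spend key's canonical bytes, so it round-trips too
  assert_eq!(seed, Seed::from_entropy(Language::English, seed.entropy()).unwrap());
}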
@@ -1,234 +0,0 @@
use zeroize::Zeroizing;
use rand_core::OsRng;

use curve25519_dalek::scalar::Scalar;

use monero_primitives::keccak256;

use crate::*;

#[test]
fn test_original_seed() {
  struct Vector {
    language: Language,
    seed: String,
    spend: String,
    view: String,
  }

  let vectors = [
    Vector {
      language: Language::Chinese,
      seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
      spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
      view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
    },
    Vector {
      language: Language::English,
      seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
             abnormal memoir nylon mostly building shrugged online ember northern \
             ruby woes dauntless boil family illness inroads northern"
        .into(),
      spend: "c0af65c0dd837e666b9d0dfed62745f4df35aed7ea619b2798a709f0fe545403".into(),
      view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
    },
    Vector {
      language: Language::Dutch,
      seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
             ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
             wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
        .into(),
      spend: "e2d2873085c447c2bc7664222ac8f7d240df3aeac137f5ff2022eaa629e5b10a".into(),
      view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
    },
    Vector {
      language: Language::French,
      seed: "poids vaseux tarte bazar poivre effet entier nuance \
             sensuel ennui pacte osselet poudre battre alibi mouton \
             stade paquet pliage gibier type question position projet pliage"
        .into(),
      spend: "2dd39ff1a4628a94b5c2ec3e42fb3dfe15c2b2f010154dc3b3de6791e805b904".into(),
      view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
    },
    Vector {
      language: Language::Spanish,
      seed: "minero ocupar mirar evadir octubre cal logro miope \
             opaco disco ancla litio clase cuello nasal clase \
             fiar avance deseo mente grumo negro cordón croqueta clase"
        .into(),
      spend: "ae2c9bebdddac067d73ec0180147fc92bdf9ac7337f1bcafbbe57dd13558eb02".into(),
      view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
    },
    Vector {
      language: Language::German,
      seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
             Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
             Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
        .into(),
      spend: "79801b7a1b9796856e2397d862a113862e1fdc289a205e79d8d70995b276db06".into(),
      view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
    },
    Vector {
      language: Language::Italian,
      seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
             forzare meritare litigare lezione segreto evasione votare buio \
             licenza cliente dorso natale crescere vento tutelare vetta evasione"
        .into(),
      spend: "5e7fd774eb00fa5877e2a8b4dc9c7ffe111008a3891220b56a6e49ac816d650a".into(),
      view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
    },
    Vector {
      language: Language::Portuguese,
      seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
             iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
             cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
        .into(),
      spend: "13b3115f37e35c6aa1db97428b897e584698670c1b27854568d678e729200c0f".into(),
      view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
    },
    Vector {
      language: Language::Japanese,
      seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
             かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
             おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
        .into(),
      spend: "c56e895cdb13007eda8399222974cdbab493640663804b93cbef3d8c3df80b0b".into(),
      view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
    },
    Vector {
      language: Language::Russian,
      seed: "шатер икра нация ехать получать инерция доза реальный \
             рыжий таможня лопата душа веселый клетка атлас лекция \
             обгонять паек наивный лыжный дурак стать ежик задача паек"
        .into(),
      spend: "7cb5492df5eb2db4c84af20766391cd3e3662ab1a241c70fc881f3d02c381f05".into(),
      view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
    },
    Vector {
      language: Language::Esperanto,
      seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
             pudro incidento kumuluso ikono smirgi hirundo uretro krii \
             sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
        .into(),
      spend: "82ebf0336d3b152701964ed41df6b6e9a035e57fc98b84039ed0bd4611c58904".into(),
      view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
    },
    Vector {
      language: Language::Lojban,
      seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
             mlatu xedja muvgau palpi xindo sfubu ciste cinri \
             blabi darno dembi janli blabi fenki bukpu burcu blabi"
        .into(),
      spend: "e4f8c6819ab6cf792cebb858caabac9307fd646901d72123e0367ebc0a79c200".into(),
      view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
    },
    Vector {
      language: Language::DeprecatedEnglish,
      seed: "glorious especially puff son moment add youth nowhere \
             throw glide grip wrong rhythm consume very swear \
             bitter heavy eventually begin reason flirt type unable"
        .into(),
      spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
      view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
    },
    // The following seeds require the language specification in order to calculate
    // a single valid checksum
    Vector {
      language: Language::Spanish,
      seed: "pluma laico atraer pintor peor cerca balde buscar \
             lancha batir nulo reloj resto gemelo nevera poder columna gol \
             oveja latir amplio bolero feliz fuerza nevera"
        .into(),
      spend: "30303983fc8d215dd020cc6b8223793318d55c466a86e4390954f373fdc7200a".into(),
      view: "97c649143f3c147ba59aa5506cc09c7992c5c219bb26964442142bf97980800e".into(),
    },
    Vector {
      language: Language::Spanish,
      seed: "pluma pluma pluma pluma pluma pluma pluma pluma \
             pluma pluma pluma pluma pluma pluma pluma pluma \
             pluma pluma pluma pluma pluma pluma pluma pluma pluma"
        .into(),
      spend: "b4050000b4050000b4050000b4050000b4050000b4050000b4050000b4050000".into(),
      view: "d73534f7912b395eb70ef911791a2814eb6df7ce56528eaaa83ff2b72d9f5e0f".into(),
    },
    Vector {
      language: Language::English,
      seed: "plus plus plus plus plus plus plus plus \
             plus plus plus plus plus plus plus plus \
             plus plus plus plus plus plus plus plus plus"
        .into(),
      spend: "3b0400003b0400003b0400003b0400003b0400003b0400003b0400003b040000".into(),
      view: "43a8a7715eed11eff145a2024ddcc39740255156da7bbd736ee66a0838053a02".into(),
    },
    Vector {
      language: Language::Spanish,
      seed: "audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio audio"
        .into(),
      spend: "ba000000ba000000ba000000ba000000ba000000ba000000ba000000ba000000".into(),
      view: "1437256da2c85d029b293d8c6b1d625d9374969301869b12f37186e3f906c708".into(),
    },
    Vector {
      language: Language::English,
      seed: "audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio audio"
        .into(),
      spend: "7900000079000000790000007900000079000000790000007900000079000000".into(),
      view: "20bec797ab96780ae6a045dd816676ca7ed1d7c6773f7022d03ad234b581d600".into(),
    },
  ];

  for vector in vectors {
    fn trim_by_lang(word: &str, lang: Language) -> String {
      if lang != Language::DeprecatedEnglish {
        word.chars().take(LANGUAGES[&lang].unique_prefix_length).collect()
      } else {
        word.to_string()
      }
    }

    let trim_seed = |seed: &str| {
      seed
        .split_whitespace()
        .map(|word| trim_by_lang(word, vector.language))
        .collect::<Vec<_>>()
        .join(" ")
    };

    // Test against Monero
    {
      println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
      let seed = Seed::from_string(vector.language, Zeroizing::new(vector.seed.clone())).unwrap();
      let trim = trim_seed(&vector.seed);
      assert_eq!(seed, Seed::from_string(vector.language, Zeroizing::new(trim)).unwrap());

      let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
      // For original seeds, Monero directly uses the entropy as a spend key
      assert_eq!(
        Option::<Scalar>::from(Scalar::from_canonical_bytes(*seed.entropy())),
        Option::<Scalar>::from(Scalar::from_canonical_bytes(spend)),
      );

      let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
      // Monero then derives the view key as H(spend)
      assert_eq!(
        Scalar::from_bytes_mod_order(keccak256(spend)),
        Scalar::from_canonical_bytes(view).unwrap()
      );

      assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
    }

    // Test against ourselves
    {
      let seed = Seed::new(&mut OsRng, vector.language);
      println!("{}. seed: {}", line!(), *seed.to_string());
      let trim = trim_seed(&seed.to_string());
      assert_eq!(seed, Seed::from_string(vector.language, Zeroizing::new(trim)).unwrap());
      assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
      assert_eq!(seed, Seed::from_string(vector.language, seed.to_string()).unwrap());
    }
  }
}
File diffs suppressed because they are too large (9 files).
Some files were not shown because too many files have changed in this diff.