Mirror of https://github.com/serai-dex/serai.git, synced 2025-12-11 13:39:25 +00:00

Compare commits: undroppabl...e3809b2ff1 (86 commits)
Commits (SHA1):

e3809b2ff1
fd2d8b4f0a
bc81614894
8df5aa2e2d
b000740470
b9f554111d
354c408e3e
df3b60376a
8d209c652e
9ddad794b4
b934e484cc
f8aee9b3c8
f51d77d26a
0780deb643
75c38560f4
9f1c5268a5
35b113768b
f2595c4939
8fcfa6d3d5
54c9d19726
25324c3cd5
ecb7df85b0
68c7acdbef
8b60feed92
5c895efcd0
60e55656aa
9536282418
8297d0679d
d9f854b08a
8aaf7f7dc6
ce447558ac
fc850da30e
d6f6cf1965
4438b51881
6ae0d9fad7
ad08b410a8
ec3cfd3ab7
01eb2daa0b
885000f970
4be506414b
1143d84e1d
336922101f
ffa033d978
23f986f57a
bb726b58af
387615705c
c7f825a192
d363b1c173
d5077ae966
188fcc3cb4
cbab9486c6
a5f4c450c6
4f65a0b147
feb18d64a7
cb1e6535cb
6b8cf6653a
b426bfcfe8
21ce50ecf7
a4ceb2e756
eab5d9e64f
e9c1235b76
dc1b8dfccd
d0201cf2e5
f3d20e60b3
dafba81b40
91f8ec53d9
fc9a4a08b8
45fadb21ac
28619fbee1
bbe014c3a7
fb3fadb3d3
f481d20773
599b2dec8f
435f1d9ae1
d7ecab605e
805fea52ec
48db06f901
e9d0a5e0ed
44d05518aa
23b433fe6c
2e57168a97
5c6160c398
9eee1d971e
e6300847d6
e0a3e7bea6
cbebaa1349
.github/actions/bitcoin/action.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ runs:
   steps:
     - name: Bitcoin Daemon Cache
       id: cache-bitcoind
-      uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
+      uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
       with:
         path: bitcoin.tar.gz
         key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
.github/actions/build-dependencies/action.yml (vendored, 34 changes)

@@ -7,13 +7,12 @@ runs:
     - name: Remove unused packages
       shell: bash
       run: |
-        sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
+        sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
         sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
         sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
         sudo apt remove -y --allow-remove-essential -f shim-signed
         # This command would fail, due to shim-signed having unmet dependencies, hence its removal
         sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
         sudo apt autoremove -y
         sudo apt clean
-        docker system prune -a --volumes
       if: runner.os == 'Linux'

     - name: Remove unused packages

@@ -41,9 +40,34 @@ runs:
     - name: Install solc
       shell: bash
       run: |
-        cargo install svm-rs
+        cargo +1.89 install svm-rs --version =0.5.18
         svm install 0.8.25
         svm use 0.8.25

+    - name: Remove preinstalled Docker
+      shell: bash
+      run: |
+        docker system prune -a --volumes
+        sudo apt remove -y *docker*
+        # Install uidmap which will be required for the explicitly installed Docker
+        sudo apt install uidmap
+      if: runner.os == 'Linux'
+
+    - name: Update system dependencies
+      shell: bash
+      run: |
+        sudo apt update -y
+        sudo apt upgrade -y
+        sudo apt autoremove -y
+        sudo apt clean
+      if: runner.os == 'Linux'
+
+    - name: Install rootless Docker
+      uses: docker/setup-docker-action@b60f85385d03ac8acfca6d9996982511d8620a19
+      with:
+        rootless: true
+        set-host: true
+      if: runner.os == 'Linux'

     # - name: Cache Rust
     #   uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
.github/actions/monero-wallet-rpc/action.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ runs:
   steps:
     - name: Monero Wallet RPC Cache
      id: cache-monero-wallet-rpc
-      uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
+      uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
      with:
        path: monero-wallet-rpc
        key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
.github/actions/monero/action.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ runs:
   steps:
     - name: Monero Daemon Cache
      id: cache-monerod
-      uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
+      uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
      with:
        path: /usr/bin/monerod
        key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
.github/nightly-version (vendored, 2 changes)

@@ -1 +1 @@
-nightly-2024-07-01
+nightly-2024-09-01
.github/workflows/daily-deny.yml (vendored, 6 changes)

@@ -12,13 +12,13 @@ jobs:
       - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

       - name: Advisory Cache
-        uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
         with:
           path: ~/.cargo/advisory-db
           key: rust-advisory-db

       - name: Install cargo deny
-        run: cargo install --locked cargo-deny
+        run: cargo +1.89 install cargo-deny --version =0.18.3

       - name: Run cargo deny
-        run: cargo deny -L error --all-features check
+        run: cargo deny -L error --all-features check --hide-inclusion-graph
.github/workflows/lint.yml (vendored, 10 changes)

@@ -46,16 +46,16 @@ jobs:
       - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

       - name: Advisory Cache
-        uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
         with:
           path: ~/.cargo/advisory-db
           key: rust-advisory-db

       - name: Install cargo deny
-        run: cargo install --locked cargo-deny
+        run: cargo +1.89 install cargo-deny --version =0.18.3

       - name: Run cargo deny
-        run: cargo deny -L error --all-features check
+        run: cargo deny -L error --all-features check --hide-inclusion-graph

   fmt:
     runs-on: ubuntu-latest

@@ -79,5 +79,5 @@ jobs:
       - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
       - name: Verify all dependencies are in use
         run: |
-          cargo install cargo-machete
-          cargo machete
+          cargo +1.89 install cargo-machete --version =0.8.0
+          cargo +1.89 machete
.github/workflows/monero-tests.yaml (vendored, 5 changes)

@@ -39,9 +39,6 @@ jobs:
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib

   # Doesn't run unit tests with features as the tests workflow will

@@ -65,7 +62,6 @@ jobs:
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'

       - name: Run Integration Tests
         # Don't run if the the tests workflow also will

@@ -74,4 +70,3 @@ jobs:
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
           GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
-          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'
.github/workflows/networks-tests.yml (vendored, 3 changes)

@@ -45,7 +45,4 @@ jobs:
             -p monero-simple-request-rpc \
             -p monero-address \
             -p monero-wallet \
-            -p monero-seed \
-            -p polyseed \
-            -p monero-wallet-util \
             -p monero-serai-verify-chain
.github/workflows/pages.yml (vendored, 37 changes)

@@ -1,6 +1,7 @@
 # MIT License
 #
 # Copyright (c) 2022 just-the-docs
+# Copyright (c) 2022-2024 Luke Parker
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -20,31 +21,21 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.

-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
-
-# Sample workflow for building and deploying a Jekyll site to GitHub Pages
-name: Deploy Jekyll site to Pages
+name: Deploy Rust docs and Jekyll site to Pages

 on:
   push:
     branches:
       - "develop"
-    paths:
-      - "docs/**"

   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:

 # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
 permissions:
   contents: read
   pages: write
   id-token: write

-# Allow one concurrent deployment
+# Only allow one concurrent deployment
 concurrency:
   group: "pages"
   cancel-in-progress: true

@@ -53,9 +44,6 @@ jobs:
   # Build job
   build:
     runs-on: ubuntu-latest
-    defaults:
-      run:
-        working-directory: docs
     steps:
       - name: Checkout
         uses: actions/checkout@v3

@@ -69,11 +57,24 @@ jobs:
         id: pages
         uses: actions/configure-pages@v3
       - name: Build with Jekyll
-        run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
+        run: cd ${{ github.workspace }}/docs && bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
         env:
           JEKYLL_ENV: production

+      - name: Get nightly version to use
+        id: nightly
+        shell: bash
+        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+      - name: Buld Rust docs
+        run: |
+          rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c rust-docs
+          RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --all-features
+          mv target/doc docs/_site/rust
+
       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v1
+        uses: actions/upload-pages-artifact@v3
         with:
           path: "docs/_site/"

@@ -87,4 +88,4 @@ jobs:
     steps:
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v2
+        uses: actions/deploy-pages@v4
Cargo.lock (generated, 973 changes)
File diff suppressed because it is too large.
Cargo.toml

@@ -55,9 +55,6 @@ members = [
   "networks/monero/rpc/simple-request",
   "networks/monero/wallet/address",
   "networks/monero/wallet",
-  "networks/monero/wallet/seed",
-  "networks/monero/wallet/polyseed",
-  "networks/monero/wallet/util",
   "networks/monero/verify-chain",

   "message-queue",

@@ -144,6 +141,9 @@ parking_lot = { path = "patches/parking_lot" }
 zstd = { path = "patches/zstd" }
 # Needed for WAL compression
 rocksdb = { path = "patches/rocksdb" }
+# 1.0.1 was yanked due to a breaking change (an extra field)
+# 2.0 has fewer dependencies and still works within our tree
+tiny-bip39 = { path = "patches/tiny-bip39" }

 # is-terminal now has an std-based solution with an equivalent API
 is-terminal = { path = "patches/is-terminal" }

@@ -158,9 +158,6 @@ matches = { path = "patches/matches" }
 option-ext = { path = "patches/option-ext" }
 directories-next = { path = "patches/directories-next" }

-# https://github.com/alloy-rs/core/issues/717
-alloy-sol-type-parser = { git = "https://github.com/alloy-rs/core", rev = "446b9d2fbce12b88456152170709a3eaac929af0" }
-
 [workspace.lints.clippy]
 unwrap_or_default = "allow"
 borrow_as_ptr = "deny"
LICENSE (2 changes)

@@ -5,4 +5,4 @@ a full copy of the AGPL-3.0 License is included in the root of this repository
 as a reference text. This copy should be provided with any distribution of a
 crate licensed under the AGPL-3.0, as per its terms.

-The GitHub actions (`.github/actions`) are licensed under the MIT license.
+The GitHub actions/workflows (`.github`) are licensed under the MIT license.
@@ -59,7 +59,6 @@ issued at the discretion of the Immunefi program managers.
 - [Website](https://serai.exchange/)
 - [Immunefi](https://immunefi.com/bounty/serai/)
 - [Twitter](https://twitter.com/SeraiDEX)
 - [Mastodon](https://cryptodon.lol/@serai)
 - [Discord](https://discord.gg/mpEUtJR3vz)
 - [Matrix](https://matrix.to/#/#serai:matrix.org)
 - [Reddit](https://www.reddit.com/r/SeraiDEX/)
@@ -12,9 +12,9 @@ use tokio::{
 use borsh::BorshSerialize;
 use sp_application_crypto::RuntimePublic;
 use serai_client::{
-  primitives::{NETWORKS, NetworkId, Signature},
-  validator_sets::primitives::{Session, ValidatorSet},
-  SeraiError, TemporalSerai, Serai,
+  primitives::{ExternalNetworkId, Signature, EXTERNAL_NETWORKS},
+  validator_sets::primitives::{ExternalValidatorSet, Session},
+  Serai, SeraiError, TemporalSerai,
 };

 use serai_db::{Get, DbTxn, Db, create_db};

@@ -28,17 +28,17 @@ use crate::{
 create_db! {
   CosignDb {
-    ReceivedCosign: (set: ValidatorSet, block: [u8; 32]) -> CosignedBlock,
-    LatestCosign: (network: NetworkId) -> CosignedBlock,
-    DistinctChain: (set: ValidatorSet) -> (),
+    ReceivedCosign: (set: ExternalValidatorSet, block: [u8; 32]) -> CosignedBlock,
+    LatestCosign: (network: ExternalNetworkId) -> CosignedBlock,
+    DistinctChain: (set: ExternalValidatorSet) -> (),
   }
 }

 pub struct CosignEvaluator<D: Db> {
   db: Mutex<D>,
   serai: Arc<Serai>,
-  stakes: RwLock<Option<HashMap<NetworkId, u64>>>,
-  latest_cosigns: RwLock<HashMap<NetworkId, CosignedBlock>>,
+  stakes: RwLock<Option<HashMap<ExternalNetworkId, u64>>>,
+  latest_cosigns: RwLock<HashMap<ExternalNetworkId, CosignedBlock>>,
 }

 impl<D: Db> CosignEvaluator<D> {

@@ -79,7 +79,7 @@ impl<D: Db> CosignEvaluator<D> {
     let serai = self.serai.as_of_latest_finalized_block().await?;

     let mut stakes = HashMap::new();
-    for network in NETWORKS {
+    for network in EXTERNAL_NETWORKS {
       // Use if this network has published a Batch for a short-circuit of if they've ever set a key
       let set_key = serai.in_instructions().last_batch_for_network(network).await?.is_some();
       if set_key {

@@ -87,7 +87,7 @@ impl<D: Db> CosignEvaluator<D> {
           network,
           serai
             .validator_sets()
-            .total_allocated_stake(network)
+            .total_allocated_stake(network.into())
             .await?
             .expect("network which published a batch didn't have a stake set")
             .0,

@@ -126,9 +126,9 @@ impl<D: Db> CosignEvaluator<D> {
   async fn set_with_keys_fn(
     serai: &TemporalSerai<'_>,
-    network: NetworkId,
-  ) -> Result<Option<ValidatorSet>, SeraiError> {
-    let Some(latest_session) = serai.validator_sets().session(network).await? else {
+    network: ExternalNetworkId,
+  ) -> Result<Option<ExternalValidatorSet>, SeraiError> {
+    let Some(latest_session) = serai.validator_sets().session(network.into()).await? else {
       log::warn!("received cosign from {:?}, which doesn't yet have a session", network);
       return Ok(None);
     };

@@ -136,13 +136,13 @@ impl<D: Db> CosignEvaluator<D> {
     Ok(Some(
       if serai
         .validator_sets()
-        .keys(ValidatorSet { network, session: prior_session })
+        .keys(ExternalValidatorSet { network, session: prior_session })
         .await?
         .is_some()
       {
-        ValidatorSet { network, session: prior_session }
+        ExternalValidatorSet { network, session: prior_session }
       } else {
-        ValidatorSet { network, session: latest_session }
+        ExternalValidatorSet { network, session: latest_session }
       },
     ))
   }

@@ -204,16 +204,12 @@ impl<D: Db> CosignEvaluator<D> {
     let mut total_stake = 0;
     let mut total_on_distinct_chain = 0;
-    for network in NETWORKS {
-      if network == NetworkId::Serai {
-        continue;
-      }
-
+    for network in EXTERNAL_NETWORKS {
       // Get the current set for this network
       let set_with_keys = {
         let mut res;
         while {
-          res = set_with_keys_fn(&serai, cosign.network).await;
+          res = set_with_keys_fn(&serai, network).await;
           res.is_err()
         } {
           log::error!(

@@ -231,7 +227,8 @@ impl<D: Db> CosignEvaluator<D> {
       let stake = {
         let mut res;
         while {
-          res = serai.validator_sets().total_allocated_stake(set_with_keys.network).await;
+          res =
+            serai.validator_sets().total_allocated_stake(set_with_keys.network.into()).await;
           res.is_err()
         } {
           log::error!(

@@ -271,7 +268,7 @@ impl<D: Db> CosignEvaluator<D> {
   #[allow(clippy::new_ret_no_self)]
   pub fn new<P: P2p>(db: D, p2p: P, serai: Arc<Serai>) -> mpsc::UnboundedSender<CosignedBlock> {
     let mut latest_cosigns = HashMap::new();
-    for network in NETWORKS {
+    for network in EXTERNAL_NETWORKS {
       if let Some(cosign) = LatestCosign::get(&db, network) {
         latest_cosigns.insert(network, cosign);
       }
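These hunks, and most of the ones that follow, apply one refactor: APIs which only ever deal with non-Serai networks move from NetworkId to a dedicated ExternalNetworkId (and from ValidatorSet to ExternalValidatorSet), turning the repeated "if network == NetworkId::Serai { continue }" checks into a type-level guarantee. A minimal hypothetical sketch of the relationship the new code appears to rely on, with `.into()` widening infallibly and `try_from` narrowing fallibly; only the Serai and Bitcoin variants are visible in these diffs, so the real definitions in serai-client's primitives may differ:

// Hypothetical stand-ins; the real types live in serai-client.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum NetworkId { Serai, Bitcoin, Ethereum, Monero }

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum ExternalNetworkId { Bitcoin, Ethereum, Monero }

pub const EXTERNAL_NETWORKS: [ExternalNetworkId; 3] =
  [ExternalNetworkId::Bitcoin, ExternalNetworkId::Ethereum, ExternalNetworkId::Monero];

// Infallible widening, enabling the `network.into()` calls above.
impl From<ExternalNetworkId> for NetworkId {
  fn from(network: ExternalNetworkId) -> NetworkId {
    match network {
      ExternalNetworkId::Bitcoin => NetworkId::Bitcoin,
      ExternalNetworkId::Ethereum => NetworkId::Ethereum,
      ExternalNetworkId::Monero => NetworkId::Monero,
    }
  }
}

// Fallible narrowing, enabling `let Ok(set) = ExternalValidatorSet::try_from(set)
// else { continue }` in the event handlers below.
impl TryFrom<NetworkId> for ExternalNetworkId {
  type Error = ();
  fn try_from(network: NetworkId) -> Result<ExternalNetworkId, ()> {
    match network {
      NetworkId::Serai => Err(()),
      NetworkId::Bitcoin => Ok(ExternalNetworkId::Bitcoin),
      NetworkId::Ethereum => Ok(ExternalNetworkId::Ethereum),
      NetworkId::Monero => Ok(ExternalNetworkId::Monero),
    }
  }
}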
@@ -6,9 +6,9 @@ use blake2::{
 use scale::Encode;
 use borsh::{BorshSerialize, BorshDeserialize};
 use serai_client::{
-  primitives::NetworkId,
-  validator_sets::primitives::{Session, ValidatorSet},
   in_instructions::primitives::{Batch, SignedBatch},
+  primitives::ExternalNetworkId,
+  validator_sets::primitives::{ExternalValidatorSet, Session},
 };

 pub use serai_db::*;

@@ -18,21 +18,21 @@ use crate::tributary::{TributarySpec, Transaction, scanner::RecognizedIdType};
 create_db!(
   MainDb {
-    HandledMessageDb: (network: NetworkId) -> u64,
+    HandledMessageDb: (network: ExternalNetworkId) -> u64,
     ActiveTributaryDb: () -> Vec<u8>,
-    RetiredTributaryDb: (set: ValidatorSet) -> (),
+    RetiredTributaryDb: (set: ExternalValidatorSet) -> (),
     FirstPreprocessDb: (
-      network: NetworkId,
+      network: ExternalNetworkId,
       id_type: RecognizedIdType,
       id: &[u8]
     ) -> Vec<Vec<u8>>,
-    LastReceivedBatchDb: (network: NetworkId) -> u32,
-    ExpectedBatchDb: (network: NetworkId, id: u32) -> [u8; 32],
-    BatchDb: (network: NetworkId, id: u32) -> SignedBatch,
-    LastVerifiedBatchDb: (network: NetworkId) -> u32,
-    HandoverBatchDb: (set: ValidatorSet) -> u32,
-    LookupHandoverBatchDb: (network: NetworkId, batch: u32) -> Session,
-    QueuedBatchesDb: (set: ValidatorSet) -> Vec<u8>
+    LastReceivedBatchDb: (network: ExternalNetworkId) -> u32,
+    ExpectedBatchDb: (network: ExternalNetworkId, id: u32) -> [u8; 32],
+    BatchDb: (network: ExternalNetworkId, id: u32) -> SignedBatch,
+    LastVerifiedBatchDb: (network: ExternalNetworkId) -> u32,
+    HandoverBatchDb: (set: ExternalValidatorSet) -> u32,
+    LookupHandoverBatchDb: (network: ExternalNetworkId, batch: u32) -> Session,
+    QueuedBatchesDb: (set: ExternalValidatorSet) -> Vec<u8>
   }
 );

@@ -61,7 +61,7 @@ impl ActiveTributaryDb {
     ActiveTributaryDb::set(txn, &existing_bytes);
   }

-  pub fn retire_tributary(txn: &mut impl DbTxn, set: ValidatorSet) {
+  pub fn retire_tributary(txn: &mut impl DbTxn, set: ExternalValidatorSet) {
     let mut active = Self::active_tributaries(txn).1;
     for i in 0 .. active.len() {
       if active[i].set() == set {

@@ -82,7 +82,7 @@ impl ActiveTributaryDb {
 impl FirstPreprocessDb {
   pub fn save_first_preprocess(
     txn: &mut impl DbTxn,
-    network: NetworkId,
+    network: ExternalNetworkId,
     id_type: RecognizedIdType,
     id: &[u8],
     preprocess: &Vec<Vec<u8>>,

@@ -108,19 +108,19 @@ impl ExpectedBatchDb {
 }

 impl HandoverBatchDb {
-  pub fn set_handover_batch(txn: &mut impl DbTxn, set: ValidatorSet, batch: u32) {
+  pub fn set_handover_batch(txn: &mut impl DbTxn, set: ExternalValidatorSet, batch: u32) {
     Self::set(txn, set, &batch);
     LookupHandoverBatchDb::set(txn, set.network, batch, &set.session);
   }
 }
 impl QueuedBatchesDb {
-  pub fn queue(txn: &mut impl DbTxn, set: ValidatorSet, batch: &Transaction) {
+  pub fn queue(txn: &mut impl DbTxn, set: ExternalValidatorSet, batch: &Transaction) {
     let mut batches = Self::get(txn, set).unwrap_or_default();
     batch.write(&mut batches).unwrap();
     Self::set(txn, set, &batches);
   }

-  pub fn take(txn: &mut impl DbTxn, set: ValidatorSet) -> Vec<Transaction> {
+  pub fn take(txn: &mut impl DbTxn, set: ExternalValidatorSet) -> Vec<Transaction> {
     let batches_vec = Self::get(txn, set).unwrap_or_default();
     txn.del(Self::key(set));
@@ -23,8 +23,8 @@ use serai_db::{DbTxn, Db};
 use scale::Encode;
 use borsh::BorshSerialize;
 use serai_client::{
-  primitives::NetworkId,
-  validator_sets::primitives::{Session, ValidatorSet, KeyPair},
+  primitives::ExternalNetworkId,
+  validator_sets::primitives::{ExternalValidatorSet, KeyPair, Session},
   Public, Serai, SeraiInInstructions,
 };

@@ -79,7 +79,7 @@ pub struct ActiveTributary<D: Db, P: P2p> {
 #[derive(Clone)]
 pub enum TributaryEvent<D: Db, P: P2p> {
   NewTributary(ActiveTributary<D, P>),
-  TributaryRetired(ValidatorSet),
+  TributaryRetired(ExternalValidatorSet),
 }

 // Creates a new tributary and sends it to all listeners.

@@ -145,7 +145,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
   p2p: &P,
   cosign_channel: &mpsc::UnboundedSender<CosignedBlock>,
   tributaries: &HashMap<Session, ActiveTributary<D, P>>,
-  network: NetworkId,
+  network: ExternalNetworkId,
   msg: &processors::Message,
 ) -> bool {
   #[allow(clippy::nonminimal_bool)]

@@ -193,7 +193,8 @@ async fn handle_processor_message<D: Db, P: P2p>(
         .iter()
         .map(|plan| plan.session)
         .filter(|session| {
-          RetiredTributaryDb::get(&txn, ValidatorSet { network, session: *session }).is_none()
+          RetiredTributaryDb::get(&txn, ExternalValidatorSet { network, session: *session })
+            .is_none()
         })
         .collect::<HashSet<_>>();

@@ -265,7 +266,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
       }
       // This causes an action on Substrate yet not on any Tributary
       coordinator::ProcessorMessage::SignedSlashReport { session, signature } => {
-        let set = ValidatorSet { network, session: *session };
+        let set = ExternalValidatorSet { network, session: *session };
         let signature: &[u8] = signature.as_ref();
         let signature = serai_client::Signature(signature.try_into().unwrap());

@@ -393,7 +394,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
   if let Some(relevant_tributary_value) = relevant_tributary {
     if RetiredTributaryDb::get(
       &txn,
-      ValidatorSet { network: msg.network, session: relevant_tributary_value },
+      ExternalValidatorSet { network: msg.network, session: relevant_tributary_value },
     )
     .is_some()
     {

@@ -782,7 +783,7 @@ async fn handle_processor_messages<D: Db, Pro: Processors, P: P2p>(
   processors: Pro,
   p2p: P,
   cosign_channel: mpsc::UnboundedSender<CosignedBlock>,
-  network: NetworkId,
+  network: ExternalNetworkId,
   mut tributary_event: mpsc::UnboundedReceiver<TributaryEvent<D, P>>,
 ) {
   let mut tributaries = HashMap::new();

@@ -831,7 +832,7 @@ async fn handle_processor_messages<D: Db, Pro: Processors, P: P2p>(
 #[allow(clippy::too_many_arguments)]
 async fn handle_cosigns_and_batch_publication<D: Db, P: P2p>(
   mut db: D,
-  network: NetworkId,
+  network: ExternalNetworkId,
   mut tributary_event: mpsc::UnboundedReceiver<TributaryEvent<D, P>>,
 ) {
   let mut tributaries = HashMap::new();

@@ -905,7 +906,7 @@ async fn handle_cosigns_and_batch_publication<D: Db, P: P2p>(
       for batch in start_id ..= last_id {
         let is_pre_handover = LookupHandoverBatchDb::get(&txn, network, batch + 1);
         if let Some(session) = is_pre_handover {
-          let set = ValidatorSet { network, session };
+          let set = ExternalValidatorSet { network, session };
           let mut queued = QueuedBatchesDb::take(&mut txn, set);
           // is_handover_batch is only set for handover `Batch`s we're participating in, making
           // this safe

@@ -923,7 +924,8 @@ async fn handle_cosigns_and_batch_publication<D: Db, P: P2p>(
         let is_handover = LookupHandoverBatchDb::get(&txn, network, batch);
         if let Some(session) = is_handover {
-          for queued in QueuedBatchesDb::take(&mut txn, ValidatorSet { network, session }) {
+          for queued in QueuedBatchesDb::take(&mut txn, ExternalValidatorSet { network, session })
+          {
            to_publish.push((session, queued));
          }
        }

@@ -970,10 +972,7 @@ pub async fn handle_processors<D: Db, Pro: Processors, P: P2p>(
   mut tributary_event: broadcast::Receiver<TributaryEvent<D, P>>,
 ) {
   let mut channels = HashMap::new();
-  for network in serai_client::primitives::NETWORKS {
-    if network == NetworkId::Serai {
-      continue;
-    }
+  for network in serai_client::primitives::EXTERNAL_NETWORKS {
     let (processor_send, processor_recv) = mpsc::unbounded_channel();
     tokio::spawn(handle_processor_messages(
       db.clone(),

@@ -1195,7 +1194,7 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
     }
   });

-  move |set: ValidatorSet, genesis, id_type, id: Vec<u8>| {
+  move |set: ExternalValidatorSet, genesis, id_type, id: Vec<u8>| {
     log::debug!("recognized ID {:?} {}", id_type, hex::encode(&id));
     let mut raw_db = raw_db.clone();
     let key = key.clone();
@@ -11,7 +11,9 @@ use rand_core::{RngCore, OsRng};
 use scale::{Decode, Encode};
 use borsh::{BorshSerialize, BorshDeserialize};
-use serai_client::{primitives::NetworkId, validator_sets::primitives::ValidatorSet, Serai};
+use serai_client::{
+  primitives::ExternalNetworkId, validator_sets::primitives::ExternalValidatorSet, Serai,
+};

 use serai_db::Db;

@@ -69,7 +71,7 @@ const BLOCKS_PER_BATCH: usize = BLOCKS_PER_MINUTE + 1;
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
 pub struct CosignedBlock {
-  pub network: NetworkId,
+  pub network: ExternalNetworkId,
   pub block_number: u64,
   pub block: [u8; 32],
   pub signature: [u8; 64],

@@ -208,8 +210,8 @@ pub struct HeartbeatBatch {
 pub trait P2p: Send + Sync + Clone + fmt::Debug + TributaryP2p {
   type Id: Send + Sync + Clone + Copy + fmt::Debug;

-  async fn subscribe(&self, set: ValidatorSet, genesis: [u8; 32]);
-  async fn unsubscribe(&self, set: ValidatorSet, genesis: [u8; 32]);
+  async fn subscribe(&self, set: ExternalValidatorSet, genesis: [u8; 32]);
+  async fn unsubscribe(&self, set: ExternalValidatorSet, genesis: [u8; 32]);

   async fn send_raw(&self, to: Self::Id, msg: Vec<u8>);
   async fn broadcast_raw(&self, kind: P2pMessageKind, msg: Vec<u8>);

@@ -309,7 +311,7 @@ struct Behavior {
 #[allow(clippy::type_complexity)]
 #[derive(Clone)]
 pub struct LibP2p {
-  subscribe: Arc<Mutex<mpsc::UnboundedSender<(bool, ValidatorSet, [u8; 32])>>>,
+  subscribe: Arc<Mutex<mpsc::UnboundedSender<(bool, ExternalValidatorSet, [u8; 32])>>>,
   send: Arc<Mutex<mpsc::UnboundedSender<(PeerId, Vec<u8>)>>>,
   broadcast: Arc<Mutex<mpsc::UnboundedSender<(P2pMessageKind, Vec<u8>)>>>,
   receive: Arc<Mutex<mpsc::UnboundedReceiver<Message<Self>>>>,

@@ -397,7 +399,7 @@ impl LibP2p {
     let (receive_send, receive_recv) = mpsc::unbounded_channel();
     let (subscribe_send, mut subscribe_recv) = mpsc::unbounded_channel();

-    fn topic_for_set(set: ValidatorSet) -> IdentTopic {
+    fn topic_for_set(set: ExternalValidatorSet) -> IdentTopic {
       IdentTopic::new(format!("{LIBP2P_TOPIC}-{}", hex::encode(set.encode())))
     }

@@ -407,7 +409,8 @@ impl LibP2p {
     // The addrs we're currently dialing, and the networks associated with them
     let dialing_peers = Arc::new(RwLock::new(HashMap::new()));
     // The peers we're currently connected to, and the networks associated with them
-    let connected_peers = Arc::new(RwLock::new(HashMap::<Multiaddr, HashSet<NetworkId>>::new()));
+    let connected_peers =
+      Arc::new(RwLock::new(HashMap::<Multiaddr, HashSet<ExternalNetworkId>>::new()));

     // Find and connect to peers
     let (connect_to_network_send, mut connect_to_network_recv) =

@@ -420,7 +423,7 @@ impl LibP2p {
       let connect_to_network_send = connect_to_network_send.clone();
       async move {
         loop {
-          let connect = |network: NetworkId, addr: Multiaddr| {
+          let connect = |network: ExternalNetworkId, addr: Multiaddr| {
            let dialing_peers = dialing_peers.clone();
            let connected_peers = connected_peers.clone();
            let to_dial_send = to_dial_send.clone();

@@ -507,7 +510,7 @@ impl LibP2p {
            connect_to_network_networks.insert(network);
          }
          for network in connect_to_network_networks {
-            if let Ok(mut nodes) = serai.p2p_validators(network).await {
+            if let Ok(mut nodes) = serai.p2p_validators(network.into()).await {
              // If there's an insufficient amount of nodes known, connect to all yet add it
              // back and break
              if nodes.len() < TARGET_PEERS {

@@ -557,7 +560,7 @@ impl LibP2p {
          // Subscribe to any new topics
          set = subscribe_recv.recv() => {
-            let (subscribe, set, genesis): (_, ValidatorSet, [u8; 32]) =
+            let (subscribe, set, genesis): (_, ExternalValidatorSet, [u8; 32]) =
              set.expect("subscribe_recv closed. are we shutting down?");
            let topic = topic_for_set(set);
            if subscribe {

@@ -776,7 +779,7 @@ impl LibP2p {
 impl P2p for LibP2p {
   type Id = PeerId;

-  async fn subscribe(&self, set: ValidatorSet, genesis: [u8; 32]) {
+  async fn subscribe(&self, set: ExternalValidatorSet, genesis: [u8; 32]) {
     self
       .subscribe
       .lock()

@@ -785,7 +788,7 @@ impl P2p for LibP2p {
       .expect("subscribe_send closed. are we shutting down?");
   }

-  async fn unsubscribe(&self, set: ValidatorSet, genesis: [u8; 32]) {
+  async fn unsubscribe(&self, set: ExternalValidatorSet, genesis: [u8; 32]) {
     self
       .subscribe
       .lock()
@@ -1,6 +1,6 @@
 use std::sync::Arc;

-use serai_client::primitives::NetworkId;
+use serai_client::primitives::ExternalNetworkId;
 use processor_messages::{ProcessorMessage, CoordinatorMessage};

 use message_queue::{Service, Metadata, client::MessageQueue};

@@ -8,27 +8,27 @@ use message_queue::{Service, Metadata, client::MessageQueue};
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct Message {
   pub id: u64,
-  pub network: NetworkId,
+  pub network: ExternalNetworkId,
   pub msg: ProcessorMessage,
 }

 #[async_trait::async_trait]
 pub trait Processors: 'static + Send + Sync + Clone {
-  async fn send(&self, network: NetworkId, msg: impl Send + Into<CoordinatorMessage>);
-  async fn recv(&self, network: NetworkId) -> Message;
+  async fn send(&self, network: ExternalNetworkId, msg: impl Send + Into<CoordinatorMessage>);
+  async fn recv(&self, network: ExternalNetworkId) -> Message;
   async fn ack(&self, msg: Message);
 }

 #[async_trait::async_trait]
 impl Processors for Arc<MessageQueue> {
-  async fn send(&self, network: NetworkId, msg: impl Send + Into<CoordinatorMessage>) {
+  async fn send(&self, network: ExternalNetworkId, msg: impl Send + Into<CoordinatorMessage>) {
     let msg: CoordinatorMessage = msg.into();
     let metadata =
       Metadata { from: self.service, to: Service::Processor(network), intent: msg.intent() };
     let msg = borsh::to_vec(&msg).unwrap();
     self.queue(metadata, msg).await;
   }
-  async fn recv(&self, network: NetworkId) -> Message {
+  async fn recv(&self, network: ExternalNetworkId) -> Message {
     let msg = self.next(Service::Processor(network)).await;
     assert_eq!(msg.from, Service::Processor(network));
@@ -19,9 +19,9 @@ use ciphersuite::{Ciphersuite, Ristretto};
 use borsh::{BorshSerialize, BorshDeserialize};

 use serai_client::{
-  SeraiError, Serai,
-  primitives::NetworkId,
-  validator_sets::primitives::{Session, ValidatorSet},
+  primitives::ExternalNetworkId,
+  validator_sets::primitives::{ExternalValidatorSet, Session},
+  Serai, SeraiError,
 };

 use serai_db::*;

@@ -70,13 +70,18 @@ impl LatestCosignedBlock {
 db_channel! {
   SubstrateDbChannels {
-    CosignTransactions: (network: NetworkId) -> (Session, u64, [u8; 32]),
+    CosignTransactions: (network: ExternalNetworkId) -> (Session, u64, [u8; 32]),
   }
 }

 impl CosignTransactions {
   // Append a cosign transaction.
-  pub fn append_cosign(txn: &mut impl DbTxn, set: ValidatorSet, number: u64, hash: [u8; 32]) {
+  pub fn append_cosign(
+    txn: &mut impl DbTxn,
+    set: ExternalValidatorSet,
+    number: u64,
+    hash: [u8; 32],
+  ) {
     CosignTransactions::send(txn, set.network, &(set.session, number, hash))
   }
 }

@@ -256,22 +261,22 @@ async fn advance_cosign_protocol_inner(
   // Using the keys of the prior block ensures this deadlock isn't reached
   let serai = serai.as_of(actual_block.header.parent_hash.into());

-  for network in serai_client::primitives::NETWORKS {
+  for network in serai_client::primitives::EXTERNAL_NETWORKS {
     // Get the latest session to have set keys
     let set_with_keys = {
-      let Some(latest_session) = serai.validator_sets().session(network).await? else {
+      let Some(latest_session) = serai.validator_sets().session(network.into()).await? else {
         continue;
       };
       let prior_session = Session(latest_session.0.saturating_sub(1));
       if serai
         .validator_sets()
-        .keys(ValidatorSet { network, session: prior_session })
+        .keys(ExternalValidatorSet { network, session: prior_session })
         .await?
         .is_some()
       {
-        ValidatorSet { network, session: prior_session }
+        ExternalValidatorSet { network, session: prior_session }
       } else {
-        let set = ValidatorSet { network, session: latest_session };
+        let set = ExternalValidatorSet { network, session: latest_session };
         if serai.validator_sets().keys(set).await?.is_none() {
           continue;
         }

@@ -280,7 +285,7 @@ async fn advance_cosign_protocol_inner(
     };

     log::debug!("{:?} will be cosigning {block}", set_with_keys.network);
-    cosigning.push((set_with_keys, in_set(key, &serai, set_with_keys).await?.unwrap()));
+    cosigning.push((set_with_keys, in_set(key, &serai, set_with_keys.into()).await?.unwrap()));
   }

   break;
@@ -1,4 +1,4 @@
-use serai_client::primitives::NetworkId;
+use serai_client::primitives::ExternalNetworkId;

 pub use serai_db::*;

@@ -9,7 +9,7 @@ mod inner_db {
     SubstrateDb {
       NextBlock: () -> u64,
       HandledEvent: (block: [u8; 32]) -> u32,
-      BatchInstructionsHashDb: (network: NetworkId, id: u32) -> [u8; 32]
+      BatchInstructionsHashDb: (network: ExternalNetworkId, id: u32) -> [u8; 32]
     }
   );
 }
@@ -9,11 +9,14 @@ use zeroize::Zeroizing;
 use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};

 use serai_client::{
-  SeraiError, Block, Serai, TemporalSerai,
-  primitives::{BlockHash, NetworkId},
-  validator_sets::{primitives::ValidatorSet, ValidatorSetsEvent},
-  in_instructions::InInstructionsEvent,
   coins::CoinsEvent,
+  in_instructions::InInstructionsEvent,
+  primitives::{BlockHash, ExternalNetworkId},
+  validator_sets::{
+    primitives::{ExternalValidatorSet, ValidatorSet},
+    ValidatorSetsEvent,
+  },
+  Block, Serai, SeraiError, TemporalSerai,
 };

 use serai_db::DbTxn;

@@ -52,9 +55,9 @@ async fn handle_new_set<D: Db>(
   new_tributary_spec: &mpsc::UnboundedSender<TributarySpec>,
   serai: &Serai,
   block: &Block,
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
 ) -> Result<(), SeraiError> {
-  if in_set(key, &serai.as_of(block.hash()), set)
+  if in_set(key, &serai.as_of(block.hash()), set.into())
     .await?
     .expect("NewSet for set which doesn't exist")
   {

@@ -64,7 +67,7 @@ async fn handle_new_set<D: Db>(
     let serai = serai.as_of(block.hash());
     let serai = serai.validator_sets();
     let set_participants =
-      serai.participants(set.network).await?.expect("NewSet for set which doesn't exist");
+      serai.participants(set.network.into()).await?.expect("NewSet for set which doesn't exist");

     set_participants.into_iter().map(|(k, w)| (k, u16::try_from(w).unwrap())).collect::<Vec<_>>()
   };

@@ -131,7 +134,7 @@ async fn handle_batch_and_burns<Pro: Processors>(
   };

   let mut batch_block = HashMap::new();
-  let mut batches = HashMap::<NetworkId, Vec<u32>>::new();
+  let mut batches = HashMap::<ExternalNetworkId, Vec<u32>>::new();
   let mut burns = HashMap::new();

   let serai = serai.as_of(block.hash());

@@ -205,8 +208,8 @@ async fn handle_block<D: Db, Pro: Processors>(
   db: &mut D,
   key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
   new_tributary_spec: &mpsc::UnboundedSender<TributarySpec>,
-  perform_slash_report: &mpsc::UnboundedSender<ValidatorSet>,
-  tributary_retired: &mpsc::UnboundedSender<ValidatorSet>,
+  perform_slash_report: &mpsc::UnboundedSender<ExternalValidatorSet>,
+  tributary_retired: &mpsc::UnboundedSender<ExternalValidatorSet>,
   processors: &Pro,
   serai: &Serai,
   block: Block,

@@ -226,12 +229,8 @@ async fn handle_block<D: Db, Pro: Processors>(
       panic!("NewSet event wasn't NewSet: {new_set:?}");
     };

-    // If this is Serai, do nothing
-    if set.network == NetworkId::Serai {
-      continue;
-    }
-
+    // We only coordinate/process external networks
+    let Ok(set) = ExternalValidatorSet::try_from(set) else { continue };
     if HandledEvent::is_unhandled(db, hash, event_id) {
       log::info!("found fresh new set event {:?}", new_set);
       let mut txn = db.txn();

@@ -286,10 +285,7 @@ async fn handle_block<D: Db, Pro: Processors>(
       panic!("AcceptedHandover event wasn't AcceptedHandover: {accepted_handover:?}");
     };

-    if set.network == NetworkId::Serai {
-      continue;
-    }
-
+    let Ok(set) = ExternalValidatorSet::try_from(set) else { continue };
     if HandledEvent::is_unhandled(db, hash, event_id) {
       log::info!("found fresh accepted handover event {:?}", accepted_handover);
       // TODO: This isn't atomic with the event handling

@@ -307,10 +303,7 @@ async fn handle_block<D: Db, Pro: Processors>(
       panic!("SetRetired event wasn't SetRetired: {retired_set:?}");
     };

-    if set.network == NetworkId::Serai {
-      continue;
-    }
-
+    let Ok(set) = ExternalValidatorSet::try_from(set) else { continue };
     if HandledEvent::is_unhandled(db, hash, event_id) {
       log::info!("found fresh set retired event {:?}", retired_set);
       let mut txn = db.txn();

@@ -340,8 +333,8 @@ async fn handle_new_blocks<D: Db, Pro: Processors>(
   db: &mut D,
   key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
   new_tributary_spec: &mpsc::UnboundedSender<TributarySpec>,
-  perform_slash_report: &mpsc::UnboundedSender<ValidatorSet>,
-  tributary_retired: &mpsc::UnboundedSender<ValidatorSet>,
+  perform_slash_report: &mpsc::UnboundedSender<ExternalValidatorSet>,
+  tributary_retired: &mpsc::UnboundedSender<ExternalValidatorSet>,
   processors: &Pro,
   serai: &Serai,
   next_block: &mut u64,

@@ -395,8 +388,8 @@ pub async fn scan_task<D: Db, Pro: Processors>(
   processors: Pro,
   serai: Arc<Serai>,
   new_tributary_spec: mpsc::UnboundedSender<TributarySpec>,
-  perform_slash_report: mpsc::UnboundedSender<ValidatorSet>,
-  tributary_retired: mpsc::UnboundedSender<ValidatorSet>,
+  perform_slash_report: mpsc::UnboundedSender<ExternalValidatorSet>,
+  tributary_retired: mpsc::UnboundedSender<ExternalValidatorSet>,
 ) {
   log::info!("scanning substrate");
   let mut next_substrate_block = NextBlock::get(&db).unwrap_or_default();

@@ -494,9 +487,12 @@ pub async fn scan_task<D: Db, Pro: Processors>(
 /// retry.
 pub(crate) async fn expected_next_batch(
   serai: &Serai,
-  network: NetworkId,
+  network: ExternalNetworkId,
 ) -> Result<u32, SeraiError> {
-  async fn expected_next_batch_inner(serai: &Serai, network: NetworkId) -> Result<u32, SeraiError> {
+  async fn expected_next_batch_inner(
+    serai: &Serai,
+    network: ExternalNetworkId,
+  ) -> Result<u32, SeraiError> {
     let serai = serai.as_of_latest_finalized_block().await?;
     let last = serai.in_instructions().last_batch_for_network(network).await?;
     Ok(if let Some(last) = last { last + 1 } else { 0 })

@@ -519,7 +515,7 @@ pub(crate) async fn expected_next_batch(
 /// This is deemed fine.
 pub(crate) async fn verify_published_batches<D: Db>(
   txn: &mut D::Transaction<'_>,
-  network: NetworkId,
+  network: ExternalNetworkId,
   optimistic_up_to: u32,
 ) -> Option<u32> {
   // TODO: Localize from MainDb to SubstrateDb
@@ -4,7 +4,7 @@ use std::{
   collections::{VecDeque, HashSet, HashMap},
 };

-use serai_client::{primitives::NetworkId, validator_sets::primitives::ValidatorSet};
+use serai_client::{primitives::ExternalNetworkId, validator_sets::primitives::ExternalValidatorSet};

 use processor_messages::CoordinatorMessage;

@@ -20,7 +20,7 @@ use crate::{
 pub mod tributary;

 #[derive(Clone)]
-pub struct MemProcessors(pub Arc<RwLock<HashMap<NetworkId, VecDeque<CoordinatorMessage>>>>);
+pub struct MemProcessors(pub Arc<RwLock<HashMap<ExternalNetworkId, VecDeque<CoordinatorMessage>>>>);
 impl MemProcessors {
   #[allow(clippy::new_without_default)]
   pub fn new() -> MemProcessors {

@@ -30,12 +30,12 @@ impl MemProcessors {
 #[async_trait::async_trait]
 impl Processors for MemProcessors {
-  async fn send(&self, network: NetworkId, msg: impl Send + Into<CoordinatorMessage>) {
+  async fn send(&self, network: ExternalNetworkId, msg: impl Send + Into<CoordinatorMessage>) {
     let mut processors = self.0.write().await;
     let processor = processors.entry(network).or_insert_with(VecDeque::new);
     processor.push_back(msg.into());
   }
-  async fn recv(&self, _: NetworkId) -> Message {
+  async fn recv(&self, _: ExternalNetworkId) -> Message {
     todo!()
   }
   async fn ack(&self, _: Message) {

@@ -65,8 +65,8 @@ impl LocalP2p {
 impl P2p for LocalP2p {
   type Id = usize;

-  async fn subscribe(&self, _set: ValidatorSet, _genesis: [u8; 32]) {}
-  async fn unsubscribe(&self, _set: ValidatorSet, _genesis: [u8; 32]) {}
+  async fn subscribe(&self, _set: ExternalValidatorSet, _genesis: [u8; 32]) {}
+  async fn unsubscribe(&self, _set: ExternalValidatorSet, _genesis: [u8; 32]) {}

   async fn send_raw(&self, to: Self::Id, msg: Vec<u8>) {
     let mut msg_ref = msg.as_slice();
@@ -15,8 +15,8 @@ use ciphersuite::{
 use sp_application_crypto::sr25519;
 use borsh::BorshDeserialize;
 use serai_client::{
-  primitives::NetworkId,
-  validator_sets::primitives::{Session, ValidatorSet},
+  primitives::ExternalNetworkId,
+  validator_sets::primitives::{ExternalValidatorSet, Session},
 };

 use tokio::time::sleep;

@@ -50,7 +50,7 @@ pub fn new_spec<R: RngCore + CryptoRng>(
   let start_time = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs();

-  let set = ValidatorSet { session: Session(0), network: NetworkId::Bitcoin };
+  let set = ExternalValidatorSet { session: Session(0), network: ExternalNetworkId::Bitcoin };

   let set_participants = keys
     .iter()

@@ -10,7 +10,7 @@ use frost::Participant;
 use sp_runtime::traits::Verify;
 use serai_client::{
   primitives::{SeraiAddress, Signature},
-  validator_sets::primitives::{ValidatorSet, KeyPair},
+  validator_sets::primitives::{ExternalValidatorSet, KeyPair},
 };

 use tokio::time::sleep;

@@ -350,7 +350,7 @@ async fn dkg_test() {
     async fn publish_set_keys(
       &self,
       _db: &(impl Sync + Get),
-      set: ValidatorSet,
+      set: ExternalValidatorSet,
       removed: Vec<SeraiAddress>,
       key_pair: KeyPair,
       signature: Signature,

@@ -362,7 +362,7 @@ async fn dkg_test() {
         &*serai_client::validator_sets::primitives::set_keys_message(&set, &[], &key_pair),
         &serai_client::Public(
           frost::dkg::musig::musig_key::<Ristretto>(
-            &serai_client::validator_sets::primitives::musig_context(set),
+            &serai_client::validator_sets::primitives::musig_context(set.into()),
            &self.spec.validators().into_iter().map(|(validator, _)| validator).collect::<Vec<_>>()
          )
          .unwrap()
@@ -7,7 +7,7 @@ use ciphersuite::{group::Group, Ciphersuite, Ristretto};
 use scale::{Encode, Decode};
 use serai_client::{
   primitives::{SeraiAddress, Signature},
-  validator_sets::primitives::{MAX_KEY_SHARES_PER_SET, ValidatorSet, KeyPair},
+  validator_sets::primitives::{ExternalValidatorSet, KeyPair, MAX_KEY_SHARES_PER_SET},
 };
 use processor_messages::coordinator::SubstrateSignableId;

@@ -31,7 +31,7 @@ impl PublishSeraiTransaction for () {
   async fn publish_set_keys(
     &self,
     _db: &(impl Sync + serai_db::Get),
-    _set: ValidatorSet,
+    _set: ExternalValidatorSet,
     _removed: Vec<SeraiAddress>,
     _key_pair: KeyPair,
     _signature: Signature,
|
||||
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
|
||||
use frost::Participant;
|
||||
|
||||
use serai_client::validator_sets::primitives::{KeyPair, ValidatorSet};
|
||||
use serai_client::validator_sets::primitives::{KeyPair, ExternalValidatorSet};
|
||||
|
||||
use processor_messages::coordinator::SubstrateSignableId;
|
||||
|
||||
@@ -46,7 +46,7 @@ pub enum Accumulation {
|
||||
create_db!(
|
||||
Tributary {
|
||||
SeraiBlockNumber: (hash: [u8; 32]) -> u64,
|
||||
SeraiDkgCompleted: (spec: ValidatorSet) -> [u8; 32],
|
||||
SeraiDkgCompleted: (spec: ExternalValidatorSet) -> [u8; 32],
|
||||
|
||||
TributaryBlockNumber: (block: [u8; 32]) -> u32,
|
||||
LastHandledBlock: (genesis: [u8; 32]) -> [u8; 32],
|
||||
@@ -80,7 +80,7 @@ create_db!(
|
||||
SlashReports: (genesis: [u8; 32], signer: [u8; 32]) -> Vec<u32>,
|
||||
SlashReported: (genesis: [u8; 32]) -> u16,
|
||||
SlashReportCutOff: (genesis: [u8; 32]) -> u64,
|
||||
SlashReport: (set: ValidatorSet) -> Vec<([u8; 32], u32)>,
|
||||
SlashReport: (set: ExternalValidatorSet) -> Vec<([u8; 32], u32)>,
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
 use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};

-use serai_client::validator_sets::primitives::ValidatorSet;
+use serai_client::validator_sets::primitives::ExternalValidatorSet;

 use tributary::{
   ReadWrite,

@@ -40,7 +40,7 @@ pub fn removed_as_of_dkg_attempt(
 pub fn removed_as_of_set_keys(
   getter: &impl Get,
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
   genesis: [u8; 32],
 ) -> Option<Vec<<Ristretto as Ciphersuite>::G>> {
   // SeraiDkgCompleted has the key placed on-chain.
@@ -10,7 +10,7 @@ use tokio::sync::broadcast;
 use scale::{Encode, Decode};
 use serai_client::{
   primitives::{SeraiAddress, Signature},
-  validator_sets::primitives::{KeyPair, ValidatorSet},
+  validator_sets::primitives::{ExternalValidatorSet, KeyPair},
   Serai,
 };

@@ -38,7 +38,7 @@ pub enum RecognizedIdType {
 pub trait RIDTrait {
   async fn recognized_id(
     &self,
-    set: ValidatorSet,
+    set: ExternalValidatorSet,
     genesis: [u8; 32],
     kind: RecognizedIdType,
     id: Vec<u8>,

@@ -47,12 +47,12 @@ pub trait RIDTrait {
 #[async_trait::async_trait]
 impl<
     FRid: Send + Future<Output = ()>,
-    F: Sync + Fn(ValidatorSet, [u8; 32], RecognizedIdType, Vec<u8>) -> FRid,
+    F: Sync + Fn(ExternalValidatorSet, [u8; 32], RecognizedIdType, Vec<u8>) -> FRid,
   > RIDTrait for F
 {
   async fn recognized_id(
     &self,
-    set: ValidatorSet,
+    set: ExternalValidatorSet,
     genesis: [u8; 32],
     kind: RecognizedIdType,
     id: Vec<u8>,

@@ -66,7 +66,7 @@ pub trait PublishSeraiTransaction {
   async fn publish_set_keys(
     &self,
     db: &(impl Sync + Get),
-    set: ValidatorSet,
+    set: ExternalValidatorSet,
     removed: Vec<SeraiAddress>,
     key_pair: KeyPair,
     signature: Signature,

@@ -86,7 +86,7 @@ mod impl_pst_for_serai {
       async fn publish(
         serai: &Serai,
         db: &impl Get,
-        set: ValidatorSet,
+        set: ExternalValidatorSet,
         tx: serai_client::Transaction,
         meta: $Meta,
       ) -> bool {

@@ -128,7 +128,7 @@ mod impl_pst_for_serai {
     async fn publish_set_keys(
       &self,
       db: &(impl Sync + Get),
-      set: ValidatorSet,
+      set: ExternalValidatorSet,
       removed: Vec<SeraiAddress>,
       key_pair: KeyPair,
       signature: Signature,

@@ -140,7 +140,7 @@ mod impl_pst_for_serai {
         key_pair,
         signature,
       );
-      async fn check(serai: SeraiValidatorSets<'_>, set: ValidatorSet, (): ()) -> bool {
+      async fn check(serai: SeraiValidatorSets<'_>, set: ExternalValidatorSet, (): ()) -> bool {
         if matches!(serai.keys(set).await, Ok(Some(_))) {
           log::info!("another coordinator set key pair for {:?}", set);
           return true;
@@ -119,7 +119,7 @@ impl<T: DbTxn, C: Encode> SigningProtocol<'_, T, C> {
     let algorithm = Schnorrkel::new(b"substrate");
     let keys: ThresholdKeys<Ristretto> =
-      musig(&musig_context(self.spec.set()), self.key, participants)
+      musig(&musig_context(self.spec.set().into()), self.key, participants)
         .expect("signing for a set we aren't in/validator present multiple times")
         .into();
@@ -9,7 +9,7 @@ use frost::Participant;
 use scale::Encode;
 use borsh::{BorshSerialize, BorshDeserialize};

-use serai_client::{primitives::PublicKey, validator_sets::primitives::ValidatorSet};
+use serai_client::{primitives::PublicKey, validator_sets::primitives::ExternalValidatorSet};

 fn borsh_serialize_validators<W: io::Write>(
   validators: &Vec<(<Ristretto as Ciphersuite>::G, u16)>,

@@ -43,7 +43,7 @@ fn borsh_deserialize_validators<R: io::Read>(
 pub struct TributarySpec {
   serai_block: [u8; 32],
   start_time: u64,
-  set: ValidatorSet,
+  set: ExternalValidatorSet,
   #[borsh(
     serialize_with = "borsh_serialize_validators",
     deserialize_with = "borsh_deserialize_validators"

@@ -55,7 +55,7 @@ impl TributarySpec {
   pub fn new(
     serai_block: [u8; 32],
     start_time: u64,
-    set: ValidatorSet,
+    set: ExternalValidatorSet,
     set_participants: Vec<(PublicKey, u16)>,
   ) -> TributarySpec {
     let mut validators = vec![];

@@ -68,7 +68,7 @@ impl TributarySpec {
     Self { serai_block, start_time, set, validators }
   }

-  pub fn set(&self) -> ValidatorSet {
+  pub fn set(&self) -> ExternalValidatorSet {
     self.set
   }
@@ -244,7 +244,16 @@ impl FieldElement {
res *= res;
}
}
-res *= table[usize::from(bits)];
+
+let mut scale_by = FieldElement::ONE;
+#[allow(clippy::needless_range_loop)]
+for i in 0 .. 16 {
+#[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+{
+scale_by = <_>::conditional_select(&scale_by, &table[i], bits.ct_eq(&(i as u8)));
+}
+}
+res *= scale_by;
bits = 0;
}
}

@@ -208,7 +208,16 @@ impl Scalar {
res *= res;
}
}
-res *= table[usize::from(bits)];
+
+let mut scale_by = Scalar::ONE;
+#[allow(clippy::needless_range_loop)]
+for i in 0 .. 16 {
+#[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+{
+scale_by = <_>::conditional_select(&scale_by, &table[i], bits.ct_eq(&(i as u8)));
+}
+}
+res *= scale_by;
bits = 0;
}
}

@@ -161,7 +161,16 @@ macro_rules! field {
res *= res;
}
}
-res *= table[usize::from(bits)];
+
+let mut scale_by = $FieldName(Residue::ONE);
+#[allow(clippy::needless_range_loop)]
+for i in 0 .. 16 {
+#[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+{
+scale_by = <_>::conditional_select(&scale_by, &table[i], bits.ct_eq(&(i as u8)));
+}
+}
+res *= scale_by;
bits = 0;
}
}

@@ -242,7 +242,16 @@ impl Mul<Scalar> for Point {
res = res.double();
}
}
-res += table[usize::from(bits)];
+
+let mut add_by = Point::identity();
+#[allow(clippy::needless_range_loop)]
+for i in 0 .. 16 {
+#[allow(clippy::cast_possible_truncation)] // Safe since 0 .. 16
+{
+add_by = <_>::conditional_select(&add_by, &table[i], bits.ct_eq(&(i as u8)));
+}
+}
+res += add_by;
bits = 0;
}
}
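The four hunks above all apply the same change: a secret-dependent table index is replaced by a full scan with a constant-time select, so the memory access pattern no longer leaks the window bits. A minimal, self-contained sketch of the idea, assuming the `subtle` crate (which provides the `conditional_select`/`ct_eq` used above):

```rust
use subtle::{ConditionallySelectable, ConstantTimeEq};

// Look up `table[index]` without a secret-dependent memory access: every entry
// is visited, and the matching one is kept via a constant-time select
fn ct_lookup(table: &[u64; 16], index: u8) -> u64 {
  let mut res = 0;
  for i in 0 .. 16 {
    res = u64::conditional_select(&res, &table[i], index.ct_eq(&(i as u8)));
  }
  res
}
```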
@@ -25,7 +25,7 @@ pub trait Addendum: Send + Sync + Clone + PartialEq + Debug + WriteAddendum {}
impl<A: Send + Sync + Clone + PartialEq + Debug + WriteAddendum> Addendum for A {}

/// Algorithm trait usable by the FROST signing machine to produce signatures..
-pub trait Algorithm<C: Curve>: Send + Sync + Clone {
+pub trait Algorithm<C: Curve>: Send + Sync {
/// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible
/// transcript included in this crate.
type Transcript: Sync + Clone + Debug + Transcript;

@@ -47,7 +47,7 @@ impl<T: Writable> Writable for Vec<T> {
}

// Pairing of an Algorithm with a ThresholdKeys instance.
-#[derive(Clone, Zeroize)]
+#[derive(Zeroize)]
struct Params<C: Curve, A: Algorithm<C>> {
// Skips the algorithm due to being too large a bound to feasibly enforce on users
#[zeroize(skip)]

@@ -193,7 +193,7 @@ impl<C: Curve> SignatureShare<C> {
/// Trait for the second machine of a two-round signing protocol.
pub trait SignMachine<S>: Send + Sync + Sized {
/// Params used to instantiate this machine which can be used to rebuild from a cache.
-type Params: Clone;
+type Params;
/// Keys used for signing operations.
type Keys;
/// Preprocess message for this machine.

@@ -397,7 +397,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi

Ok((
AlgorithmSignatureMachine {
-params: self.params.clone(),
+params: self.params,
view,
B,
Rs,

@@ -37,10 +37,10 @@ pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
}

/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm.
-pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
+pub fn algorithm_machines_without_clone<R: RngCore, C: Curve, A: Algorithm<C>>(
rng: &mut R,
-algorithm: &A,
keys: &HashMap<Participant, ThresholdKeys<C>>,
+machines: HashMap<Participant, AlgorithmMachine<C, A>>,
) -> HashMap<Participant, AlgorithmMachine<C, A>> {
let mut included = vec![];
while included.len() < usize::from(keys[&Participant::new(1).unwrap()].params().t()) {

@@ -54,18 +54,28 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
included.push(n);
}

-keys
-.iter()
-.filter_map(|(i, keys)| {
-if included.contains(i) {
-Some((*i, AlgorithmMachine::new(algorithm.clone(), keys.clone())))
-} else {
-None
-}
-})
+machines
+.into_iter()
+.filter_map(|(i, machine)| if included.contains(&i) { Some((i, machine)) } else { None })
.collect()
}

+/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm.
+pub fn algorithm_machines<R: RngCore, C: Curve, A: Clone + Algorithm<C>>(
+rng: &mut R,
+algorithm: &A,
+keys: &HashMap<Participant, ThresholdKeys<C>>,
+) -> HashMap<Participant, AlgorithmMachine<C, A>> {
+algorithm_machines_without_clone(
+rng,
+keys,
+keys
+.values()
+.map(|keys| (keys.params().i(), AlgorithmMachine::new(algorithm.clone(), keys.clone())))
+.collect(),
+)
+}

// Run the preprocess step
pub(crate) fn preprocess<
R: RngCore + CryptoRng,

@@ -165,10 +175,10 @@ pub fn sign_without_caching<R: RngCore + CryptoRng, M: PreprocessMachine>(

/// Execute the signing protocol, randomly caching various machines to ensure they can cache
/// successfully.
-pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
+pub fn sign_without_clone<R: RngCore + CryptoRng, M: PreprocessMachine>(
rng: &mut R,
-params: &<M::SignMachine as SignMachine<M::Signature>>::Params,
mut keys: HashMap<Participant, <M::SignMachine as SignMachine<M::Signature>>::Keys>,
+mut params: HashMap<Participant, <M::SignMachine as SignMachine<M::Signature>>::Params>,
machines: HashMap<Participant, M>,
msg: &[u8],
) -> M::Signature {

@@ -183,7 +193,8 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
let cache = machines.remove(&i).unwrap().cache();
machines.insert(
i,
-M::SignMachine::from_cache(params.clone(), keys.remove(&i).unwrap(), cache).0,
+M::SignMachine::from_cache(params.remove(&i).unwrap(), keys.remove(&i).unwrap(), cache)
+.0,
);
}
}

@@ -192,6 +203,22 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
)
}

+/// Execute the signing protocol, randomly caching various machines to ensure they can cache
+/// successfully.
+pub fn sign<
+R: RngCore + CryptoRng,
+M: PreprocessMachine<SignMachine: SignMachine<M::Signature, Params: Clone>>,
+>(
+rng: &mut R,
+params: &<M::SignMachine as SignMachine<M::Signature>>::Params,
+keys: HashMap<Participant, <M::SignMachine as SignMachine<M::Signature>>::Keys>,
+machines: HashMap<Participant, M>,
+msg: &[u8],
+) -> M::Signature {
+let params = keys.keys().map(|i| (*i, params.clone())).collect();
+sign_without_clone(rng, keys, params, machines, msg)
+}

/// Test a basic Schnorr signature with the provided keys.
pub fn test_schnorr_with_keys<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
rng: &mut R,
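The refactor above splits the test helpers in two: the `*_without_clone` variants accept pre-built per-participant state, while the original names become thin wrappers that fan a single `Clone`able value out. A hedged sketch of that wrapper pattern in isolation (the names here are stand-ins, not the frost API):

```rust
use std::collections::HashMap;

// Fan a single cloneable value out into a per-participant map, as the new
// `sign` does for its params before delegating to `sign_without_clone`
fn fan_out<P: Clone>(participants: &[u16], param: &P) -> HashMap<u16, P> {
  participants.iter().map(|i| (*i, param.clone())).collect()
}
```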
@@ -92,7 +92,10 @@ license-files = [
multiple-versions = "warn"
wildcards = "warn"
highlight = "all"
-deny = [ { name = "serde_derive", version = ">=1.0.172, <1.0.185" } ]
+deny = [
+{ name = "serde_derive", version = ">=1.0.172, <1.0.185" },
+{ name = "hashbrown", version = ">=0.15" },
+]

[sources]
unknown-registry = "deny"

@@ -102,5 +105,4 @@ allow-git = [
"https://github.com/rust-lang-nursery/lazy-static.rs",
"https://github.com/serai-dex/substrate-bip39",
"https://github.com/serai-dex/substrate",
-"https://github.com/alloy-rs/core",
]
@@ -5,20 +5,20 @@ GEM
public_suffix (>= 2.0.2, < 7.0)
bigdecimal (3.1.8)
colorator (1.1.0)
-concurrent-ruby (1.3.3)
+concurrent-ruby (1.3.4)
em-websocket (0.5.3)
eventmachine (>= 0.12.9)
http_parser.rb (~> 0)
eventmachine (1.2.7)
ffi (1.17.0-x86_64-linux-gnu)
forwardable-extended (2.6.0)
-google-protobuf (4.27.3-x86_64-linux)
+google-protobuf (4.28.2-x86_64-linux)
bigdecimal
rake (>= 13)
http_parser.rb (0.8.0)
-i18n (1.14.5)
+i18n (1.14.6)
concurrent-ruby (~> 1.0)
-jekyll (4.3.3)
+jekyll (4.3.4)
addressable (~> 2.4)
colorator (~> 1.0)
em-websocket (~> 0.5)

@@ -63,17 +63,15 @@ GEM
rb-fsevent (0.11.2)
rb-inotify (0.11.1)
ffi (~> 1.0)
-rexml (3.3.4)
-strscan
-rouge (4.3.0)
+rexml (3.3.7)
+rouge (4.4.0)
safe_yaml (1.0.5)
-sass-embedded (1.77.8-x86_64-linux-gnu)
-google-protobuf (~> 4.26)
-strscan (3.1.0)
+sass-embedded (1.79.3-x86_64-linux-gnu)
+google-protobuf (~> 4.27)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
-unicode-display_width (2.5.0)
-webrick (1.8.1)
+unicode-display_width (2.6.0)
+webrick (1.8.2)

PLATFORMS
x86_64-linux
@@ -6,7 +6,7 @@ pub(crate) use std::{
pub(crate) use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
pub(crate) use schnorr_signatures::SchnorrSignature;

-pub(crate) use serai_primitives::NetworkId;
+pub(crate) use serai_primitives::ExternalNetworkId;

pub(crate) use tokio::{
io::{AsyncReadExt, AsyncWriteExt},

@@ -193,10 +193,7 @@ async fn main() {
KEYS.write().unwrap().insert(service, key);
let mut queues = QUEUES.write().unwrap();
if service == Service::Coordinator {
-for network in serai_primitives::NETWORKS {
-if network == NetworkId::Serai {
-continue;
-}
+for network in serai_primitives::EXTERNAL_NETWORKS {
queues.insert(
(service, Service::Processor(network)),
RwLock::new(Queue(db.clone(), service, Service::Processor(network))),

@@ -210,17 +207,13 @@ async fn main() {
}
};

-// Make queues for each NetworkId, other than Serai
-for network in serai_primitives::NETWORKS {
-if network == NetworkId::Serai {
-continue;
-}
+// Make queues for each ExternalNetworkId
+for network in serai_primitives::EXTERNAL_NETWORKS {
// Use a match so we error if the list of NetworkIds changes
let Some(key) = read_key(match network {
-NetworkId::Serai => unreachable!(),
-NetworkId::Bitcoin => "BITCOIN_KEY",
-NetworkId::Ethereum => "ETHEREUM_KEY",
-NetworkId::Monero => "MONERO_KEY",
+ExternalNetworkId::Bitcoin => "BITCOIN_KEY",
+ExternalNetworkId::Ethereum => "ETHEREUM_KEY",
+ExternalNetworkId::Monero => "MONERO_KEY",
}) else {
continue;
};
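A sketch of the idiom this hunk relies on: because the `match` covers every `ExternalNetworkId` variant (per the diff: Bitcoin, Ethereum, Monero) with no catch-all arm, adding a network is a compile error until the key lookup is updated. The local enum below merely mirrors the variants shown in the diff for illustration:

```rust
// Mirrors the variants shown in the diff; the real type lives in serai_primitives
enum ExternalNetworkId {
  Bitcoin,
  Ethereum,
  Monero,
}

fn key_env_var(network: ExternalNetworkId) -> &'static str {
  // No `_` arm: a new variant fails to compile rather than silently missing a key
  match network {
    ExternalNetworkId::Bitcoin => "BITCOIN_KEY",
    ExternalNetworkId::Ethereum => "ETHEREUM_KEY",
    ExternalNetworkId::Monero => "MONERO_KEY",
  }
}
```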
@@ -3,11 +3,11 @@ use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};

use borsh::{BorshSerialize, BorshDeserialize};

-use serai_primitives::NetworkId;
+use serai_primitives::ExternalNetworkId;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
pub enum Service {
-Processor(NetworkId),
+Processor(ExternalNetworkId),
Coordinator,
}
@@ -44,7 +44,7 @@ pub enum TransactionError {
#[error("fee was too low to pass the default minimum fee rate")]
TooLowFee,
#[error("not enough funds for these payments")]
-NotEnoughFunds,
+NotEnoughFunds { inputs: u64, payments: u64, fee: u64 },
#[error("transaction was too large")]
TooLargeTransaction,
}

@@ -213,7 +213,11 @@ impl SignableTransaction {
}

if input_sat < (payment_sat + needed_fee) {
-Err(TransactionError::NotEnoughFunds)?;
+Err(TransactionError::NotEnoughFunds {
+inputs: input_sat,
+payments: payment_sat,
+fee: needed_fee,
+})?;
}

// If there's a change address, check if there's change to give it

@@ -258,9 +262,9 @@ impl SignableTransaction {
res
}

-/// Returns the outputs this transaction will create.
-pub fn outputs(&self) -> &[TxOut] {
-&self.tx.output
+/// Returns the transaction, sans witness, this will create if signed.
+pub fn transaction(&self) -> &Transaction {
+&self.tx
}

/// Create a multisig machine for this transaction.

@@ -195,10 +195,10 @@ async_sequential! {
Err(TransactionError::TooLowFee),
);

-assert_eq!(
+assert!(matches!(
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
-Err(TransactionError::NotEnoughFunds),
-);
+Err(TransactionError::NotEnoughFunds { .. }),
+));

assert_eq!(
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
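With the fields now carried on the error, a caller can report the exact shortfall instead of a bare "not enough funds". A hedged sketch, assuming the `SignableTransaction::new` arguments used in the test above:

```rust
match SignableTransaction::new(inputs.clone(), &payments, None, None, FEE) {
  Ok(_tx) => { /* proceed to signing */ }
  Err(TransactionError::NotEnoughFunds { inputs, payments, fee }) => {
    // All three values are in satoshis, per the new variant's u64 fields
    let shortfall = (payments + fee).saturating_sub(inputs);
    eprintln!("insufficient funds: need {shortfall} more satoshis");
  }
  Err(e) => eprintln!("{e}"),
}
```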
@@ -29,21 +29,21 @@ frost = { package = "modular-frost", path = "../../crypto/frost", default-featur

alloy-core = { version = "0.8", default-features = false }
alloy-sol-types = { version = "0.8", default-features = false, features = ["json"] }
-alloy-consensus = { version = "0.3", default-features = false, features = ["k256"] }
-alloy-network = { version = "0.3", default-features = false }
-alloy-rpc-types-eth = { version = "0.3", default-features = false }
-alloy-rpc-client = { version = "0.3", default-features = false }
+alloy-consensus = { version = "0.4", default-features = false, features = ["k256"] }
+alloy-network = { version = "0.4", default-features = false }
+alloy-rpc-types-eth = { version = "0.4", default-features = false }
+alloy-rpc-client = { version = "0.4", default-features = false }
alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
-alloy-provider = { version = "0.3", default-features = false }
+alloy-provider = { version = "0.4", default-features = false }

-alloy-node-bindings = { version = "0.3", default-features = false, optional = true }
+alloy-node-bindings = { version = "0.4", default-features = false, optional = true }

[dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] }

tokio = { version = "1", features = ["macros"] }

-alloy-node-bindings = { version = "0.3", default-features = false }
+alloy-node-bindings = { version = "0.4", default-features = false }

[features]
tests = ["alloy-node-bindings", "frost/tests"]

@@ -16,13 +16,13 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true

[dependencies]
-tower = "0.4"
+tower = "0.5"

serde_json = { version = "1", default-features = false }
simple-request = { path = "../../../common/request", default-features = false }

-alloy-json-rpc = { version = "0.3", default-features = false }
-alloy-transport = { version = "0.3", default-features = false }
+alloy-json-rpc = { version = "0.4", default-features = false }
+alloy-transport = { version = "0.4", default-features = false }

[features]
default = ["tls"]
@@ -39,7 +39,7 @@ impl Deployer {
nonce: 0,
gas_price: 100_000_000_000u128,
// TODO: Use a more accurate gas limit
-gas_limit: 1_000_000u128,
+gas_limit: 1_000_000,
to: TxKind::Create,
value: U256::ZERO,
input: bytecode,

@@ -226,7 +226,7 @@ impl Router {
to: TxKind::Call(self.1),
input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(),
// TODO
-gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
+gas_limit: 100_000 + ((200_000 + 10_000) * u64::try_from(outs.len()).unwrap()),
..Default::default()
}
}
@@ -6,6 +6,9 @@ the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
+
+Recommended usage of the library is with `overflow-checks = true`, even for
+release builds.

### Wallet Functionality

monero-serai originally included wallet functionality. That has been moved to
@@ -29,7 +29,11 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
let uv3 = u * v3;
let v7 = v3 * v3 * v;
let uv7 = u * v7;
-uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
+uv3 *
+uv7.pow(
+(-FieldElement::from(5u8)) *
+FieldElement::from(8u8).invert().expect("eight was coprime with the prime 2^{255}-19"),
+)
};
let x = X.square() * x;

@@ -45,9 +49,23 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
#[allow(non_snake_case)]
let mut Y = z - w;

-Y *= Z.invert().unwrap();
+/*
+If sign, `z = -486662`, else, `z = -486662 * v`
+`w = v + 1`
+
+We need `z + w \ne 0`, which would require `z \cong -w \mod 2^{255}-19`. This requires:
+- If `sign`, `v \mod 2^{255}-19 \ne 486661`.
+- If `!sign`, `(v + 1) \mod 2^{255}-19 \ne (v * 486662) \mod 2^{255}-19` which is equivalent to
+`(v * 486661) \mod 2^{255}-19 \ne 1`.
+
+In summary, if `sign`, `v` must not `486661`, and if `!sign`, `v` must not be the
+multiplicative inverse of `486661`. Since `v` is the output of a hash function, this should
+have negligible probability. Additionally, since the definition of `sign` is dependent on `v`,
+it may be truly impossible to reach.
+*/
+Y *= Z.invert().expect("if sign, v was 486661. if !sign, v was 486661^{-1}");
let mut bytes = Y.to_repr();
bytes[31] |= sign.unwrap_u8() << 7;

-decompress_point(bytes).unwrap().mul_by_cofactor()
+decompress_point(bytes).expect("point from hash-to-curve wasn't on-curve").mul_by_cofactor()
}
@@ -28,7 +28,7 @@ fn keccak256(data: &[u8]) -> [u8; 32] {
#[allow(non_snake_case)]
pub static H: LazyLock<EdwardsPoint> = LazyLock::new(|| {
decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
-.unwrap()
+.expect("known on-curve point wasn't on-curve")
.mul_by_cofactor()
});

@@ -51,8 +51,6 @@ pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
pub const MAX_COMMITMENTS: usize = 16;
/// The amount of bits a value within a commitment may use.
pub const COMMITMENT_BITS: usize = 64;
-/// The logarithm (over 2) of the amount of bits a value within a commitment may use.
-pub const LOG_COMMITMENT_BITS: usize = 6; // 2 ** 6 == N

/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]

@@ -80,11 +78,11 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
let i = 2 * i;

let mut even = preimage.clone();
-write_varint(&i, &mut even).unwrap();
+write_varint(&i, &mut even).expect("write failed but <Vec as io::Write> doesn't fail");
res.H.push(hash_to_point(keccak256(&even)));

let mut odd = preimage.clone();
-write_varint(&(i + 1), &mut odd).unwrap();
+write_varint(&(i + 1), &mut odd).expect("write failed but <Vec as io::Write> doesn't fail");
res.G.push(hash_to_point(keccak256(&odd)));
}
res
@@ -18,10 +18,12 @@ use curve25519_dalek::{
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

mod sealed {
+use core::fmt::Debug;
+
/// A trait for a number readable/writable as a VarInt.
///
/// This is sealed to prevent unintended implementations.
-pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
+pub trait VarInt: TryInto<u64, Error: Debug> + TryFrom<u64, Error: Debug> + Copy {
const BITS: usize;
}

@@ -34,6 +36,10 @@ mod sealed {
impl VarInt for u64 {
const BITS: usize = 64;
}
+// Don't compile for platforms where `usize` exceeds `u64`, preventing various possible runtime
+// exceptions
+const _NO_128_BIT_PLATFORMS: [(); (u64::BITS - usize::BITS) as usize] =
+[(); (u64::BITS - usize::BITS) as usize];
impl VarInt for usize {
const BITS: usize = core::mem::size_of::<usize>() * 8;
}

@@ -43,8 +49,12 @@ mod sealed {
///
/// This function will panic if the VarInt exceeds u64::MAX.
pub fn varint_len<V: sealed::VarInt>(varint: V) -> usize {
-let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
-((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
+let varint_u64: u64 = varint.try_into().expect("varint exceeded u64");
+((usize::try_from(u64::BITS - varint_u64.leading_zeros())
+.expect("64 > usize::MAX")
+.saturating_sub(1)) /
+7) +
+1
}

/// Write a byte.

@@ -58,9 +68,10 @@ pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
///
/// This will panic if the VarInt exceeds u64::MAX.
pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
-let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
+let mut varint: u64 = (*varint).try_into().expect("varint exceeded u64");
while {
-let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
+let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK))
+.expect("& eight_bit_mask left more than 8 bits set");
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
|
||||
f: F,
|
||||
r: &mut R,
|
||||
) -> io::Result<[T; N]> {
|
||||
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
|
||||
read_raw_vec(f, N, r).map(|vec| {
|
||||
vec.try_into().expect(
|
||||
"read vector of specific length yet couldn't transform to an array of the same length",
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Read a length-prefixed variable-length list of elements.
|
||||
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
|
||||
read_raw_vec(f, read_varint(r)?, r)
|
||||
///
|
||||
/// An optional bound on the length of the result may be provided. If `None`, the returned `Vec`
|
||||
/// will be of the length read off the reader, if successfully read. If `Some(_)`, an error will be
|
||||
/// raised if the length read off the read is greater than the bound.
|
||||
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
||||
f: F,
|
||||
length_bound: Option<usize>,
|
||||
r: &mut R,
|
||||
) -> io::Result<Vec<T>> {
|
||||
let declared_length: usize = read_varint(r)?;
|
||||
if let Some(length_bound) = length_bound {
|
||||
if declared_length > length_bound {
|
||||
Err(io::Error::other("vector exceeds bound on length"))?;
|
||||
}
|
||||
}
|
||||
read_raw_vec(f, declared_length, r)
|
||||
}
|
||||
|
||||
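The motivation for the new `length_bound`: a declared length read off the wire is attacker-controlled, so it must be checked before driving an allocation. A caller-side sketch of the same guard, assuming this crate's `read_varint` is in scope:

```rust
use std::io::{self, Read};

fn read_bounded_blobs<R: Read>(r: &mut R, bound: usize) -> io::Result<Vec<[u8; 32]>> {
  let declared: usize = read_varint(r)?;
  if declared > bound {
    // Fail before allocating, rather than trusting a hostile length prefix
    return Err(io::Error::other("vector exceeds bound on length"));
  }
  let mut res = Vec::with_capacity(declared);
  for _ in 0 .. declared {
    let mut buf = [0; 32];
    r.read_exact(&mut buf)?;
    res.push(buf);
  }
  Ok(res)
}
```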
@@ -73,7 +73,11 @@ pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar {
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
// not generate/verify a proof we believe to be valid when it isn't
-assert!(scalar != Scalar::ZERO, "ZERO HASH: {:?}", data.as_ref());
+assert!(
+scalar != Scalar::ZERO,
+"keccak256(preimage) \\cong 0 \\mod l! Preimage: {:?}",
+data.as_ref()
+);
scalar
}

@@ -124,7 +128,7 @@ impl Commitment {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 8);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -162,7 +166,14 @@ impl Decoys {
/// `offsets` are the positions of each ring member within the Monero blockchain, offset from the
/// prior member's position (with the initial ring member offset from 0).
pub fn new(offsets: Vec<u64>, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option<Self> {
-if (offsets.len() != ring.len()) || (usize::from(signer_index) >= ring.len()) {
+if (offsets.len() > usize::from(u8::MAX)) ||
+(offsets.len() != ring.len()) ||
+(usize::from(signer_index) >= ring.len())
+{
None?;
}
+// Check these offsets form representable positions
+if offsets.iter().copied().try_fold(0, u64::checked_add).is_none() {
+None?;
+}
Some(Decoys { offsets, signer_index, ring })

@@ -213,7 +224,7 @@ impl Decoys {
pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> {
write_vec(write_varint, &self.offsets, w)?;
w.write_all(&[self.signer_index])?;
-write_vec(
+write_raw_vec(
|pair, w| {
write_point(&pair[0], w)?;
write_point(&pair[1], w)

@@ -230,7 +241,7 @@ impl Decoys {
pub fn serialize(&self) -> Vec<u8> {
let mut res =
Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64));
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}

@@ -239,10 +250,12 @@ impl Decoys {
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read(r: &mut impl io::Read) -> io::Result<Decoys> {
+let offsets = read_vec(read_varint, None, r)?;
+let len = offsets.len();
Decoys::new(
-read_vec(read_varint, r)?,
+offsets,
read_byte(r)?,
-read_vec(|r| Ok([read_point(r)?, read_point(r)?]), r)?,
+read_raw_vec(|r| Ok([read_point(r)?, read_point(r)?]), len, r)?,
)
.ok_or_else(|| io::Error::other("invalid Decoys"))
}
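A hedged usage sketch for the hardened constructor: every new check (the ring length fitting a `u8`, matching `offsets`/`ring` lengths, and summed offsets not overflowing a `u64`) surfaces as `None`, so deserialization can map it all to a single error, exactly as `read` does above:

```rust
fn decoys_from_parts(
  offsets: Vec<u64>,
  signer_index: u8,
  ring: Vec<[EdwardsPoint; 2]>,
) -> std::io::Result<Decoys> {
  // Any of the constructor's validity checks failing yields `None`
  Decoys::new(offsets, signer_index, ring)
    .ok_or_else(|| std::io::Error::other("invalid Decoys"))
}
```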
@@ -8,7 +8,7 @@ fn recover_scalars() {
let stored = UnreducedScalar(hex::decode(stored).unwrap().try_into().unwrap());
let recovered =
Scalar::from_canonical_bytes(hex::decode(recovered).unwrap().try_into().unwrap()).unwrap();
-assert_eq!(stored.recover_monero_slide_scalar(), recovered);
+assert_eq!(stored.ref10_slide_scalar_vartime(), recovered);
};

// https://www.moneroinflation.com/static/data_py/report_scalars_df.pdf
@@ -14,7 +14,8 @@ use monero_io::*;
static PRECOMPUTED_SCALARS: LazyLock<[Scalar; 8]> = LazyLock::new(|| {
let mut precomputed_scalars = [Scalar::ONE; 8];
for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
-*scalar = Scalar::from(u8::try_from((i * 2) + 1).unwrap());
+*scalar =
+Scalar::from(u64::try_from((i * 2) + 1).expect("enumerating more than u64::MAX / 2 items"));
}
precomputed_scalars
});

@@ -54,12 +55,12 @@ impl UnreducedScalar {
// This matches Monero's `slide` function and intentionally gives incorrect outputs under
// certain conditions in order to match Monero.
//
-// This function does not execute in constant time.
+// This function does not execute in constant time and must only be used with public data.
fn non_adjacent_form(&self) -> [i8; 256] {
let bits = self.as_bits();
let mut naf = [0i8; 256];
for (b, bit) in bits.into_iter().enumerate() {
-naf[b] = i8::try_from(bit).unwrap();
+naf[b] = i8::try_from(bit).expect("bit didn't fit within an i8");
}

for i in 0 .. 256 {

@@ -107,15 +108,17 @@ impl UnreducedScalar {
naf
}

-/// Recover the scalar that an array of bytes was incorrectly interpreted as by Monero's `slide`
-/// function.
+/// Recover the scalar that an array of bytes was incorrectly interpreted as by ref10's `slide`
+/// function (as used by the reference Monero implementation in C++).
///
-/// In Borromean range proofs, Monero was not checking that the scalars used were
-/// reduced. This lead to the scalar stored being interpreted as a different scalar.
-/// This function recovers that scalar.
+/// For Borromean range proofs, Monero did not check the scalars used were reduced. This led to
+/// some scalars serialized being interpreted as distinct scalars. This function recovers these
+/// distinct scalars, as required to verify Borromean range proofs within the Monero protocol.
///
/// See <https://github.com/monero-project/monero/issues/8438> for more info.
-pub fn recover_monero_slide_scalar(&self) -> Scalar {
+//
+/// This function does not execute in constant time and must only be used with public data.
+pub fn ref10_slide_scalar_vartime(&self) -> Scalar {
if self.0[31] & 128 == 0 {
// Computing the w-NAF of a number can only give an output with 1 more bit than
// the number, so even if the number isn't reduced, the `slide` function will be

@@ -127,8 +130,13 @@ impl UnreducedScalar {
for &numb in self.non_adjacent_form().iter().rev() {
recovered += recovered;
match numb.cmp(&0) {
-Ordering::Greater => recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).unwrap() / 2],
-Ordering::Less => recovered -= PRECOMPUTED_SCALARS[usize::try_from(-numb).unwrap() / 2],
+Ordering::Greater => {
+recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).expect("positive i8 -> usize") / 2]
+}
+Ordering::Less => {
+recovered -=
+PRECOMPUTED_SCALARS[usize::try_from(-numb).expect("negated negative i8 -> usize") / 2]
+}
Ordering::Equal => (),
}
}
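Usage note on the rename: the `_vartime` suffix makes the contract explicit. A minimal sketch, assuming the wrapped bytes come from on-chain Borromean data (i.e. public inputs):

```rust
fn interpret_stored_scalar(bytes: [u8; 32]) -> Scalar {
  // Variable-time: only ever call this on public data
  UnreducedScalar(bytes).ref10_slide_scalar_vartime()
}
```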
@@ -56,13 +56,13 @@ impl BorromeanSignatures {
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
&self.ee,
&keys_a[i],
-&self.s0[i].recover_monero_slide_scalar(),
+&self.s0[i].ref10_slide_scalar_vartime(),
);
#[allow(non_snake_case)]
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
&keccak256_to_scalar(LL.compress().as_bytes()),
&keys_b[i],
-&self.s1[i].recover_monero_slide_scalar(),
+&self.s1[i].ref10_slide_scalar_vartime(),
);
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
}
@@ -16,8 +16,10 @@ fn generators(prefix: &'static str, path: &str) {
generators_string.extend(
format!(
"
-curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(),
-",
+curve25519_dalek::edwards::CompressedEdwardsY({:?})
+.decompress()
+.expect(\"generator from build script wasn't on-curve\"),
+",
generator.compress().to_bytes()
)
.chars(),

@@ -33,10 +35,10 @@ fn generators(prefix: &'static str, path: &str) {
let mut H_str = String::new();
serialize(&mut H_str, &generators.H);

-let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
+let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path);
let _ = remove_file(&path);
File::create(&path)
-.unwrap()
+.expect("failed to create file in $OUT_DIR")
.write_all(
format!(
"

@@ -52,15 +54,15 @@ fn generators(prefix: &'static str, path: &str) {
)
.as_bytes(),
)
-.unwrap();
+.expect("couldn't write generated source code to file on disk");
}

#[cfg(not(feature = "compile-time-generators"))]
fn generators(prefix: &'static str, path: &str) {
-let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
+let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path);
let _ = remove_file(&path);
File::create(&path)
-.unwrap()
+.expect("failed to create file in $OUT_DIR")
.write_all(
format!(
r#"

@@ -71,7 +73,7 @@ fn generators(prefix: &'static str, path: &str) {
)
.as_bytes(),
)
-.unwrap();
+.expect("couldn't write generated source code to file on disk");
}

fn main() {
@@ -23,6 +23,11 @@ pub(crate) struct InternalBatchVerifier {
impl InternalBatchVerifier {
#[must_use]
fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool {
+/*
+Technically, this following line can overflow, and joining these `Vec`s _may_ panic if
+they're individually acceptable lengths yet their sum isn't. This is so negligible, due to
+the amount of memory required, it's dismissed.
+*/
let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len();
let mut scalars = Vec::with_capacity(capacity);
let mut points = Vec::with_capacity(capacity);

@@ -6,7 +6,7 @@ use curve25519_dalek::{
edwards::EdwardsPoint,
};

-pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS, LOG_COMMITMENT_BITS};
+pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS};

pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
let mut buf_scalars = Vec::with_capacity(pairs.len());

@@ -5,7 +5,6 @@
#![allow(non_snake_case)]

use std_shims::{
-vec,
vec::Vec,
io::{self, Read, Write},
};

@@ -17,13 +16,13 @@ use curve25519_dalek::edwards::EdwardsPoint;

use monero_io::*;
pub use monero_generators::MAX_COMMITMENTS;
+use monero_generators::COMMITMENT_BITS;
use monero_primitives::Commitment;

pub(crate) mod scalar_vector;
pub(crate) mod point_vector;

pub(crate) mod core;
-use crate::core::LOG_COMMITMENT_BITS;

pub(crate) mod batch_verifier;
use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier};

@@ -44,6 +43,11 @@ use crate::plus::{
#[cfg(test)]
mod tests;

+// The logarithm (over 2) of the amount of bits a value within a commitment may use.
+const LOG_COMMITMENT_BITS: usize = COMMITMENT_BITS.ilog2() as usize;
+// The maximum length of L/R `Vec`s.
+const MAX_LR: usize = (MAX_COMMITMENTS.ilog2() as usize) + LOG_COMMITMENT_BITS;

/// An error from proving/verifying Bulletproofs(+).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
@@ -82,27 +86,30 @@ impl Bulletproof {
/// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone
/// accordingly doesn't properly represent the burden of the proof. Monero 'claws back' some of
/// the weight lost by using a proof smaller than it is fast to compensate for this.
+///
+/// If the amount of outputs specified exceeds the maximum amount of outputs, the result for the
+/// maximum amount of outputs will be returned.
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
-pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
+pub fn calculate_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
#[allow(non_snake_case)]
let mut LR_len = 0;
let mut n_padded_outputs = 1;
-while n_padded_outputs < n_outputs {
+while n_padded_outputs < n_outputs.min(MAX_COMMITMENTS) {
LR_len += 1;
n_padded_outputs = 1 << LR_len;
}
LR_len += LOG_COMMITMENT_BITS;

-let mut bp_clawback = 0;
+let mut clawback = 0;
if n_padded_outputs > 2 {
let fields = Bulletproof::bp_fields(plus);
let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2;
let size = (fields + (2 * LR_len)) * 32;
-bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
+clawback = ((base * n_padded_outputs) - size) * 4 / 5;
}

-(bp_clawback, LR_len)
+(clawback, LR_len)
}

/// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof.
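A sketch of calling the renamed `calculate_clawback`; per the new doc comment, an output count past `MAX_COMMITMENTS` is clamped rather than looping further:

```rust
// Returns the extra weight Monero charges for the prover's padded outputs
fn proof_weight_penalty(plus: bool, n_outputs: usize) -> usize {
  let (clawback, _lr_len) = Bulletproof::calculate_clawback(plus, n_outputs);
  clawback
}
```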
@@ -119,9 +126,15 @@ impl Bulletproof {
let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
Ok(Bulletproof::Original(
OriginalStatement::new(&commitments)
-.unwrap()
-.prove(rng, OriginalWitness::new(outputs).unwrap())
-.unwrap(),
+.expect("failed to create statement despite checking amount of commitments")
+.prove(
+rng,
+OriginalWitness::new(outputs)
+.expect("failed to create witness despite checking amount of commitments"),
+)
+.expect(
+"failed to prove Bulletproof::Original despite ensuring statement/witness consistency",
+),
))
}

@@ -139,9 +152,15 @@ impl Bulletproof {
let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
Ok(Bulletproof::Plus(
PlusStatement::new(&commitments)
-.unwrap()
-.prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
-.unwrap(),
+.expect("failed to create statement despite checking amount of commitments")
+.prove(
+rng,
+&Zeroizing::new(
+PlusWitness::new(outputs)
+.expect("failed to create witness despite checking amount of commitments"),
+),
+)
+.expect("failed to prove Bulletproof::Plus despite ensuring statement/witness consistency"),
))
}

@@ -250,8 +269,8 @@ impl Bulletproof {

/// Serialize a Bulletproof(+) to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
-let mut serialized = vec![];
-self.write(&mut serialized).unwrap();
+let mut serialized = Vec::with_capacity(512);
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

@@ -265,8 +284,8 @@ impl Bulletproof {
tau_x: read_scalar(r)?,
mu: read_scalar(r)?,
ip: IpProof {
-L: read_vec(read_point, r)?,
-R: read_vec(read_point, r)?,
+L: read_vec(read_point, Some(MAX_LR), r)?,
+R: read_vec(read_point, Some(MAX_LR), r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
},

@@ -284,8 +303,8 @@ impl Bulletproof {
r_answer: read_scalar(r)?,
s_answer: read_scalar(r)?,
delta_answer: read_scalar(r)?,
-L: read_vec(read_point, r)?.into_iter().collect(),
-R: read_vec(read_point, r)?.into_iter().collect(),
+L: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(),
+R: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(),
},
}))
}
@@ -174,7 +174,11 @@ impl IpStatement {
R_vec.push(R * INV_EIGHT());

// Now that we've calculate L, R, transcript them to receive x (26-27)
-transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap());
+transcript = Self::transcript_L_R(
+transcript,
+*L_vec.last().expect("couldn't get last L_vec despite always being non-empty"),
+*R_vec.last().expect("couldn't get last R_vec despite always being non-empty"),
+);
let x = transcript;
let x_inv = x.invert();

@@ -227,8 +227,11 @@ impl<'a> AggregateRangeStatement<'a> {
let x_ip = transcript;

let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
-.prove(transcript, IpWitness::new(l, r).unwrap())
-.unwrap();
+.prove(
+transcript,
+IpWitness::new(l, r).expect("Bulletproofs::Original created an invalid IpWitness"),
+)
+.expect("Bulletproofs::Original failed to prove the inner-product");

let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip };
#[cfg(debug_assertions)]

@@ -106,7 +106,9 @@ impl<'a> AggregateRangeStatement<'a> {

let mut d = ScalarVector::new(mn);
for j in 1 ..= V.len() {
-z_pow.push(*z_pow.last().unwrap() * z_pow[0]);
+z_pow.push(
+*z_pow.last().expect("couldn't get last z_pow despite always being non-empty") * z_pow[0],
+);
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
}

@@ -229,8 +231,15 @@ impl<'a> AggregateRangeStatement<'a> {
Some(AggregateRangeProof {
A,
wip: WipStatement::new(generators, A_hat, y)
-.prove(rng, transcript, &Zeroizing::new(WipWitness::new(a_l, a_r, alpha).unwrap()))
-.unwrap(),
+.prove(
+rng,
+transcript,
+&Zeroizing::new(
+WipWitness::new(a_l, a_r, alpha)
+.expect("Bulletproofs::Plus created an invalid WipWitness"),
+),
+)
+.expect("Bulletproof::Plus failed to prove the weighted inner-product"),
})
}

@@ -65,7 +65,10 @@ impl BpPlusGenerators {
pub(crate) fn reduce(&self, generators: usize) -> Self {
// Round to the nearest power of 2
let generators = padded_pow_of_2(generators);
-assert!(generators <= self.g_bold.len());
+assert!(
+generators <= self.g_bold.len(),
+"instantiated with less generators than application required"
+);

BpPlusGenerators { g_bold: &self.g_bold[.. generators], h_bold: &self.h_bold[.. generators] }
}

@@ -230,7 +230,9 @@ impl WipStatement {
let c_l = a1.clone().weighted_inner_product(&b2, &y);
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);

-let y_inv_n_hat = y_inv.pop().unwrap();
+let y_inv_n_hat = y_inv
+.pop()
+.expect("couldn't pop y_inv despite y_inv being of same length as times iterated");

let mut L_terms = (a1.clone() * y_inv_n_hat)
.0

@@ -331,7 +333,9 @@ impl WipStatement {
let mut res = Vec::with_capacity(y.len());
res.push(inv_y);
while res.len() < y.len() {
-res.push(inv_y * res.last().unwrap());
+res.push(
+inv_y * res.last().expect("couldn't get last inv_y despite inv_y always being non-empty"),
+);
}
res
};
@@ -89,8 +89,8 @@ impl ClsagContext {

#[allow(clippy::large_enum_variant)]
enum Mode {
-Sign(usize, EdwardsPoint, EdwardsPoint),
-Verify(Scalar),
+Sign { signer_index: u8, A: EdwardsPoint, AH: EdwardsPoint },
+Verify { c1: Scalar, D_serialized: EdwardsPoint },
}

// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences

@@ -100,18 +100,18 @@ fn core(
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
-msg: &[u8; 32],
-D: &EdwardsPoint,
+msg_hash: &[u8; 32],
+D_torsion_free: &EdwardsPoint,
s: &[Scalar],
A_c1: &Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();

let images_precomp = match A_c1 {
-Mode::Sign(..) => None,
-Mode::Verify(..) => Some(VartimeEdwardsPrecomputation::new([I, D])),
+Mode::Sign { .. } => None,
+Mode::Verify { .. } => Some(VartimeEdwardsPrecomputation::new([I, D_torsion_free])),
};
-let D_INV_EIGHT = D * INV_EIGHT();
+let D_inv_eight = D_torsion_free * INV_EIGHT();

// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed

@@ -140,7 +140,14 @@ fn core(
}

to_hash.extend(I.compress().to_bytes());
-to_hash.extend(D_INV_EIGHT.compress().to_bytes());
+match A_c1 {
+Mode::Sign { .. } => {
+to_hash.extend(D_inv_eight.compress().to_bytes());
+}
+Mode::Verify { D_serialized, .. } => {
+to_hash.extend(D_serialized.compress().to_bytes());
+}
+}
to_hash.extend(pseudo_out.compress().to_bytes());
// mu_P with agg_0
let mu_P = keccak256_to_scalar(&to_hash);

@@ -156,22 +163,23 @@ fn core(
// Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
// truncated just to add it back
to_hash.extend(pseudo_out.compress().to_bytes());
-to_hash.extend(msg);
+to_hash.extend(msg_hash);

// Configure the loop based on if we're signing or verifying
let start;
let end;
let mut c;
match A_c1 {
-Mode::Sign(r, A, AH) => {
-start = r + 1;
-end = r + n;
+Mode::Sign { signer_index, A, AH } => {
+let signer_index = usize::from(*signer_index);
+start = signer_index + 1;
+end = signer_index + n;
to_hash.extend(A.compress().to_bytes());
to_hash.extend(AH.compress().to_bytes());
c = keccak256_to_scalar(&to_hash);
}

-Mode::Verify(c1) => {
+Mode::Verify { c1, .. } => {
start = 0;
end = n;
c = *c1;

@@ -186,10 +194,10 @@ fn core(

// (s_i * G) + (c_p * P_i) + (c_c * C_i)
let L = match A_c1 {
-Mode::Sign(..) => {
+Mode::Sign { .. } => {
EdwardsPoint::multiscalar_mul([s[i], c_p, c_c], [ED25519_BASEPOINT_POINT, P[i], C[i]])
}
-Mode::Verify(..) => {
+Mode::Verify { .. } => {
G_PRECOMP().vartime_mixed_multiscalar_mul([s[i]], [c_p, c_c], [P[i], C[i]])
}
};

@@ -198,10 +206,13 @@ fn core(

// (c_p * I) + (c_c * D) + (s_i * PH)
let R = match A_c1 {
-Mode::Sign(..) => EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D, &PH]),
-Mode::Verify(..) => {
-images_precomp.as_ref().unwrap().vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH])
+Mode::Sign { .. } => {
+EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D_torsion_free, &PH])
}
+Mode::Verify { .. } => images_precomp
+.as_ref()
+.expect("value populated when verifying wasn't populated")
+.vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]),
};

to_hash.truncate(((2 * n) + 3) * 32);

@@ -216,7 +227,7 @@ fn core(
}

// This first tuple is needed to continue signing, the latter is the c to be tested/worked with
-((D_INV_EIGHT, c * mu_P, c * mu_C), c1)
+((D_inv_eight, c * mu_P, c * mu_C), c1)
}

/// The CLSAG signature, as used in Monero.

@@ -245,23 +256,30 @@ impl Clsag {
I: &EdwardsPoint,
input: &ClsagContext,
mask: Scalar,
-msg: &[u8; 32],
+msg_hash: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint,
) -> ClsagSignCore {
-let r: usize = input.decoys.signer_index().into();
+let signer_index = input.decoys.signer_index();

let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
let mask_delta = input.commitment.mask - mask;

-let H = hash_to_point(input.decoys.ring()[r][0].compress().0);
+let H = hash_to_point(input.decoys.ring()[usize::from(signer_index)][0].compress().0);
let D = H * mask_delta;
let mut s = Vec::with_capacity(input.decoys.ring().len());
for _ in 0 .. input.decoys.ring().len() {
s.push(Scalar::random(rng));
}
-let ((D, c_p, c_c), c1) =
-core(input.decoys.ring(), I, &pseudo_out, msg, &D, &s, &Mode::Sign(r, A, AH));
+let ((D, c_p, c_c), c1) = core(
+input.decoys.ring(),
+I,
+&pseudo_out,
+msg_hash,
+&D,
+&s,
+&Mode::Sign { signer_index, A, AH },
+);

ClsagSignCore {
incomplete_clsag: Clsag { D, s, c1 },

@@ -288,11 +306,15 @@ impl Clsag {
/// `inputs` is of the form (discrete logarithm of the key, context).
///
/// `sum_outputs` is for the sum of the output commitments' masks.
+///
+/// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which
+/// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do
+/// not use this if you don't know what you're doing.
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
mut inputs: Vec<(Zeroizing<Scalar>, ClsagContext)>,
sum_outputs: Scalar,
-msg: [u8; 32],
+msg_hash: [u8; 32],
) -> Result<Vec<(Clsag, EdwardsPoint)>, ClsagError> {
// Create the key images
let mut key_image_generators = vec![];

@@ -329,7 +351,7 @@ impl Clsag {
&key_images[i],
&inputs[i].1,
mask,
-&msg,
+&msg_hash,
nonce.deref() * ED25519_BASEPOINT_TABLE,
nonce.deref() * key_image_generators[i],
);

@@ -345,7 +367,7 @@ impl Clsag {
nonce.zeroize();

debug_assert!(clsag
-.verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg)
+.verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg_hash)
.is_ok());

res.push((clsag, pseudo_out));

@@ -355,12 +377,16 @@ impl Clsag {
}

/// Verify a CLSAG signature for the provided context.
+///
+/// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which
+/// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do
+/// not use this if you don't know what you're doing.
pub fn verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
-msg: &[u8; 32],
+msg_hash: &[u8; 32],
) -> Result<(), ClsagError> {
// Preliminary checks
// s, c1, and points must also be encoded canonically, which is checked at time of decode

@@ -374,12 +400,20 @@ impl Clsag {
Err(ClsagError::InvalidImage)?;
}

-let D = self.D.mul_by_cofactor();
-if D.is_identity() {
+let D_torsion_free = self.D.mul_by_cofactor();
+if D_torsion_free.is_identity() {
Err(ClsagError::InvalidD)?;
}

-let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, &Mode::Verify(self.c1));
+let (_, c1) = core(
+ring,
+I,
+pseudo_out,
+msg_hash,
+&D_torsion_free,
+&self.s,
+&Mode::Verify { c1: self.c1, D_serialized: self.D },
+);
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}
@@ -56,13 +56,12 @@ impl ClsagContext {

/// A channel to send the mask to use for the pseudo-out (rerandomized commitment) with.
///
-/// A mask must be sent along this channel before any preprocess addendums are handled. Breaking
-/// this rule will cause a panic.
-#[derive(Clone, Debug)]
+/// A mask must be sent along this channel before any preprocess addendums are handled.
+#[derive(Debug)]
pub struct ClsagMultisigMaskSender {
buf: Arc<Mutex<Option<Scalar>>>,
}
-#[derive(Clone, Debug)]
+#[derive(Debug)]
struct ClsagMultisigMaskReceiver {
buf: Arc<Mutex<Option<Scalar>>>,
}

@@ -74,12 +73,14 @@ impl ClsagMultisigMaskSender {

/// Send a mask to a CLSAG multisig instance.
pub fn send(self, mask: Scalar) {
+// There is no risk this was prior set as this consumes `self`, which does not implement
+// `Clone`
*self.buf.lock() = Some(mask);
}
}
impl ClsagMultisigMaskReceiver {
-fn recv(self) -> Scalar {
-self.buf.lock().unwrap()
+fn recv(self) -> Option<Scalar> {
+*self.buf.lock()
}
}
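A minimal, std-only sketch of the oneshot-like channel these types implement: both halves share an `Arc<Mutex<Option<T>>>`, and since `send`/`recv` take `self` by value and neither half is `Clone` anymore, the slot can be written and read at most once.

```rust
use std::sync::{Arc, Mutex};

struct OneshotSender<T>(Arc<Mutex<Option<T>>>);
struct OneshotReceiver<T>(Arc<Mutex<Option<T>>>);

fn oneshot<T>() -> (OneshotSender<T>, OneshotReceiver<T>) {
  let buf = Arc::new(Mutex::new(None));
  (OneshotSender(buf.clone()), OneshotReceiver(buf))
}

impl<T> OneshotSender<T> {
  fn send(self, value: T) {
    // Consuming `self` means a second send can't race with this one
    *self.0.lock().unwrap() = Some(value);
  }
}

impl<T> OneshotReceiver<T> {
  fn recv(self) -> Option<T> {
    // `None` means nothing was ever sent
    self.0.lock().unwrap().take()
  }
}
```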
@@ -114,12 +115,12 @@ struct Interim {
|
||||
|
||||
/// FROST-inspired algorithm for producing a CLSAG signature.
|
||||
///
|
||||
/// Before this has its `process_addendum` called, a mask must be set. Else this will panic.
|
||||
/// Before this has its `process_addendum` called, a mask must be set.
|
||||
///
|
||||
/// The message signed is expected to be a 32-byte value. Per Monero, it's the keccak256 hash of
|
||||
/// the transaction data which is signed. This will panic if the message is not a 32-byte value.
|
||||
#[allow(non_snake_case)]
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Debug)]
|
||||
pub struct ClsagMultisig {
|
||||
transcript: RecommendedTranscript,
|
||||
|
||||
@@ -132,7 +133,7 @@ pub struct ClsagMultisig {
|
||||
mask_recv: Option<ClsagMultisigMaskReceiver>,
mask: Option<Scalar>,

msg: Option<[u8; 32]>,
msg_hash: Option<[u8; 32]>,
interim: Option<Interim>,
}

@@ -156,7 +157,7 @@ impl ClsagMultisig {
mask_recv: Some(mask_recv),
mask: None,

msg: None,
msg_hash: None,
interim: None,
},
mask_send,
@@ -218,7 +219,14 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// Fetch the mask from the Mutex
// We set it to a variable to ensure our view of it is consistent
// It was this or a mpsc channel... std doesn't have oneshot :/
self.mask = Some(self.mask_recv.take().unwrap().recv());
self.mask = Some(
self
.mask_recv
.take()
.expect("image was none multiple times, despite setting to Some on first iteration")
.recv()
.ok_or(FrostError::InternalError("CLSAG mask was not provided"))?,
);
// Transcript the mask
self.transcript.append_message(b"mask", self.mask.expect("mask wasn't set").to_bytes());

@@ -235,7 +243,8 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// Accumulate the interpolated share
let interpolated_key_image_share =
addendum.key_image_share * lagrange::<dfg::Scalar>(l, view.included());
*self.image.as_mut().unwrap() += interpolated_key_image_share;
*self.image.as_mut().expect("image populated on first iteration wasn't Some") +=
interpolated_key_image_share;

self
.key_image_shares
@@ -253,7 +262,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
view: &ThresholdView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: Vec<Zeroizing<dfg::Scalar>>,
msg: &[u8],
msg_hash: &[u8],
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
//
@@ -264,14 +273,15 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// opening of the commitment being re-randomized (and what it's re-randomized to)
let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));

self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
let msg_hash = msg_hash.try_into().expect("CLSAG message hash should be 32-bytes");
self.msg_hash = Some(msg_hash);

let sign_core = Clsag::sign_core(
&mut rng,
&self.image.expect("verifying a share despite never processing any addendums").0,
&self.context,
self.mask.expect("mask wasn't set"),
self.msg.as_ref().unwrap(),
&msg_hash,
nonce_sums[0][0].0,
nonce_sums[0][1].0,
);
@@ -293,7 +303,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
_: &[Vec<dfg::EdwardsPoint>],
sum: dfg::Scalar,
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let interim = self.interim.as_ref().expect("verify called before sign_share");
let mut clsag = interim.clsag.clone();
// We produced shares as `r - p x`, yet the signature is actually `r - p x - c x`
// Subtract `c x` (saved as `c`) now
@@ -303,7 +313,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
self.context.decoys.ring(),
&self.image.expect("verifying a signature despite never processing any addendums").0,
&interim.pseudo_out,
self.msg.as_ref().unwrap(),
self.msg_hash.as_ref().expect("verify called before sign_share"),
)
.is_ok()
{
@@ -318,7 +328,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
nonces: &[Vec<dfg::EdwardsPoint>],
share: dfg::Scalar,
) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
let interim = self.interim.as_ref().unwrap();
let interim = self.interim.as_ref().expect("verify_share called before sign_share");

// For a share `r - p x`, the following two equalities should hold:
// - `(r - p x)G == R.0 - pV`, where `V = xG`

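The share-verification identity referenced above is plain group arithmetic; a standalone sketch of it over Ed25519, with curve25519-dalek standing in for the dfg wrappers (names illustrative, not this crate's API):

use curve25519_dalek::{edwards::EdwardsPoint, scalar::Scalar};

// For a share `s = r - p x`, check `s G == R - p V`, where `V = x G` and `R = r G`
fn share_consistent(s: &Scalar, big_r: &EdwardsPoint, p: &Scalar, big_v: &EdwardsPoint) -> bool {
  EdwardsPoint::mul_base(s) == big_r - (p * big_v)
}
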
@@ -19,7 +19,8 @@ use crate::ClsagMultisig;
#[cfg(feature = "multisig")]
use frost::{
Participant,
tests::{key_gen, algorithm_machines, sign},
sign::AlgorithmMachine,
tests::{key_gen, algorithm_machines_without_clone, sign_without_clone},
};

const RING_LEN: u64 = 11;
@@ -31,7 +32,7 @@ const RING_INDEX: u8 = 3;
#[test]
fn clsag() {
for real in 0 .. RING_LEN {
let msg = [1; 32];
let msg_hash = [1; 32];

let mut secrets = (Zeroizing::new(Scalar::ZERO), Scalar::ZERO);
let mut ring = vec![];
@@ -61,18 +62,18 @@ fn clsag() {
.unwrap(),
)],
Scalar::random(&mut OsRng),
msg,
msg_hash,
)
.unwrap()
.swap_remove(0);

let image =
hash_to_point((ED25519_BASEPOINT_TABLE * secrets.0.deref()).compress().0) * secrets.0.deref();
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
clsag.verify(&ring, &image, &pseudo_out, &msg_hash).unwrap();

// make sure verification fails if we throw a random `c1` at it.
clsag.c1 = Scalar::random(&mut OsRng);
assert!(clsag.verify(&ring, &image, &pseudo_out, &msg).is_err());
assert!(clsag.verify(&ring, &image, &pseudo_out, &msg_hash).is_err());
}
}

@@ -99,21 +100,32 @@ fn clsag_multisig() {
ring.push([dest, Commitment::new(mask, amount).calculate()]);
}

let (algorithm, mask_send) = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
ClsagContext::new(
Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(),
Commitment::new(randomness, AMOUNT),
)
.unwrap(),
);
mask_send.send(Scalar::random(&mut OsRng));
let mask = Scalar::random(&mut OsRng);
let params = || {
let (algorithm, mask_send) = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
ClsagContext::new(
Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(),
Commitment::new(randomness, AMOUNT),
)
.unwrap(),
);
mask_send.send(mask);
algorithm
};

sign(
sign_without_clone(
&mut OsRng,
&algorithm,
keys.clone(),
algorithm_machines(&mut OsRng, &algorithm, &keys),
keys.values().map(|keys| (keys.params().i(), params())).collect(),
algorithm_machines_without_clone(
&mut OsRng,
&keys,
keys
.values()
.map(|keys| (keys.params().i(), AlgorithmMachine::new(params(), keys.clone())))
.collect(),
),
&[1; 32],
);
}

@@ -122,6 +122,10 @@ impl Mlsag {
}

/// Verify a MLSAG.
///
/// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which
/// makes assumptions on what has already been transcripted and bound to within `msg`. Do not use
/// this if you don't know what you're doing.
pub fn verify(
&self,
msg: &[u8; 32],

@@ -17,6 +17,7 @@ workspace = true

[dependencies]
hex = { version = "0.4", default-features = false, features = ["alloc"] }
zeroize = { version = "^1.5", default-features = false, features = ["alloc", "std"] }
digest_auth = { version = "0.3", default-features = false }
simple-request = { path = "../../../../common/request", version = "0.1", default-features = false, features = ["tls"] }
tokio = { version = "1", default-features = false }

@@ -7,6 +7,7 @@ use std::{sync::Arc, io::Read, time::Duration};

use tokio::sync::Mutex;

use zeroize::Zeroizing;
use digest_auth::{WwwAuthenticateHeader, AuthContext};
use simple_request::{
hyper::{StatusCode, header::HeaderValue, Request},
@@ -25,8 +26,8 @@ enum Authentication {
// This ensures that if a nonce is requested, another caller doesn't make a request invalidating
// it
Authenticated {
username: String,
password: String,
username: Zeroizing<String>,
password: Zeroizing<String>,
#[allow(clippy::type_complexity)]
connection: Arc<Mutex<(Option<(WwwAuthenticateHeader, u64)>, Client)>>,
},
@@ -77,7 +78,7 @@ impl SimpleRequestRpc {
) -> Result<SimpleRequestRpc, RpcError> {
let authentication = if url.contains('@') {
// Parse out the username and password
let url_clone = url;
let url_clone = Zeroizing::new(url);
let split_url = url_clone.split('@').collect::<Vec<_>>();
if split_url.len() != 2 {
Err(RpcError::ConnectionError("invalid amount of login specifications".to_string()))?;
@@ -114,8 +115,8 @@ impl SimpleRequestRpc {
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
)?;
Authentication::Authenticated {
username: split_userpass[0].to_string(),
password: (*split_userpass.get(1).unwrap_or(&"")).to_string(),
username: Zeroizing::new(split_userpass[0].to_string()),
password: Zeroizing::new((*split_userpass.get(1).unwrap_or(&"")).to_string()),
connection: Arc::new(Mutex::new((challenge, client))),
}
} else {
@@ -135,35 +136,13 @@ impl SimpleRequestRpc {
};

async fn body_from_response(response: Response<'_>) -> Result<Vec<u8>, RpcError> {
/*
let length = usize::try_from(
response
.headers()
.get("content-length")
.ok_or(RpcError::InvalidNode("no content-length header"))?
.to_str()
.map_err(|_| RpcError::InvalidNode("non-ascii content-length value"))?
.parse::<u32>()
.map_err(|_| RpcError::InvalidNode("non-u32 content-length value"))?,
)
.unwrap();
// Only pre-allocate 1 MB so a malicious node which claims a content-length of 1 GB actually
// has to send 1 GB of data to cause a 1 GB allocation
let mut res = Vec::with_capacity(length.max(1024 * 1024));
let mut body = response.into_body();
while res.len() < length {
let Some(data) = body.data().await else { break };
res.extend(data.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?.as_ref());
}
*/

let mut res = Vec::with_capacity(128);
response
.body()
.await
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
.read_to_end(&mut res)
.unwrap();
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?;
Ok(res)
}

@@ -202,8 +181,8 @@ impl SimpleRequestRpc {
*cnonce += 1;

let mut context = AuthContext::new_post::<_, _, _, &[u8]>(
username,
password,
<_ as AsRef<str>>::as_ref(username),
<_ as AsRef<str>>::as_ref(password),
"/".to_string() + route,
None,
);
@@ -219,7 +198,12 @@ impl SimpleRequestRpc {
})?
.to_header_string(),
)
.unwrap(),
.map_err(|_| {
RpcError::InternalError(
"digest-auth challenge response wasn't a valid string for an HTTP header"
.to_string(),
)
})?,
);
}

@@ -269,7 +253,7 @@ impl SimpleRequestRpc {
))?
}
} else {
body_from_response(response.unwrap()).await?
body_from_response(response.expect("no response yet also no error?")).await?
}
}
});

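The Authenticated variant above keeps the challenge and the client behind one Mutex precisely so a fetched digest-auth nonce can't be invalidated by a racing request on the same connection; a minimal sketch of that ownership pattern (types and names simplified, not this crate's API):

use std::sync::Arc;
use tokio::sync::Mutex;

// The digest-auth challenge (with its cnonce counter) and the connection it belongs
// to are guarded together, so a nonce is always used on the connection it came from
struct Authenticated<Challenge, Client> {
  connection: Arc<Mutex<(Option<(Challenge, u64)>, Client)>>,
}

impl<Challenge, Client> Authenticated<Challenge, Client> {
  async fn with_connection<R>(
    &self,
    f: impl FnOnce(&mut Option<(Challenge, u64)>, &mut Client) -> R,
  ) -> R {
    // Hold the lock across the whole request so the nonce sequence stays consistent
    let mut guard = self.connection.lock().await;
    let (challenge, client) = &mut *guard;
    f(challenge, client)
  }
}
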
@@ -121,7 +121,7 @@ impl FeeRate {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(16);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}

@@ -139,15 +139,22 @@ impl FeeRate {
///
/// This function may panic upon overflow.
pub fn calculate_fee_from_weight(&self, weight: usize) -> u64 {
let fee = self.per_weight * u64::try_from(weight).unwrap();
let fee =
self.per_weight * u64::try_from(weight).expect("couldn't convert weight (usize) to u64");
let fee = fee.div_ceil(self.mask) * self.mask;
debug_assert_eq!(weight, self.calculate_weight_from_fee(fee), "Miscalculated weight from fee");
debug_assert_eq!(
Some(weight),
self.calculate_weight_from_fee(fee),
"Miscalculated weight from fee"
);
fee
}

/// Calculate the weight from the fee.
pub fn calculate_weight_from_fee(&self, fee: u64) -> usize {
usize::try_from(fee / self.per_weight).unwrap()
///
/// Returns `None` if the weight would not fit within a `usize`.
pub fn calculate_weight_from_fee(&self, fee: u64) -> Option<usize> {
usize::try_from(fee / self.per_weight).ok()
}
}

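The rounding in calculate_fee_from_weight scales the weight by the rate and then rounds up to the nearest multiple of the mask; a standalone sketch of just that arithmetic, with hypothetical values (not Monero's actual fee constants):

// Scale by the rate, then round up to the nearest multiple of the mask
fn fee_from_weight(per_weight: u64, mask: u64, weight: u64) -> u64 {
  (per_weight * weight).div_ceil(mask) * mask
}

fn main() {
  // Hypothetical rate and mask, solely for illustration
  assert_eq!(fee_from_weight(8, 10_000, 1_537), 20_000); // 12_296 rounded up
}
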
@@ -249,7 +256,7 @@ fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
/// While no implementors are directly provided, [monero-simple-request-rpc](
/// https://github.com/serai-dex/serai/tree/develop/networks/monero/rpc/simple-request
/// ) is recommended.
pub trait Rpc: Sync + Clone + Debug {
pub trait Rpc: Sync + Clone {
/// Perform a POST request to the specified route with the specified body.
///
/// The implementor is left to handle anything such as authentication.
@@ -272,8 +279,14 @@ pub trait Rpc: Sync + Clone + Debug {
let res = self
.post(
route,
if let Some(params) = params {
serde_json::to_string(&params).unwrap().into_bytes()
if let Some(params) = params.as_ref() {
serde_json::to_string(params)
.map_err(|e| {
RpcError::InternalError(format!(
"couldn't convert parameters ({params:?}) to JSON: {e:?}"
))
})?
.into_bytes()
} else {
vec![]
},
@@ -295,7 +308,10 @@ pub trait Rpc: Sync + Clone + Debug {
async move {
let mut req = json!({ "method": method });
if let Some(params) = params {
req.as_object_mut().unwrap().insert("params".into(), params);
req
.as_object_mut()
.expect("accessing object as object failed?")
.insert("params".into(), params);
}
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
}
@@ -385,6 +401,11 @@ pub trait Rpc: Sync + Clone + Debug {
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
))?;
}
if txs.txs.len() != this_count {
Err(RpcError::InvalidNode(
"not missing any transactions yet didn't return all transactions".to_string(),
))?;
}

all_txs.extend(txs.txs);
}

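The request assembled in the json_rpc hunk is a standard JSON-RPC body; a sketch of what the builder produces, using serde_json with an illustrative method name:

use serde_json::{json, Value};

// Build a JSON-RPC request object, attaching params only when provided
fn build_request(method: &str, params: Option<Value>) -> Value {
  let mut req = json!({ "method": method });
  if let Some(params) = params {
    // `req` was just built as an object, so `as_object_mut` is infallible here
    req.as_object_mut().expect("json! object wasn't an object").insert("params".into(), params);
  }
  req
}

fn main() {
  let req = build_request("get_block_count", None);
  assert_eq!(req.to_string(), r#"{"method":"get_block_count"}"#);
}
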
@@ -1003,10 +1024,10 @@ pub trait Rpc: Sync + Clone + Debug {
/// An implementation is provided for any satisfier of `Rpc`. It is not recommended to use an `Rpc`
/// object to satisfy this. This should be satisfied by a local store of the output distribution,
/// both for performance and to prevent potential attacks a remote node can perform.
pub trait DecoyRpc: Sync + Clone + Debug {
pub trait DecoyRpc: Sync {
/// Get the height the output distribution ends at.
///
/// This is equivalent to the hight of the blockchain it's for. This is intended to be cheaper
/// This is equivalent to the height of the blockchain it's for. This is intended to be cheaper
/// than fetching the entire output distribution.
fn get_output_distribution_end_height(
&self,
@@ -1130,7 +1151,13 @@ impl<R: Rpc> DecoyRpc for R {
)))?;
}

let expected_len = if zero_zero_case { 2 } else { (to - start_height) + 1 };
let expected_len = if zero_zero_case {
2
} else {
(to - start_height).checked_add(1).ok_or_else(|| {
RpcError::InternalError("expected length of distribution exceeded usize".to_string())
})?
};
// Yet this is actually a height
if expected_len != distribution.len() {
Err(RpcError::InvalidNode(format!(
@@ -1145,6 +1172,20 @@ impl<R: Rpc> DecoyRpc for R {
if zero_zero_case {
distribution.pop();
}

// Check the distribution monotonically increases
{
let mut monotonic = 0;
for d in &distribution {
if *d < monotonic {
Err(RpcError::InvalidNode(
"received output distribution didn't increase monotonically".to_string(),
))?;
}
monotonic = *d;
}
}

Ok(distribution)
}
}
@@ -1255,8 +1296,8 @@ impl<R: Rpc> DecoyRpc for R {
// https://github.com/monero-project/monero/blob
// /cc73fe71162d564ffda8e549b79a350bca53c454/src/cryptonote_core
// /blockchain.cpp#L3836
((out.height + DEFAULT_LOCK_WINDOW) <= height) &&
(Timelock::Block(height - 1 + ACCEPTED_TIMELOCK_DELTA) >=
out.height.checked_add(DEFAULT_LOCK_WINDOW).is_some_and(|locked| locked <= height) &&
(Timelock::Block(height.wrapping_add(ACCEPTED_TIMELOCK_DELTA - 1)) >=
txs[i].prefix().additional_timelock)
} else {
out.unlocked

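A cumulative output distribution must never decrease, which is what the new check enforces; the same validation expressed as a standalone sketch over slice windows:

// Returns true if the distribution never decreases
fn is_monotonic(distribution: &[u64]) -> bool {
  distribution.windows(2).all(|w| w[0] <= w[1])
}

fn main() {
  assert!(is_monotonic(&[1, 5, 5, 9]));
  assert!(!is_monotonic(&[1, 5, 4]));
}
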
@@ -51,7 +51,7 @@ impl BlockHeader {
/// Serialize the BlockHeader to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

@@ -79,10 +79,13 @@ pub struct Block {
}

impl Block {
/// The zero-index position of this block within the blockchain.
/// The zero-indexed position of this block within the blockchain.
///
/// This information comes from the Block's miner transaction. If the miner transaction isn't
/// structured as expected, this will return None.
/// structured as expected, this will return None. This will return Some for any Block which would
/// pass the consensus rules.
// https://github.com/monero-project/monero/blob/a1dc85c5373a30f14aaf7dcfdd95f5a7375d3623
// /src/cryptonote_core/blockchain.cpp#L1365-L1382
pub fn number(&self) -> Option<usize> {
match &self.miner_transaction {
Transaction::V1 { prefix, .. } | Transaction::V2 { prefix, .. } => {
@@ -108,7 +111,7 @@ impl Block {
/// Serialize the Block to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

@@ -119,7 +122,13 @@ impl Block {
pub fn serialize_pow_hash(&self) -> Vec<u8> {
let mut blob = self.header.serialize();
blob.extend_from_slice(&merkle_root(self.miner_transaction.hash(), &self.transactions));
write_varint(&(1 + u64::try_from(self.transactions.len()).unwrap()), &mut blob).unwrap();
write_varint(
&(1 +
u64::try_from(self.transactions.len())
.expect("amount of transactions in block exceeded u64::MAX")),
&mut blob,
)
.expect("write failed but <Vec as io::Write> doesn't fail");
blob
}

@@ -129,7 +138,11 @@ impl Block {
// Monero pre-appends a VarInt of the block-to-hash's length before getting the block hash,
// but doesn't do this when getting the proof of work hash :)
let mut hashing_blob = Vec::with_capacity(9 + hashable.len());
write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
write_varint(
&u64::try_from(hashable.len()).expect("length of block hash's preimage exceeded u64::MAX"),
&mut hashing_blob,
)
.expect("write failed but <Vec as io::Write> doesn't fail");
hashing_blob.append(&mut hashable);

let hash = keccak256(hashing_blob);

@@ -28,7 +28,7 @@ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {

let mut paired_hashes = Vec::with_capacity(overage);
while let Some(left) = rightmost.next() {
let right = rightmost.next().unwrap();
let right = rightmost.next().expect("rightmost is of even length");
paired_hashes.push(keccak256([left.as_ref(), &right].concat()));
}
drop(rightmost);

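The loop above consumes an even-length tail two elements at a time; the same pairing can be sketched standalone with chunks_exact (the sha3 crate's Keccak256 stands in for this crate's keccak256 helper):

use sha3::{Digest, Keccak256};

fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

// Hash adjacent pairs of an even-length list of leaves into their parents
fn pair_hashes(rightmost: &[[u8; 32]]) -> Vec<[u8; 32]> {
  assert_eq!(rightmost.len() % 2, 0);
  rightmost.chunks_exact(2).map(|pair| keccak256(&[pair[0], pair[1]].concat())).collect()
}
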
@@ -326,7 +326,9 @@ impl RctPrunable {
/// Serialize the RctPrunable to a `Vec<u8>`.
pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized, rct_type).unwrap();
self
.write(&mut serialized, rct_type)
.expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

@@ -341,7 +343,13 @@ impl RctPrunable {
Ok(match rct_type {
RctType::AggregateMlsagBorromean => RctPrunable::AggregateMlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
mlsag: Mlsag::read(ring_length, inputs + 1, r)?,
mlsag: Mlsag::read(
ring_length,
inputs.checked_add(1).ok_or_else(|| {
io::Error::other("reading a MLSAG for more inputs than representable")
})?,
r,
)?,
},
RctType::MlsagBorromean => RctPrunable::MlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
@@ -441,7 +449,7 @@ impl RctProofs {
/// Serialize the RctProofs to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}


@@ -53,7 +53,7 @@ impl Input {
/// Serialize the Input to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}

@@ -71,7 +71,7 @@ impl Input {
let amount = if amount == 0 { None } else { Some(amount) };
Input::ToKey {
amount,
key_offsets: read_vec(read_varint, r)?,
key_offsets: read_vec(read_varint, None, r)?,
key_image: read_torsion_free_point(r)?,
}
}
@@ -106,7 +106,7 @@ impl Output {
/// Write the Output to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(8 + 1 + 32);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}

@@ -163,7 +163,7 @@ impl Timelock {
/// Serialize the Timelock to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}

@@ -241,7 +241,7 @@ impl TransactionPrefix {
pub fn read<R: Read>(r: &mut R, version: u64) -> io::Result<TransactionPrefix> {
let additional_timelock = Timelock::read(r)?;

let inputs = read_vec(|r| Input::read(r), r)?;
let inputs = read_vec(|r| Input::read(r), None, r)?;
if inputs.is_empty() {
Err(io::Error::other("transaction had no inputs"))?;
}
@@ -250,17 +250,17 @@ impl TransactionPrefix {
let mut prefix = TransactionPrefix {
additional_timelock,
inputs,
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?,
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), None, r)?,
extra: vec![],
};
prefix.extra = read_vec(read_byte, r)?;
prefix.extra = read_vec(read_byte, None, r)?;
Ok(prefix)
}

fn hash(&self, version: u64) -> [u8; 32] {
let mut buf = vec![];
write_varint(&version, &mut buf).unwrap();
self.write(&mut buf).unwrap();
write_varint(&version, &mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
keccak256(buf)
}
}

@@ -451,7 +451,7 @@ impl<P: PotentiallyPruned> Transaction<P> {
/// Write the Transaction to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(2048);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}

@@ -493,15 +493,16 @@ impl<P: PotentiallyPruned> Transaction<P> {
let mut buf = Vec::with_capacity(512);

// We don't use `self.write` as that may write the signatures (if this isn't pruned)
write_varint(&self.version(), &mut buf).unwrap();
prefix.write(&mut buf).unwrap();
write_varint(&self.version(), &mut buf)
.expect("write failed but <Vec as io::Write> doesn't fail");
prefix.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");

// We explicitly write the signatures ourselves here
let PrunableHash::V1(signatures) = prunable else {
panic!("hashing v1 TX with non-v1 prunable data")
};
for signature in signatures {
signature.write(&mut buf).unwrap();
signature.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
}

keccak256(buf)
@@ -513,7 +514,10 @@ impl<P: PotentiallyPruned> Transaction<P> {

if let Some(proofs) = proofs {
let mut buf = Vec::with_capacity(512);
proofs.base().write(&mut buf, proofs.rct_type()).unwrap();
proofs
.base()
.write(&mut buf, proofs.rct_type())
.expect("write failed but <Vec as io::Write> doesn't fail");
hashes.extend(keccak256(&buf));
} else {
// Serialization of RctBase::Null
@@ -540,7 +544,10 @@ impl Transaction<NotPruned> {
Transaction::V2 { proofs, .. } => {
self.hash_with_prunable_hash(PrunableHash::V2(if let Some(proofs) = proofs {
let mut buf = Vec::with_capacity(1024);
proofs.prunable.write(&mut buf, proofs.rct_type()).unwrap();
proofs
.prunable
.write(&mut buf, proofs.rct_type())
.expect("write failed but <Vec as io::Write> doesn't fail");
keccak256(buf)
} else {
[0; 32]
@@ -563,7 +570,10 @@ impl Transaction<NotPruned> {
Transaction::V2 { proofs, .. } => self.hash_with_prunable_hash({
let Some(proofs) = proofs else { None? };
let mut buf = Vec::with_capacity(1024);
proofs.prunable.signature_write(&mut buf).unwrap();
proofs
.prunable
.signature_write(&mut buf)
.expect("write failed but <Vec as io::Write> doesn't fail");
PrunableHash::V2(keccak256(buf))
}),
})
@@ -599,7 +609,7 @@ impl Transaction<NotPruned> {
blob_size
} else {
blob_size +
Bulletproof::calculate_bp_clawback(
Bulletproof::calculate_clawback(
bp_plus,
match self {
Transaction::V1 { .. } => panic!("v1 transaction was BP(+)"),

@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.80"
rust-version = "1.82"

[package.metadata.docs.rs]
all-features = true

@@ -76,8 +76,10 @@ pub(crate) fn decode(data: &str) -> Option<Vec<u8>> {
break;
}
}
let used_bytes = used_bytes
.expect("chunk of bounded length exhaustively searched but couldn't find matching length");
// Only push on the used bytes
res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes.unwrap()) ..]);
res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes) ..]);
}

Some(res)
@@ -92,11 +94,10 @@ pub(crate) fn encode_check(mut data: Vec<u8>) -> String {

// Decode an arbitrary-length stream of data, with a checksum
pub(crate) fn decode_check(data: &str) -> Option<Vec<u8>> {
if data.len() < CHECKSUM_LEN {
let mut res = decode(data)?;
if res.len() < CHECKSUM_LEN {
None?;
}

let mut res = decode(data)?;
let checksum_pos = res.len() - CHECKSUM_LEN;
if keccak256(&res[.. checksum_pos])[.. CHECKSUM_LEN] != res[checksum_pos ..] {
None?;

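The layout validated here is simply payload || keccak256(payload)[.. CHECKSUM_LEN]; a standalone sketch of the split-and-compare, assuming the 4-byte checksum of Monero's base58 (the sha3 crate stands in for this crate's keccak256 helper):

use sha3::{Digest, Keccak256};

const CHECKSUM_LEN: usize = 4;

// Split decoded bytes into payload and checksum, returning the payload if the checksum matches
fn split_checked(res: &[u8]) -> Option<&[u8]> {
  if res.len() < CHECKSUM_LEN {
    return None;
  }
  let checksum_pos = res.len() - CHECKSUM_LEN;
  let expected = Keccak256::digest(&res[.. checksum_pos]);
  (expected[.. CHECKSUM_LEN] == res[checksum_pos ..]).then(|| &res[.. checksum_pos])
}
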
@@ -357,21 +357,21 @@ pub struct Address<const ADDRESS_BYTES: u128> {

impl<const ADDRESS_BYTES: u128> fmt::Debug for Address<ADDRESS_BYTES> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let hex = |bytes: &[u8]| -> String {
let hex = |bytes: &[u8]| -> Result<String, fmt::Error> {
let mut res = String::with_capacity(2 + (2 * bytes.len()));
res.push_str("0x");
for b in bytes {
write!(&mut res, "{b:02x}").unwrap();
write!(&mut res, "{b:02x}")?;
}
res
Ok(res)
};

fmt
.debug_struct("Address")
.field("network", &self.network)
.field("kind", &self.kind)
.field("spend", &hex(&self.spend.compress().to_bytes()))
.field("view", &hex(&self.view.compress().to_bytes()))
.field("spend", &hex(&self.spend.compress().to_bytes())?)
.field("view", &hex(&self.view.compress().to_bytes())?)
// This is not a real field yet is the most valuable thing to know when debugging
.field("(address)", &self.to_string())
.finish()
@@ -389,7 +389,8 @@ impl<const ADDRESS_BYTES: u128> fmt::Display for Address<ADDRESS_BYTES> {
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind {
let features_uint =
(u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress);
write_varint(&features_uint, &mut data).unwrap();
write_varint(&features_uint, &mut data)
.expect("write failed but <Vec as io::Write> doesn't fail");
}
if let Some(id) = self.kind.payment_id() {
data.extend(id);

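The featured-address byte packs three flags into its low bits (subaddress, payment ID presence, guaranteed); a sketch of the corresponding decode, purely for illustration (the names mirror the flags above, not this crate's API):

// Unpack the features byte of a featured address into its three flags
fn unpack_features(features_uint: u8) -> (bool, bool, bool) {
  let subaddress = (features_uint & 1) == 1;
  let has_payment_id = ((features_uint >> 1) & 1) == 1;
  let guaranteed = ((features_uint >> 2) & 1) == 1;
  (subaddress, has_payment_id, guaranteed)
}

fn main() {
  // guaranteed set, no payment ID, subaddress set: 0b101
  assert_eq!(unpack_features(0b101), (true, false, true));
}
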
@@ -1,46 +0,0 @@
[package]
name = "polyseed"
version = "0.1.0"
description = "Rust implementation of Polyseed"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/polyseed"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.80"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }

thiserror = { version = "1", default-features = false, optional = true }

subtle = { version = "^2.4", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }

sha3 = { version = "0.10", default-features = false }
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }

[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }

[features]
std = [
"std-shims/std",

"thiserror",

"subtle/std",
"zeroize/std",
"rand_core/std",

"sha3/std",
"pbkdf2/std",
]
default = ["std"]
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2024 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,11 +0,0 @@
# Polyseed

Rust implementation of [Polyseed](https://github.com/tevador/polyseed).

This library is usable under no-std when the `std` feature (on by default) is
disabled.

### Cargo Features

- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
@@ -1,473 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]

use core::fmt;
use std_shims::{sync::LazyLock, string::String, collections::HashMap};
#[cfg(feature = "std")]
use std::time::{SystemTime, UNIX_EPOCH};

use subtle::ConstantTimeEq;
use zeroize::{Zeroize, Zeroizing, ZeroizeOnDrop};
use rand_core::{RngCore, CryptoRng};

use sha3::Sha3_256;
use pbkdf2::pbkdf2_hmac;

#[cfg(test)]
mod tests;

// Features
const FEATURE_BITS: u8 = 5;
#[allow(dead_code)]
const INTERNAL_FEATURES: u8 = 2;
const USER_FEATURES: u8 = 3;

const USER_FEATURES_MASK: u8 = (1 << USER_FEATURES) - 1;
const ENCRYPTED_MASK: u8 = 1 << 4;
const RESERVED_FEATURES_MASK: u8 = ((1 << FEATURE_BITS) - 1) ^ ENCRYPTED_MASK;

fn user_features(features: u8) -> u8 {
features & USER_FEATURES_MASK
}

fn polyseed_features_supported(features: u8) -> bool {
(features & RESERVED_FEATURES_MASK) == 0
}

// Dates
const DATE_BITS: u8 = 10;
const DATE_MASK: u16 = (1u16 << DATE_BITS) - 1;
const POLYSEED_EPOCH: u64 = 1635768000; // 1st November 2021 12:00 UTC
const TIME_STEP: u64 = 2629746; // 30.436875 days = 1/12 of the Gregorian year

// After ~85 years, this will roll over.
fn birthday_encode(time: u64) -> u16 {
u16::try_from((time.saturating_sub(POLYSEED_EPOCH) / TIME_STEP) & u64::from(DATE_MASK))
.expect("value masked by 2**10 - 1 didn't fit into a u16")
}

fn birthday_decode(birthday: u16) -> u64 {
POLYSEED_EPOCH + (u64::from(birthday) * TIME_STEP)
}

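// A quick worked example with the constants above (POLYSEED_EPOCH = 1635768000,
// TIME_STEP = 2629746): 1638446400 - 1635768000 = 2678400 seconds, and
// 2678400 / 2629746 = 1 time step, so birthday_encode(1638446400) == 1.
// Decoding returns the *start* of that step: birthday_decode(1) == 1638397746.
// Round-trips therefore agree only to within TIME_STEP seconds, which is why the
// tests later assert `seed.birthday().abs_diff(vector.birthday) < TIME_STEP`.
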
// Polyseed parameters
const SECRET_BITS: usize = 150;

const BITS_PER_BYTE: usize = 8;
const SECRET_SIZE: usize = SECRET_BITS.div_ceil(BITS_PER_BYTE); // 19
const CLEAR_BITS: usize = (SECRET_SIZE * BITS_PER_BYTE) - SECRET_BITS; // 2

// Polyseed calls this CLEAR_MASK and has a very complicated formula for this fundamental
// equivalency
#[allow(clippy::cast_possible_truncation)]
const LAST_BYTE_SECRET_BITS_MASK: u8 = ((1 << (BITS_PER_BYTE - CLEAR_BITS)) - 1) as u8;

const SECRET_BITS_PER_WORD: usize = 10;

// The amount of words in a seed.
const POLYSEED_LENGTH: usize = 16;
// Amount of characters each word must have if trimmed
pub(crate) const PREFIX_LEN: usize = 4;

const POLY_NUM_CHECK_DIGITS: usize = 1;
const DATA_WORDS: usize = POLYSEED_LENGTH - POLY_NUM_CHECK_DIGITS;

// Polynomial
const GF_BITS: usize = 11;
const POLYSEED_MUL2_TABLE: [u16; 8] = [5, 7, 1, 3, 13, 15, 9, 11];

type Poly = [u16; POLYSEED_LENGTH];

fn elem_mul2(x: u16) -> u16 {
if x < 1024 {
return 2 * x;
}
POLYSEED_MUL2_TABLE[usize::from(x % 8)] + (16 * ((x - 1024) / 8))
}

fn poly_eval(poly: &Poly) -> u16 {
// Horner's method at x = 2
let mut result = poly[POLYSEED_LENGTH - 1];
for i in (0 .. (POLYSEED_LENGTH - 1)).rev() {
result = elem_mul2(result) ^ poly[i];
}
result
}

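poly_eval is Horner's rule, with elem_mul2 standing in for multiplication by x in the GF(2^11) checksum field and XOR for field addition; a generic Horner sketch over plain integers for comparison:

// Horner's rule: a_0 + x*(a_1 + x*(a_2 + ...)), folded from the highest coefficient down
fn horner(coeffs: &[u64], x: u64) -> u64 {
  coeffs.iter().rev().fold(0, |acc, c| (acc * x) + c)
}

fn main() {
  // 3 + 2x + x^2 at x = 2: 3 + 4 + 4 = 11
  assert_eq!(horner(&[3, 2, 1], 2), 11);
}
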
// Key gen parameters
const POLYSEED_SALT: &[u8] = b"POLYSEED key";
const POLYSEED_KEYGEN_ITERATIONS: u32 = 10000;

// Polyseed technically supports multiple coins, and the value for Monero is 0
// See: https://github.com/tevador/polyseed/blob/dfb05d8edb682b0e8f743b1b70c9131712ff4157
// /include/polyseed.h#L57
const COIN: u16 = 0;

/// An error when working with a Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum PolyseedError {
/// The seed was invalid.
#[cfg_attr(feature = "std", error("invalid seed"))]
InvalidSeed,
/// The entropy was invalid.
#[cfg_attr(feature = "std", error("invalid entropy"))]
InvalidEntropy,
/// The checksum did not match the data.
#[cfg_attr(feature = "std", error("invalid checksum"))]
InvalidChecksum,
/// Unsupported feature bits were set.
#[cfg_attr(feature = "std", error("unsupported features"))]
UnsupportedFeatures,
}

/// Language options for Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Zeroize)]
pub enum Language {
/// English language option.
English,
/// Spanish language option.
Spanish,
/// French language option.
French,
/// Italian language option.
Italian,
/// Japanese language option.
Japanese,
/// Korean language option.
Korean,
/// Czech language option.
Czech,
/// Portuguese language option.
Portuguese,
/// Simplified Chinese language option.
ChineseSimplified,
/// Traditional Chinese language option.
ChineseTraditional,
}

struct WordList {
words: &'static [&'static str],
has_prefix: bool,
has_accent: bool,
}

impl WordList {
fn new(words: &'static [&'static str], has_prefix: bool, has_accent: bool) -> WordList {
let res = WordList { words, has_prefix, has_accent };
// This is needed for a later unwrap to not fail
assert!(words.len() < usize::from(u16::MAX));
res
}
}

static LANGUAGES: LazyLock<HashMap<Language, WordList>> = LazyLock::new(|| {
HashMap::from([
(Language::Czech, WordList::new(include!("./words/cs.rs"), true, false)),
(Language::French, WordList::new(include!("./words/fr.rs"), true, true)),
(Language::Korean, WordList::new(include!("./words/ko.rs"), false, false)),
(Language::English, WordList::new(include!("./words/en.rs"), true, false)),
(Language::Italian, WordList::new(include!("./words/it.rs"), true, false)),
(Language::Spanish, WordList::new(include!("./words/es.rs"), true, true)),
(Language::Japanese, WordList::new(include!("./words/ja.rs"), false, false)),
(Language::Portuguese, WordList::new(include!("./words/pt.rs"), true, false)),
(
Language::ChineseSimplified,
WordList::new(include!("./words/zh_simplified.rs"), false, false),
),
(
Language::ChineseTraditional,
WordList::new(include!("./words/zh_traditional.rs"), false, false),
),
])
});

/// A Polyseed.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Polyseed {
language: Language,
features: u8,
birthday: u16,
entropy: Zeroizing<[u8; 32]>,
checksum: u16,
}

impl fmt::Debug for Polyseed {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Polyseed").finish_non_exhaustive()
}
}

fn valid_entropy(entropy: &Zeroizing<[u8; 32]>) -> bool {
// Last byte of the entropy should only use certain bits
let mut res =
entropy[SECRET_SIZE - 1].ct_eq(&(entropy[SECRET_SIZE - 1] & LAST_BYTE_SECRET_BITS_MASK));
// Last 13 bytes of the buffer should be unused
for b in SECRET_SIZE .. entropy.len() {
res &= entropy[b].ct_eq(&0);
}
res.into()
}

impl Polyseed {
// TODO: Clean this
fn to_poly(&self) -> Poly {
let mut extra_bits = u32::from(FEATURE_BITS + DATE_BITS);
let extra_val = (u16::from(self.features) << DATE_BITS) | self.birthday;

let mut entropy_idx = 0;
let mut secret_bits = BITS_PER_BYTE;
let mut seed_rem_bits = SECRET_BITS - BITS_PER_BYTE;

let mut poly = [0; POLYSEED_LENGTH];
for i in 0 .. DATA_WORDS {
extra_bits -= 1;

let mut word_bits = 0;
let mut word_val = 0;
while word_bits < SECRET_BITS_PER_WORD {
if secret_bits == 0 {
entropy_idx += 1;
secret_bits = seed_rem_bits.min(BITS_PER_BYTE);
seed_rem_bits -= secret_bits;
}
let chunk_bits = secret_bits.min(SECRET_BITS_PER_WORD - word_bits);
secret_bits -= chunk_bits;
word_bits += chunk_bits;
word_val <<= chunk_bits;
word_val |=
(u16::from(self.entropy[entropy_idx]) >> secret_bits) & ((1u16 << chunk_bits) - 1);
}

word_val <<= 1;
word_val |= (extra_val >> extra_bits) & 1;
poly[POLY_NUM_CHECK_DIGITS + i] = word_val;
}

poly
}

fn from_internal(
language: Language,
masked_features: u8,
encoded_birthday: u16,
entropy: Zeroizing<[u8; 32]>,
) -> Result<Polyseed, PolyseedError> {
if !polyseed_features_supported(masked_features) {
Err(PolyseedError::UnsupportedFeatures)?;
}

if !valid_entropy(&entropy) {
Err(PolyseedError::InvalidEntropy)?;
}

let mut res = Polyseed {
language,
birthday: encoded_birthday,
features: masked_features,
entropy,
checksum: 0,
};
res.checksum = poly_eval(&res.to_poly());
Ok(res)
}

/// Create a new `Polyseed` with specific internals.
///
/// `birthday` is defined in seconds since the epoch.
pub fn from(
language: Language,
features: u8,
birthday: u64,
entropy: Zeroizing<[u8; 32]>,
) -> Result<Polyseed, PolyseedError> {
Self::from_internal(language, user_features(features), birthday_encode(birthday), entropy)
}

/// Create a new `Polyseed`.
///
/// This uses the system's time for the birthday, if available, else 0.
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, language: Language) -> Polyseed {
// Get the birthday
#[cfg(feature = "std")]
let birthday =
SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or(core::time::Duration::ZERO).as_secs();
#[cfg(not(feature = "std"))]
let birthday = 0;

// Derive entropy
let mut entropy = Zeroizing::new([0; 32]);
rng.fill_bytes(entropy.as_mut());
entropy[SECRET_SIZE ..].fill(0);
entropy[SECRET_SIZE - 1] &= LAST_BYTE_SECRET_BITS_MASK;

Self::from(language, 0, birthday, entropy).unwrap()
}

/// Create a new `Polyseed` from a String.
#[allow(clippy::needless_pass_by_value)]
pub fn from_string(lang: Language, seed: Zeroizing<String>) -> Result<Polyseed, PolyseedError> {
// Decode the seed into its polynomial coefficients
let mut poly = [0; POLYSEED_LENGTH];

// Validate words are in the lang word list
let lang_word_list: &WordList = &LANGUAGES[&lang];
for (i, word) in seed.split_whitespace().enumerate() {
// Find the word's index
fn check_if_matches<S: AsRef<str>, I: Iterator<Item = S>>(
has_prefix: bool,
mut lang_words: I,
word: &str,
) -> Option<usize> {
if has_prefix {
// Get the position of the word within the iterator
// Doesn't use starts_with as some words are substrs of others, leading to false
// positives
let mut get_position = || {
lang_words.position(|lang_word| {
let mut lang_word = lang_word.as_ref().chars();
let mut word = word.chars();

let mut res = true;
for _ in 0 .. PREFIX_LEN {
res &= lang_word.next() == word.next();
}
res
})
};
let res = get_position();
// If another word has this prefix, don't call it a match
if get_position().is_some() {
return None;
}
res
} else {
lang_words.position(|lang_word| lang_word.as_ref() == word)
}
}

let Some(coeff) = (if lang_word_list.has_accent {
let ascii = |word: &str| word.chars().filter(char::is_ascii).collect::<String>();
check_if_matches(
lang_word_list.has_prefix,
lang_word_list.words.iter().map(|lang_word| ascii(lang_word)),
&ascii(word),
)
} else {
check_if_matches(lang_word_list.has_prefix, lang_word_list.words.iter(), word)
}) else {
Err(PolyseedError::InvalidSeed)?
};

// WordList asserts the word list length is less than u16::MAX
poly[i] = u16::try_from(coeff).expect("coeff exceeded u16");
}

// xor out the coin
poly[POLY_NUM_CHECK_DIGITS] ^= COIN;

// Validate the checksum
if poly_eval(&poly) != 0 {
Err(PolyseedError::InvalidChecksum)?;
}

// Convert the polynomial into entropy
let mut entropy = Zeroizing::new([0; 32]);

let mut extra = 0;

let mut entropy_idx = 0;
let mut entropy_bits = 0;

let checksum = poly[0];
for mut word_val in poly.into_iter().skip(POLY_NUM_CHECK_DIGITS) {
// Parse the bottom bit, which is one of the bits of extra
// This iterates for less than 16 iters, meaning this won't drop any bits
extra <<= 1;
extra |= word_val & 1;
word_val >>= 1;

// 10 bits per word creates a [8, 2], [6, 4], [4, 6], [2, 8] cycle
// 15 % 4 is 3, leaving 2 bits off, and 152 (19 * 8) - 2 is 150, the amount of bits in the
// secret
let mut word_bits = GF_BITS - 1;
while word_bits > 0 {
if entropy_bits == BITS_PER_BYTE {
entropy_idx += 1;
entropy_bits = 0;
}
let chunk_bits = word_bits.min(BITS_PER_BYTE - entropy_bits);
word_bits -= chunk_bits;
let chunk_mask = (1u16 << chunk_bits) - 1;
if chunk_bits < BITS_PER_BYTE {
entropy[entropy_idx] <<= chunk_bits;
}
entropy[entropy_idx] |=
u8::try_from((word_val >> word_bits) & chunk_mask).expect("chunk exceeded u8");
entropy_bits += chunk_bits;
}
}

let birthday = extra & DATE_MASK;
// extra is contained to u16, and DATE_BITS > 8
let features =
u8::try_from(extra >> DATE_BITS).expect("couldn't convert extra >> DATE_BITS to u8");

let res = Self::from_internal(lang, features, birthday, entropy);
if let Ok(res) = res.as_ref() {
debug_assert_eq!(res.checksum, checksum);
}
res
}

/// When this seed was created, defined in seconds since the epoch.
pub fn birthday(&self) -> u64 {
birthday_decode(self.birthday)
}

/// This seed's features.
pub fn features(&self) -> u8 {
self.features
}

/// This seed's entropy.
pub fn entropy(&self) -> &Zeroizing<[u8; 32]> {
&self.entropy
}

/// The key derived from this seed.
pub fn key(&self) -> Zeroizing<[u8; 32]> {
let mut key = Zeroizing::new([0; 32]);
pbkdf2_hmac::<Sha3_256>(
self.entropy.as_slice(),
POLYSEED_SALT,
POLYSEED_KEYGEN_ITERATIONS,
key.as_mut(),
);
key
}

/// The String representation of this seed.
pub fn to_string(&self) -> Zeroizing<String> {
// Encode the polynomial with the existing checksum
let mut poly = self.to_poly();
poly[0] = self.checksum;

// Embed the coin
poly[POLY_NUM_CHECK_DIGITS] ^= COIN;

// Output words
let mut seed = Zeroizing::new(String::new());
let words = &LANGUAGES[&self.language].words;
for i in 0 .. poly.len() {
seed.push_str(words[usize::from(poly[i])]);
if i < poly.len() - 1 {
seed.push(' ');
}
}

seed
}
}
@@ -1,218 +0,0 @@
use zeroize::Zeroizing;
use rand_core::OsRng;

use crate::*;

#[test]
fn test_polyseed() {
struct Vector {
language: Language,
seed: String,
entropy: String,
birthday: u64,
has_prefix: bool,
has_accent: bool,
}

let vectors = [
Vector {
language: Language::English,
seed: "raven tail swear infant grief assist regular lamp \
duck valid someone little harsh puppy airport language"
.into(),
entropy: "dd76e7359a0ded37cd0ff0f3c829a5ae01673300000000000000000000000000".into(),
birthday: 1638446400,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Spanish,
seed: "eje fin parte célebre tabú pestaña lienzo puma \
prisión hora regalo lengua existir lápiz lote sonoro"
.into(),
entropy: "5a2b02df7db21fcbe6ec6df137d54c7b20fd2b00000000000000000000000000".into(),
birthday: 3118651200,
has_prefix: true,
has_accent: true,
},
Vector {
language: Language::French,
seed: "valable arracher décaler jeudi amusant dresser mener épaissir risible \
prouesse réserve ampleur ajuster muter caméra enchère"
.into(),
entropy: "11cfd870324b26657342c37360c424a14a050b00000000000000000000000000".into(),
birthday: 1679314966,
has_prefix: true,
has_accent: true,
},
Vector {
language: Language::Italian,
seed: "caduco midollo copione meninge isotopo illogico riflesso tartaruga fermento \
olandese normale tristezza episodio voragine forbito achille"
.into(),
entropy: "7ecc57c9b4652d4e31428f62bec91cfd55500600000000000000000000000000".into(),
birthday: 1679316358,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Portuguese,
seed: "caverna custear azedo adeus senador apertada sedoso omitir \
sujeito aurora videira molho cartaz gesso dentista tapar"
.into(),
entropy: "45473063711376cae38f1b3eba18c874124e1d00000000000000000000000000".into(),
birthday: 1679316657,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Czech,
seed: "usmrtit nora dotaz komunita zavalit funkce mzda sotva akce \
vesta kabel herna stodola uvolnit ustrnout email"
.into(),
entropy: "7ac8a4efd62d9c3c4c02e350d32326df37821c00000000000000000000000000".into(),
birthday: 1679316898,
has_prefix: true,
has_accent: false,
},
Vector {
language: Language::Korean,
seed: "전망 선풍기 국제 무궁화 설사 기름 이론적 해안 절망 예선 \
지우개 보관 절망 말기 시각 귀신"
.into(),
entropy: "684663fda420298f42ed94b2c512ed38ddf12b00000000000000000000000000".into(),
birthday: 1679317073,
has_prefix: false,
has_accent: false,
},
Vector {
language: Language::Japanese,
seed: "うちあわせ ちつじょ つごう しはい けんこう とおる てみやげ はんとし たんとう \
といれ おさない おさえる むかう ぬぐう なふだ せまる"
.into(),
entropy: "94e6665518a6286c6e3ba508a2279eb62b771f00000000000000000000000000".into(),
birthday: 1679318722,
has_prefix: false,
has_accent: false,
},
Vector {
language: Language::ChineseTraditional,
seed: "亂 挖 斤 柄 代 圈 枝 轄 魯 論 函 開 勘 番 榮 壁".into(),
entropy: "b1594f585987ab0fd5a31da1f0d377dae5283f00000000000000000000000000".into(),
birthday: 1679426433,
has_prefix: false,
has_accent: false,
},
Vector {
language: Language::ChineseSimplified,
seed: "啊 百 族 府 票 划 伪 仓 叶 虾 借 溜 晨 左 等 鬼".into(),
entropy: "21cdd366f337b89b8d1bc1df9fe73047c22b0300000000000000000000000000".into(),
birthday: 1679426817,
has_prefix: false,
has_accent: false,
},
// The following seed requires the language specification in order to calculate
// a single valid checksum
Vector {
language: Language::Spanish,
seed: "impo sort usua cabi venu nobl oliv clim \
cont barr marc auto prod vaca torn fati"
.into(),
entropy: "dbfce25fe09b68a340e01c62417eeef43ad51800000000000000000000000000".into(),
birthday: 1701511650,
has_prefix: true,
has_accent: true,
},
];

for vector in vectors {
let add_whitespace = |mut seed: String| {
seed.push(' ');
seed
};

let seed_without_accents = |seed: &str| {
seed
.split_whitespace()
.map(|w| w.chars().filter(char::is_ascii).collect::<String>())
.collect::<Vec<_>>()
.join(" ")
};

let trim_seed = |seed: &str| {
let seed_to_trim =
if vector.has_accent { seed_without_accents(seed) } else { seed.to_string() };
seed_to_trim
.split_whitespace()
.map(|w| {
let mut ascii = 0;
let mut to_take = w.len();
for (i, char) in w.chars().enumerate() {
if char.is_ascii() {
ascii += 1;
}
if ascii == PREFIX_LEN {
// +1 to include this character, which puts us at the prefix length
to_take = i + 1;
break;
}
}
w.chars().take(to_take).collect::<String>()
})
.collect::<Vec<_>>()
.join(" ")
};

// String -> Seed
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
let seed = Polyseed::from_string(vector.language, Zeroizing::new(vector.seed.clone())).unwrap();
let trim = trim_seed(&vector.seed);
let add_whitespace = add_whitespace(vector.seed.clone());
let seed_without_accents = seed_without_accents(&vector.seed);

// Make sure a version with added whitespace still works
let whitespaced_seed =
Polyseed::from_string(vector.language, Zeroizing::new(add_whitespace)).unwrap();
assert_eq!(seed, whitespaced_seed);
// Check trimmed versions work
if vector.has_prefix {
let trimmed_seed = Polyseed::from_string(vector.language, Zeroizing::new(trim)).unwrap();
assert_eq!(seed, trimmed_seed);
}
// Check versions without accents work
if vector.has_accent {
let seed_without_accents =
Polyseed::from_string(vector.language, Zeroizing::new(seed_without_accents)).unwrap();
assert_eq!(seed, seed_without_accents);
}

let entropy = Zeroizing::new(hex::decode(vector.entropy).unwrap().try_into().unwrap());
assert_eq!(*seed.entropy(), entropy);
assert!(seed.birthday().abs_diff(vector.birthday) < TIME_STEP);

// Entropy -> Seed
let from_entropy = Polyseed::from(vector.language, 0, seed.birthday(), entropy).unwrap();
assert_eq!(seed.to_string(), from_entropy.to_string());

// Check against ourselves
{
let seed = Polyseed::new(&mut OsRng, vector.language);
println!("{}. seed: {}", line!(), *seed.to_string());
assert_eq!(seed, Polyseed::from_string(vector.language, seed.to_string()).unwrap());
assert_eq!(
seed,
Polyseed::from(vector.language, 0, seed.birthday(), seed.entropy().clone(),).unwrap()
);
}
}
}

#[test]
fn test_invalid_polyseed() {
// This seed includes unsupported features bits and should error on decode
let seed = "include domain claim resemble urban hire lunch bird \
crucial fire best wife ring warm ignore model"
.into();
let res = Polyseed::from_string(Language::English, Zeroizing::new(seed));
assert_eq!(res, Err(PolyseedError::UnsupportedFeatures));
}
(10 file diffs suppressed because they are too large to display — presumably the ten words/*.rs lists referenced by LANGUAGES above.)
@@ -1,41 +0,0 @@
[package]
name = "monero-seed"
version = "0.1.0"
description = "Rust implementation of Monero's seed algorithm"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet/seed"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.80"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }

thiserror = { version = "1", default-features = false, optional = true }

zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
rand_core = { version = "0.6", default-features = false }

curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }

[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
monero-primitives = { path = "../../primitives", default-features = false, features = ["std"] }

[features]
std = [
"std-shims/std",

"thiserror",

"zeroize/std",
"rand_core/std",
]
default = ["std"]
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2024 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,11 +0,0 @@
# Monero Seeds

Rust implementation of Monero's seed algorithm.

This library is usable under no-std when the `std` feature (on by default) is
disabled.

### Cargo Features

- `std` (on by default): Enables `std` (and with it, more efficient internal
  implementations).
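
For example, a minimal round-trip sketch (mirroring the crate's own tests; it assumes `rand_core`'s `OsRng` is available, as it is in the test suite):

```rust
use zeroize::Zeroizing;
use rand_core::OsRng;
use monero_seed::{Seed, Language};

fn main() {
  // Generate a fresh 25-word English seed.
  let seed = Seed::new(&mut OsRng, Language::English);

  // Round-trip it through its string form.
  let restored = Seed::from_string(Language::English, seed.to_string()).unwrap();
  assert_eq!(seed, restored);

  // The 32 bytes of entropy underlying the seed, which Monero also uses as
  // the spend key.
  let _entropy: Zeroizing<[u8; 32]> = seed.entropy();
}
```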
@@ -1,353 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]

use core::{ops::Deref, fmt};
use std_shims::{
  sync::LazyLock,
  vec,
  vec::Vec,
  string::{String, ToString},
  collections::HashMap,
};

use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};

use curve25519_dalek::scalar::Scalar;

#[cfg(test)]
mod tests;

// The number of words in a seed without a checksum.
const SEED_LENGTH: usize = 24;
// The number of words in a seed with a checksum.
const SEED_LENGTH_WITH_CHECKSUM: usize = 25;

/// An error when working with a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum SeedError {
  /// The seed was invalid.
  #[cfg_attr(feature = "std", error("invalid seed"))]
  InvalidSeed,
  /// The checksum did not match the data.
  #[cfg_attr(feature = "std", error("invalid checksum"))]
  InvalidChecksum,
  /// The deprecated English language option was used with a checksum.
  ///
  /// The deprecated English language option did not include a checksum.
  #[cfg_attr(feature = "std", error("deprecated English language option included a checksum"))]
  DeprecatedEnglishWithChecksum,
}

/// Language options.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Zeroize)]
pub enum Language {
  /// Chinese language option.
  Chinese,
  /// English language option.
  English,
  /// Dutch language option.
  Dutch,
  /// French language option.
  French,
  /// Spanish language option.
  Spanish,
  /// German language option.
  German,
  /// Italian language option.
  Italian,
  /// Portuguese language option.
  Portuguese,
  /// Japanese language option.
  Japanese,
  /// Russian language option.
  Russian,
  /// Esperanto language option.
  Esperanto,
  /// Lojban language option.
  Lojban,
  /// The original, and deprecated, English language.
  DeprecatedEnglish,
}

fn trim(word: &str, len: usize) -> Zeroizing<String> {
  Zeroizing::new(word.chars().take(len).collect())
}

struct WordList {
  word_list: &'static [&'static str],
  word_map: HashMap<&'static str, usize>,
  trimmed_word_map: HashMap<String, usize>,
  unique_prefix_length: usize,
}

impl WordList {
  fn new(word_list: &'static [&'static str], prefix_length: usize) -> WordList {
    let mut lang = WordList {
      word_list,
      word_map: HashMap::new(),
      trimmed_word_map: HashMap::new(),
      unique_prefix_length: prefix_length,
    };

    for (i, word) in lang.word_list.iter().enumerate() {
      lang.word_map.insert(word, i);
      lang.trimmed_word_map.insert(trim(word, lang.unique_prefix_length).deref().clone(), i);
    }

    lang
  }
}
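// Each word in a list is uniquely identified by its first `unique_prefix_length`
// characters, which is what lets seeds written with trimmed words be
// unambiguously expanded back into full words via `trimmed_word_map`.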
static LANGUAGES: LazyLock<HashMap<Language, WordList>> = LazyLock::new(|| {
  HashMap::from([
    (Language::Chinese, WordList::new(include!("./words/zh.rs"), 1)),
    (Language::English, WordList::new(include!("./words/en.rs"), 3)),
    (Language::Dutch, WordList::new(include!("./words/nl.rs"), 4)),
    (Language::French, WordList::new(include!("./words/fr.rs"), 4)),
    (Language::Spanish, WordList::new(include!("./words/es.rs"), 4)),
    (Language::German, WordList::new(include!("./words/de.rs"), 4)),
    (Language::Italian, WordList::new(include!("./words/it.rs"), 4)),
    (Language::Portuguese, WordList::new(include!("./words/pt.rs"), 4)),
    (Language::Japanese, WordList::new(include!("./words/ja.rs"), 3)),
    (Language::Russian, WordList::new(include!("./words/ru.rs"), 4)),
    (Language::Esperanto, WordList::new(include!("./words/eo.rs"), 4)),
    (Language::Lojban, WordList::new(include!("./words/jbo.rs"), 4)),
    (Language::DeprecatedEnglish, WordList::new(include!("./words/ang.rs"), 4)),
  ])
});
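// The checksum is the CRC-32 (IEEE 802.3 polynomial, in its reflected form) of
// every word trimmed to the language's unique prefix length, concatenated. The
// resulting value, taken modulo the word count, selects which existing word is
// duplicated as the seed's 25th word.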
fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
  let mut trimmed_words = Zeroizing::new(String::new());
  for w in words {
    *trimmed_words += &trim(w, lang.unique_prefix_length);
  }

  const fn crc32_table() -> [u32; 256] {
    let poly = 0xedb88320u32;

    let mut res = [0; 256];
    let mut i = 0;
    while i < 256 {
      let mut entry = i;
      let mut b = 0;
      while b < 8 {
        let trigger = entry & 1;
        entry >>= 1;
        if trigger == 1 {
          entry ^= poly;
        }
        b += 1;
      }
      res[i as usize] = entry;
      i += 1;
    }

    res
  }
  const CRC32_TABLE: [u32; 256] = crc32_table();

  let trimmed_words = trimmed_words.as_bytes();
  let mut checksum = u32::MAX;
  for i in 0 .. trimmed_words.len() {
    checksum = CRC32_TABLE[usize::from(u8::try_from(checksum % 256).unwrap() ^ trimmed_words[i])] ^
      (checksum >> 8);
  }

  usize::try_from(!checksum).unwrap() % words.len()
}

// Convert a private key to a seed
#[allow(clippy::needless_pass_by_value)]
fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> Seed {
  let bytes = Zeroizing::new(key.to_bytes());

  // Get the language's word list
  let words = &LANGUAGES[&lang].word_list;
  let list_len = u64::try_from(words.len()).unwrap();

  // To store the found words & add the checksum word later.
  let mut seed = Vec::with_capacity(25);

  // Convert to words
  // 4 bytes -> 3 words. 8 digits base 16 -> 3 digits base 1626
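  // Concretely, with L as the word-list length and x the 32-bit segment:
  //   w1 = x % L
  //   w2 = ((x / L) + w1) % L
  //   w3 = ((x / L^2) + w2) % L
  // seed_to_bytes inverts this.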
  let mut segment = [0; 4];
  let mut indices = [0; 4];
  for i in 0 .. 8 {
    // Convert the next 4 bytes to a u32 & get the word indices
    let start = i * 4;
    segment.copy_from_slice(&bytes[start .. (start + 4)]);
    // Actually convert to a u64 so we can add without overflowing
    indices[0] = u64::from(u32::from_le_bytes(segment));
    indices[1] = indices[0];
    indices[0] /= list_len;
    indices[2] = indices[0] + indices[1];
    indices[0] /= list_len;
    indices[3] = indices[0] + indices[2];

    // Append the words to the seed
    for i in indices.iter().skip(1) {
      let word = usize::try_from(i % list_len).unwrap();
      seed.push(Zeroizing::new(words[word].to_string()));
    }
  }
  segment.zeroize();
  indices.zeroize();

  // Create a checksum word for all languages except the deprecated English one
  if lang != Language::DeprecatedEnglish {
    let checksum = seed[checksum_index(&seed, &LANGUAGES[&lang])].clone();
    seed.push(checksum);
  }

  let mut res = Zeroizing::new(String::new());
  for (i, word) in seed.iter().enumerate() {
    if i != 0 {
      *res += " ";
    }
    *res += word;
  }
  Seed(lang, res)
}

// Convert a seed to bytes
fn seed_to_bytes(lang: Language, words: &str) -> Result<Zeroizing<[u8; 32]>, SeedError> {
  // Get the seed's words
  let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
  if (words.len() != SEED_LENGTH) && (words.len() != SEED_LENGTH_WITH_CHECKSUM) {
    panic!("invalid seed passed to seed_to_bytes");
  }

  let has_checksum = words.len() == SEED_LENGTH_WITH_CHECKSUM;
  if has_checksum && lang == Language::DeprecatedEnglish {
    Err(SeedError::DeprecatedEnglishWithChecksum)?;
  }

  // Validate words are in the language word list
  let lang_word_list: &WordList = &LANGUAGES[&lang];
  let matched_indices = (|| {
    let has_checksum = words.len() == SEED_LENGTH_WITH_CHECKSUM;
    let mut matched_indices = Zeroizing::new(vec![]);

    // Iterate through all the words and see if they're all present
    for word in &words {
      let trimmed = trim(word, lang_word_list.unique_prefix_length);
      let word = if has_checksum { &trimmed } else { word };

      if let Some(index) = if has_checksum {
        lang_word_list.trimmed_word_map.get(word.deref())
      } else {
        lang_word_list.word_map.get(&word.as_str())
      } {
        matched_indices.push(*index);
      } else {
        Err(SeedError::InvalidSeed)?;
      }
    }

    if has_checksum {
      // Exclude the last word when calculating the checksum.
      let last_word = words.last().unwrap().clone();
      let checksum = words[checksum_index(&words[.. words.len() - 1], lang_word_list)].clone();

      // Check the trimmed checksum and trimmed last word line up
      if trim(&checksum, lang_word_list.unique_prefix_length) !=
        trim(&last_word, lang_word_list.unique_prefix_length)
      {
        Err(SeedError::InvalidChecksum)?;
      }
    }

    Ok(matched_indices)
  })()?;

  // Convert the word indices back to bytes
  let mut res = Zeroizing::new([0; 32]);
  let mut indices = Zeroizing::new([0; 4]);
  for i in 0 .. 8 {
    // Read 3 indices at a time
    let i3 = i * 3;
    indices[1] = matched_indices[i3];
    indices[2] = matched_indices[i3 + 1];
    indices[3] = matched_indices[i3 + 2];

    let inner = |i| {
      let mut base = (lang_word_list.word_list.len() - indices[i] + indices[i + 1]) %
        lang_word_list.word_list.len();
      // Shift the index over
      for _ in 0 .. i {
        base *= lang_word_list.word_list.len();
      }
      base
    };
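    // Reconstructs x = w1 + L*((w2 - w1) mod L) + L^2*((w3 - w2) mod L),
    // inverting the per-segment encoding performed in key_to_seed.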
    // Set the last index
    indices[0] = indices[1] + inner(1) + inner(2);
    if (indices[0] % lang_word_list.word_list.len()) != indices[1] {
      Err(SeedError::InvalidSeed)?;
    }

    let pos = i * 4;
    let mut bytes = u32::try_from(indices[0]).unwrap().to_le_bytes();
    res[pos .. (pos + 4)].copy_from_slice(&bytes);
    bytes.zeroize();
  }

  Ok(res)
}

/// A Monero seed.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct Seed(Language, Zeroizing<String>);

impl fmt::Debug for Seed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.debug_struct("Seed").finish_non_exhaustive()
  }
}

impl Seed {
  /// Create a new seed.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Seed {
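    // Sample 64 bytes and reduce them modulo the group order; the wide
    // reduction keeps the resulting scalar's distribution uniform.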
    let mut scalar_bytes = Zeroizing::new([0; 64]);
    rng.fill_bytes(scalar_bytes.as_mut());
    key_to_seed(lang, Zeroizing::new(Scalar::from_bytes_mod_order_wide(scalar_bytes.deref())))
  }

  /// Parse a seed from a string.
  #[allow(clippy::needless_pass_by_value)]
  pub fn from_string(lang: Language, words: Zeroizing<String>) -> Result<Seed, SeedError> {
    let entropy = seed_to_bytes(lang, &words)?;

    // Make sure this is a valid scalar
    let scalar = Scalar::from_canonical_bytes(*entropy);
    if scalar.is_none().into() {
      Err(SeedError::InvalidSeed)?;
    }
    let mut scalar = scalar.unwrap();
    scalar.zeroize();

    // Call from_entropy so a trimmed seed becomes a full seed
    Ok(Self::from_entropy(lang, entropy).unwrap())
  }

  /// Create a seed from entropy.
  #[allow(clippy::needless_pass_by_value)]
  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Seed> {
    Option::from(Scalar::from_canonical_bytes(*entropy))
      .map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
  }

  /// Convert a seed to a string.
  pub fn to_string(&self) -> Zeroizing<String> {
    self.1.clone()
  }

  /// Return the entropy underlying this seed.
  pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
    seed_to_bytes(self.0, &self.1).unwrap()
  }
}
@@ -1,234 +0,0 @@
use zeroize::Zeroizing;
use rand_core::OsRng;

use curve25519_dalek::scalar::Scalar;

use monero_primitives::keccak256;

use crate::*;

#[test]
fn test_original_seed() {
  struct Vector {
    language: Language,
    seed: String,
    spend: String,
    view: String,
  }

  let vectors = [
    Vector {
      language: Language::Chinese,
      seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
      spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
      view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
    },
    Vector {
      language: Language::English,
      seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
             abnormal memoir nylon mostly building shrugged online ember northern \
             ruby woes dauntless boil family illness inroads northern"
        .into(),
      spend: "c0af65c0dd837e666b9d0dfed62745f4df35aed7ea619b2798a709f0fe545403".into(),
      view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
    },
    Vector {
      language: Language::Dutch,
      seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
             ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
             wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
        .into(),
      spend: "e2d2873085c447c2bc7664222ac8f7d240df3aeac137f5ff2022eaa629e5b10a".into(),
      view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
    },
    Vector {
      language: Language::French,
      seed: "poids vaseux tarte bazar poivre effet entier nuance \
             sensuel ennui pacte osselet poudre battre alibi mouton \
             stade paquet pliage gibier type question position projet pliage"
        .into(),
      spend: "2dd39ff1a4628a94b5c2ec3e42fb3dfe15c2b2f010154dc3b3de6791e805b904".into(),
      view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
    },
    Vector {
      language: Language::Spanish,
      seed: "minero ocupar mirar evadir octubre cal logro miope \
             opaco disco ancla litio clase cuello nasal clase \
             fiar avance deseo mente grumo negro cordón croqueta clase"
        .into(),
      spend: "ae2c9bebdddac067d73ec0180147fc92bdf9ac7337f1bcafbbe57dd13558eb02".into(),
      view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
    },
    Vector {
      language: Language::German,
      seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
             Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
             Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
        .into(),
      spend: "79801b7a1b9796856e2397d862a113862e1fdc289a205e79d8d70995b276db06".into(),
      view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
    },
    Vector {
      language: Language::Italian,
      seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
             forzare meritare litigare lezione segreto evasione votare buio \
             licenza cliente dorso natale crescere vento tutelare vetta evasione"
        .into(),
      spend: "5e7fd774eb00fa5877e2a8b4dc9c7ffe111008a3891220b56a6e49ac816d650a".into(),
      view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
    },
    Vector {
      language: Language::Portuguese,
      seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
             iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
             cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
        .into(),
      spend: "13b3115f37e35c6aa1db97428b897e584698670c1b27854568d678e729200c0f".into(),
      view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
    },
    Vector {
      language: Language::Japanese,
      seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
             かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
             おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
        .into(),
      spend: "c56e895cdb13007eda8399222974cdbab493640663804b93cbef3d8c3df80b0b".into(),
      view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
    },
    Vector {
      language: Language::Russian,
      seed: "шатер икра нация ехать получать инерция доза реальный \
             рыжий таможня лопата душа веселый клетка атлас лекция \
             обгонять паек наивный лыжный дурак стать ежик задача паек"
        .into(),
      spend: "7cb5492df5eb2db4c84af20766391cd3e3662ab1a241c70fc881f3d02c381f05".into(),
      view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
    },
    Vector {
      language: Language::Esperanto,
      seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
             pudro incidento kumuluso ikono smirgi hirundo uretro krii \
             sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
        .into(),
      spend: "82ebf0336d3b152701964ed41df6b6e9a035e57fc98b84039ed0bd4611c58904".into(),
      view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
    },
    Vector {
      language: Language::Lojban,
      seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
             mlatu xedja muvgau palpi xindo sfubu ciste cinri \
             blabi darno dembi janli blabi fenki bukpu burcu blabi"
        .into(),
      spend: "e4f8c6819ab6cf792cebb858caabac9307fd646901d72123e0367ebc0a79c200".into(),
      view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
    },
    Vector {
      language: Language::DeprecatedEnglish,
      seed: "glorious especially puff son moment add youth nowhere \
             throw glide grip wrong rhythm consume very swear \
             bitter heavy eventually begin reason flirt type unable"
        .into(),
      spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
      view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
    },
    // The following seeds require the language specification in order to calculate
    // a single valid checksum
    Vector {
      language: Language::Spanish,
      seed: "pluma laico atraer pintor peor cerca balde buscar \
             lancha batir nulo reloj resto gemelo nevera poder columna gol \
             oveja latir amplio bolero feliz fuerza nevera"
        .into(),
      spend: "30303983fc8d215dd020cc6b8223793318d55c466a86e4390954f373fdc7200a".into(),
      view: "97c649143f3c147ba59aa5506cc09c7992c5c219bb26964442142bf97980800e".into(),
    },
    Vector {
      language: Language::Spanish,
      seed: "pluma pluma pluma pluma pluma pluma pluma pluma \
             pluma pluma pluma pluma pluma pluma pluma pluma \
             pluma pluma pluma pluma pluma pluma pluma pluma pluma"
        .into(),
      spend: "b4050000b4050000b4050000b4050000b4050000b4050000b4050000b4050000".into(),
      view: "d73534f7912b395eb70ef911791a2814eb6df7ce56528eaaa83ff2b72d9f5e0f".into(),
    },
    Vector {
      language: Language::English,
      seed: "plus plus plus plus plus plus plus plus \
             plus plus plus plus plus plus plus plus \
             plus plus plus plus plus plus plus plus plus"
        .into(),
      spend: "3b0400003b0400003b0400003b0400003b0400003b0400003b0400003b040000".into(),
      view: "43a8a7715eed11eff145a2024ddcc39740255156da7bbd736ee66a0838053a02".into(),
    },
    Vector {
      language: Language::Spanish,
      seed: "audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio audio"
        .into(),
      spend: "ba000000ba000000ba000000ba000000ba000000ba000000ba000000ba000000".into(),
      view: "1437256da2c85d029b293d8c6b1d625d9374969301869b12f37186e3f906c708".into(),
    },
    Vector {
      language: Language::English,
      seed: "audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio \
             audio audio audio audio audio audio audio audio audio"
        .into(),
      spend: "7900000079000000790000007900000079000000790000007900000079000000".into(),
      view: "20bec797ab96780ae6a045dd816676ca7ed1d7c6773f7022d03ad234b581d600".into(),
    },
  ];
  for vector in vectors {
    fn trim_by_lang(word: &str, lang: Language) -> String {
      if lang != Language::DeprecatedEnglish {
        word.chars().take(LANGUAGES[&lang].unique_prefix_length).collect()
      } else {
        word.to_string()
      }
    }

    let trim_seed = |seed: &str| {
      seed
        .split_whitespace()
        .map(|word| trim_by_lang(word, vector.language))
        .collect::<Vec<_>>()
        .join(" ")
    };

    // Test against Monero
    {
      println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
      let seed = Seed::from_string(vector.language, Zeroizing::new(vector.seed.clone())).unwrap();
      let trim = trim_seed(&vector.seed);
      assert_eq!(seed, Seed::from_string(vector.language, Zeroizing::new(trim)).unwrap());

      let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
      // For original seeds, Monero directly uses the entropy as the spend key
      assert_eq!(
        Option::<Scalar>::from(Scalar::from_canonical_bytes(*seed.entropy())),
        Option::<Scalar>::from(Scalar::from_canonical_bytes(spend)),
      );

      let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
      // Monero then derives the view key as H(spend)
      assert_eq!(
        Scalar::from_bytes_mod_order(keccak256(spend)),
        Scalar::from_canonical_bytes(view).unwrap()
      );

      assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
    }

    // Test against ourselves
    {
      let seed = Seed::new(&mut OsRng, vector.language);
      println!("{}. seed: {}", line!(), *seed.to_string());
      let trim = trim_seed(&seed.to_string());
      assert_eq!(seed, Seed::from_string(vector.language, Zeroizing::new(trim)).unwrap());
      assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
      assert_eq!(seed, Seed::from_string(vector.language, seed.to_string()).unwrap());
    }
  }
}