57 Commits

Author SHA1 Message Date
Luke Parker
e3809b2ff1 Remove unnecessary edits to Docker config in an attempt to fix the CI 2025-08-12 01:27:28 -04:00
Luke Parker
fd2d8b4f0a Use Rust 1.89 when installing bins via cargo, version pin svm-rs
svm-rs just released a new version requiring 1.89 to compile. This continues the
process of not installing _any_ software with 1.85, to minimize how many
toolchains we have in use.
2025-08-12 01:27:28 -04:00
Luke Parker
bc81614894 Attempt Docker 24 again 2025-08-12 01:27:28 -04:00
Luke Parker
8df5aa2e2d Forward docker stderr to stdout in case stderr is being dropped for some reason 2025-08-12 01:27:28 -04:00
Luke Parker
b000740470 Docker 25 since 24 doesn't have an active tag anymore 2025-08-12 01:27:28 -04:00
Luke Parker
b9f554111d Attempt to use Docker 24
Long-shot premised on an old forum post on how downgrading to Docker 24 solved
their instance of the error we face, though our conditions for it are
presumably different.
2025-08-12 01:27:28 -04:00
Luke Parker
354c408e3e Stop using an older version of Docker 2025-08-12 01:27:28 -04:00
Luke Parker
df3b60376a Restore Debian 12 Bookworm over Debian 11 Bullseye 2025-08-12 01:27:28 -04:00
Luke Parker
8d209c652e Add missing "-4" arguments to wget 2025-08-12 01:27:28 -04:00
Luke Parker
9ddad794b4 Use wget -4 for the same reason as the prior commit 2025-08-12 01:27:28 -04:00
Luke Parker
b934e484cc Replace busybox wget with wget on alpine to attempt to resolve DNS issues
See https://github.com/alpinelinux/docker-alpine/issues/155.
2025-08-12 01:27:28 -04:00
Luke Parker
f8aee9b3c8 Add overflow-checks = true recommendation to monero-serai 2025-08-12 01:27:28 -04:00
Luke Parker
f51d77d26a Fix tweaked Substrate connection code in serai-client tests 2025-08-12 01:27:28 -04:00
Luke Parker
0780deb643 Use three separate commands within the Bitcoin Dockerfile to download the release
Attempts to debug which command is failing, as right now, the command as a whole is failing within the CI.
2025-08-12 01:27:28 -04:00
Luke Parker
75c38560f4 Bookworm -> Bullseye, except for the runtime 2025-08-12 01:27:28 -04:00
Luke Parker
9f1c5268a5 Attempt downgrading Docker from 27 to 26 2025-08-12 01:27:28 -04:00
Luke Parker
35b113768b Attempt downgrading docker from .28 to .27 2025-08-12 01:27:28 -04:00
Luke Parker
f2595c4939 Tweak how substrate-client tests wait to connect to the Monero node 2025-08-12 01:27:28 -04:00
Luke Parker
8fcfa6d3d5 Add dedicated error for when amounts aren't representable within a u64
Fixes the issue where _inputs_ could still overflow u64::MAX and cause a panic.
2025-08-12 01:27:28 -04:00
Luke Parker
54c9d19726 Have docker install set host 2025-08-12 01:27:28 -04:00
Luke Parker
25324c3cd5 Add uidmap dependency for rootless Docker 2025-08-12 01:27:28 -04:00
Luke Parker
ecb7df85b0 if: runner.os == 'Linux', with single quotes 2025-08-12 01:27:28 -04:00
Luke Parker
68c7acdbef Attempt using rootless Docker in CI via the setup-docker-action
Restores using ubuntu-latest.

Basically, at some point in the last year the existing Docker e2e tests started
failing. I'm unclear if this is an issue with the OS, the docker packages, or
what. This just tries to find a solution.
2025-08-12 01:27:28 -04:00
Luke Parker
8b60feed92 Normalize FROM AS casing in Dockerfiles 2025-08-12 01:27:28 -04:00
Luke Parker
5c895efcd0 Downgrade tests requiring Docker from Ubuntu latest to Ubuntu 22.04
Attempts to resolve containers immediately exiting for some specific test runs.
2025-08-12 01:27:28 -04:00
Luke Parker
60e55656aa deny --hide-inclusion-graph 2025-08-12 01:27:28 -04:00
Luke Parker
9536282418 Update which deb archive to use within the runtime Dockerfile 2025-08-12 01:27:28 -04:00
Luke Parker
8297d0679d Update substrate to one with a properly defined panic handler as of modern Rust 2025-08-12 01:27:28 -04:00
Luke Parker
d9f854b08a Attempt to fix install of clang within runtime Dockerfile 2025-08-12 01:27:28 -04:00
Luke Parker
8aaf7f7dc6 Remove (presumably) unnecessary command to explicitly install python 2025-08-12 01:27:28 -04:00
Luke Parker
ce447558ac Update Rust versions used in orchestration 2025-08-12 01:27:28 -04:00
Luke Parker
fc850da30e Missing --allow-remove-essential flag 2025-08-12 01:27:28 -04:00
Luke Parker
d6f6cf1965 Attempt to force remove shim-signed to resolve 'unmet dependencies' issues with shim-signed 2025-08-12 01:27:28 -04:00
Luke Parker
4438b51881 Expand python packages explicitly installed 2025-08-12 01:27:28 -04:00
Luke Parker
6ae0d9fad7 Install cargo deny with Rust 1.85 and pin its version 2025-08-12 01:27:28 -04:00
Luke Parker
ad08b410a8 Pin cargo-machete to 0.8.0 to prevent other unexpected CI failures 2025-08-12 01:27:28 -04:00
Luke Parker
ec3cfd3ab7 Explicitly install python3 after removing various unnecessary packages 2025-08-12 01:27:28 -04:00
Luke Parker
01eb2daa0b Update outdated version of actions/cache 2025-08-12 01:27:28 -04:00
Luke Parker
885000f970 Add update, upgrade, fix-missing call to Ubuntu build dependencies
Attempts to fix a CI failure for some misconfiguration...
2025-08-12 01:27:28 -04:00
Luke Parker
4be506414b Install cargo machete with Rust 1.85
cargo machete now uses Rust's 2024 edition, and 1.85 was the first to ship it.
2025-08-12 01:27:28 -04:00
Luke Parker
1143d84e1d Remove msbuild from packages to remove when the CI starts
Apparently, it's no longer installed by default.
2025-08-12 01:27:28 -04:00
Luke Parker
336922101f Further harden decoy selection
It risked panicking if a non-monotonic distribution was returned. While the
provided RPC code won't return non-monotonic distributions, users are allowed
to define their own implementations and override the provided method. Said
implementations could omit this required check.
2025-08-12 01:27:28 -04:00
Luke Parker
ffa033d978 Clarify transcripting for Clsag::verify, Mlsag::verify, as with Clsag::sign 2025-08-12 01:27:28 -04:00
Luke Parker
23f986f57a Tweak the Substrate runtime as required by the Rust version bump performed 2025-08-12 01:27:28 -04:00
Luke Parker
bb726b58af Fix #654 2025-08-12 01:27:28 -04:00
Luke Parker
387615705c Fix #643 2025-08-12 01:27:28 -04:00
Luke Parker
c7f825a192 Rename Bulletproof::calculate_bp_clawback to Bulletproof::calculate_clawback 2025-08-12 01:27:28 -04:00
Luke Parker
d363b1c173 Fix #630 2025-08-12 01:27:28 -04:00
Luke Parker
d5077ae966 Respond to 13.1.1.
Uses Zeroizing for username/password in monero-simple-request-rpc.
2025-08-12 01:27:28 -04:00
Luke Parker
188fcc3cb4 Remove potentially-failing unchecked arithmetic operations for ones which error
In response to 9.13.3.

Requires a bump to Rust 1.82 to take advantage of `Option::is_none_or`.
2025-08-12 01:27:28 -04:00
Luke Parker
cbab9486c6 Clarify messages in non-debug assertions 2025-08-12 01:27:28 -04:00
Luke Parker
a5f4c450c6 Response to usage of unwrap in non-test code
This commit replaces all usage of `unwrap` with `expect` within
`networks/monero`, clarifying why the panic risked is unreachable. This commit
also replaces some uses of `unwrap` with solutions which are guaranteed not to
fail.

Notably, compilation on 128-bit systems is prevented, ensuring
`u64::try_from(usize::MAX)` will never panic at runtime.

Slight breaking changes are additionally included as necessary to massage out
some avoidable panics.
2025-08-12 01:27:28 -04:00
Luke Parker
4f65a0b147 Remove Clone from ClsagMultisigMask{Sender, Receiver}
This had ill-defined properties on Clone, as a mask could be sent multiple times
(unintended) and multiple algorithms may receive the same mask from a singular
sender.

Requires removing the Clone bound within modular-frost and expanding the test
helpers accordingly.

This was not raised in the audit yet upon independent review.
2025-08-12 01:27:28 -04:00
Luke Parker
feb18d64a7 Respond to 2 3
We now use `FrostError::InternalError` instead of a panic to represent the mask
not being set.
2025-08-12 01:27:28 -04:00
Luke Parker
cb1e6535cb Respond to 2 2 2025-08-12 01:27:28 -04:00
Luke Parker
6b8cf6653a Respond to 1.1 A2 (also cited as 2 1)
`read_vec` was unbounded. It now accepts an optional bound. In some places, we
are able to define and provide a bound (Bulletproofs(+)' `L` and `R` vectors).
In others, we cannot (the amount of inputs within a transaction, which is not
subject to any rule in the current consensus other than the total transaction
size limit). Usage of `None` in those locations preserves the existing
behavior.
2025-08-12 01:27:28 -04:00
Luke Parker
b426bfcfe8 Respond to 1.1 A1 2025-08-12 01:27:28 -04:00
66 changed files with 852 additions and 489 deletions

View File

@@ -12,7 +12,7 @@ runs:
steps:
- name: Bitcoin Daemon Cache
id: cache-bitcoind
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
with:
path: bitcoin.tar.gz
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

View File

@@ -7,13 +7,12 @@ runs:
- name: Remove unused packages
shell: bash
run: |
sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
sudo apt remove -y --allow-remove-essential -f shim-signed
# This command would fail, due to shim-signed having unmet dependencies, hence its removal
sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
sudo apt autoremove -y
sudo apt clean
docker system prune -a --volumes
if: runner.os == 'Linux'
- name: Remove unused packages
@@ -41,9 +40,34 @@ runs:
- name: Install solc
shell: bash
run: |
cargo install svm-rs
cargo +1.89 install svm-rs --version =0.5.18
svm install 0.8.25
svm use 0.8.25
- name: Remove preinstalled Docker
shell: bash
run: |
docker system prune -a --volumes
sudo apt remove -y *docker*
# Install uidmap which will be required for the explicitly installed Docker
sudo apt install uidmap
if: runner.os == 'Linux'
- name: Update system dependencies
shell: bash
run: |
sudo apt update -y
sudo apt upgrade -y
sudo apt autoremove -y
sudo apt clean
if: runner.os == 'Linux'
- name: Install rootless Docker
uses: docker/setup-docker-action@b60f85385d03ac8acfca6d9996982511d8620a19
with:
rootless: true
set-host: true
if: runner.os == 'Linux'
# - name: Cache Rust
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43

View File

@@ -12,7 +12,7 @@ runs:
steps:
- name: Monero Wallet RPC Cache
id: cache-monero-wallet-rpc
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
with:
path: monero-wallet-rpc
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

View File

@@ -12,7 +12,7 @@ runs:
steps:
- name: Monero Daemon Cache
id: cache-monerod
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
with:
path: /usr/bin/monerod
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

View File

@@ -1 +1 @@
nightly-2024-07-01
nightly-2024-09-01

View File

@@ -12,13 +12,13 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Advisory Cache
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
- name: Install cargo deny
run: cargo install --locked cargo-deny
run: cargo +1.89 install cargo-deny --version =0.18.3
- name: Run cargo deny
run: cargo deny -L error --all-features check
run: cargo deny -L error --all-features check --hide-inclusion-graph

View File

@@ -46,16 +46,16 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Advisory Cache
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
- name: Install cargo deny
run: cargo install --locked cargo-deny
run: cargo +1.89 install cargo-deny --version =0.18.3
- name: Run cargo deny
run: cargo deny -L error --all-features check
run: cargo deny -L error --all-features check --hide-inclusion-graph
fmt:
runs-on: ubuntu-latest
@@ -79,5 +79,5 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Verify all dependencies are in use
run: |
cargo install cargo-machete
cargo machete
cargo +1.89 install cargo-machete --version =0.8.0
cargo +1.89 machete

211
Cargo.lock generated
View File

@@ -2660,7 +2660,7 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "fork-tree"
version = "3.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
]
@@ -2683,7 +2683,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa"
[[package]]
name = "frame-benchmarking"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support",
"frame-support-procedural",
@@ -2708,7 +2708,7 @@ dependencies = [
[[package]]
name = "frame-executive"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support",
"frame-system",
@@ -2737,7 +2737,7 @@ dependencies = [
[[package]]
name = "frame-support"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"bitflags 1.3.2",
"environmental",
@@ -2770,7 +2770,7 @@ dependencies = [
[[package]]
name = "frame-support-procedural"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"Inflector",
"cfg-expr",
@@ -2788,7 +2788,7 @@ dependencies = [
[[package]]
name = "frame-support-procedural-tools"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support-procedural-tools-derive",
"proc-macro-crate 1.3.1",
@@ -2800,7 +2800,7 @@ dependencies = [
[[package]]
name = "frame-support-procedural-tools-derive"
version = "3.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"proc-macro2",
"quote",
@@ -2810,7 +2810,7 @@ dependencies = [
[[package]]
name = "frame-system"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"cfg-if",
"frame-support",
@@ -2829,7 +2829,7 @@ dependencies = [
[[package]]
name = "frame-system-rpc-runtime-api"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"sp-api",
@@ -2838,7 +2838,7 @@ dependencies = [
[[package]]
name = "frame-try-runtime"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support",
"parity-scale-codec",
@@ -5008,6 +5008,7 @@ dependencies = [
"monero-rpc",
"simple-request",
"tokio",
"zeroize",
]
[[package]]
@@ -5507,7 +5508,7 @@ dependencies = [
[[package]]
name = "pallet-authorship"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support",
"frame-system",
@@ -5521,7 +5522,7 @@ dependencies = [
[[package]]
name = "pallet-babe"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-benchmarking",
"frame-support",
@@ -5545,7 +5546,7 @@ dependencies = [
[[package]]
name = "pallet-grandpa"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-benchmarking",
"frame-support",
@@ -5568,7 +5569,7 @@ dependencies = [
[[package]]
name = "pallet-session"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support",
"frame-system",
@@ -5589,7 +5590,7 @@ dependencies = [
[[package]]
name = "pallet-timestamp"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-benchmarking",
"frame-support",
@@ -5607,7 +5608,7 @@ dependencies = [
[[package]]
name = "pallet-transaction-payment"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-support",
"frame-system",
@@ -5623,7 +5624,7 @@ dependencies = [
[[package]]
name = "pallet-transaction-payment-rpc"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"jsonrpsee",
"pallet-transaction-payment-rpc-runtime-api",
@@ -5639,7 +5640,7 @@ dependencies = [
[[package]]
name = "pallet-transaction-payment-rpc-runtime-api"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"pallet-transaction-payment",
"parity-scale-codec",
@@ -6825,7 +6826,7 @@ dependencies = [
[[package]]
name = "sc-allocator"
version = "4.1.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"log",
"sp-core",
@@ -6836,7 +6837,7 @@ dependencies = [
[[package]]
name = "sc-authority-discovery"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"futures",
@@ -6864,7 +6865,7 @@ dependencies = [
[[package]]
name = "sc-basic-authorship"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"futures",
"futures-timer",
@@ -6887,7 +6888,7 @@ dependencies = [
[[package]]
name = "sc-block-builder"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"sc-client-api",
@@ -6902,7 +6903,7 @@ dependencies = [
[[package]]
name = "sc-chain-spec"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"memmap2",
"sc-chain-spec-derive",
@@ -6921,7 +6922,7 @@ dependencies = [
[[package]]
name = "sc-chain-spec-derive"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"proc-macro-crate 1.3.1",
"proc-macro2",
@@ -6932,7 +6933,7 @@ dependencies = [
[[package]]
name = "sc-cli"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"chrono",
@@ -6971,7 +6972,7 @@ dependencies = [
[[package]]
name = "sc-client-api"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"fnv",
"futures",
@@ -6996,7 +6997,7 @@ dependencies = [
[[package]]
name = "sc-client-db"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"hash-db",
"kvdb",
@@ -7022,7 +7023,7 @@ dependencies = [
[[package]]
name = "sc-consensus"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"futures",
@@ -7047,7 +7048,7 @@ dependencies = [
[[package]]
name = "sc-consensus-babe"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"fork-tree",
@@ -7083,7 +7084,7 @@ dependencies = [
[[package]]
name = "sc-consensus-epochs"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"fork-tree",
"parity-scale-codec",
@@ -7096,7 +7097,7 @@ dependencies = [
[[package]]
name = "sc-consensus-grandpa"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"ahash",
"array-bytes",
@@ -7137,7 +7138,7 @@ dependencies = [
[[package]]
name = "sc-consensus-slots"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"futures",
@@ -7160,7 +7161,7 @@ dependencies = [
[[package]]
name = "sc-executor"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"parking_lot 0.12.3",
@@ -7182,7 +7183,7 @@ dependencies = [
[[package]]
name = "sc-executor-common"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"sc-allocator",
"sp-maybe-compressed-blob",
@@ -7194,7 +7195,7 @@ dependencies = [
[[package]]
name = "sc-executor-wasmtime"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"anyhow",
"cfg-if",
@@ -7211,7 +7212,7 @@ dependencies = [
[[package]]
name = "sc-informant"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"anstyle",
"futures",
@@ -7227,7 +7228,7 @@ dependencies = [
[[package]]
name = "sc-keystore"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"parking_lot 0.12.3",
@@ -7241,7 +7242,7 @@ dependencies = [
[[package]]
name = "sc-network"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"async-channel",
@@ -7283,7 +7284,7 @@ dependencies = [
[[package]]
name = "sc-network-bitswap"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-channel",
"cid",
@@ -7303,7 +7304,7 @@ dependencies = [
[[package]]
name = "sc-network-common"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"bitflags 1.3.2",
@@ -7320,7 +7321,7 @@ dependencies = [
[[package]]
name = "sc-network-gossip"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"ahash",
"futures",
@@ -7339,7 +7340,7 @@ dependencies = [
[[package]]
name = "sc-network-light"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"async-channel",
@@ -7360,7 +7361,7 @@ dependencies = [
[[package]]
name = "sc-network-sync"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"async-channel",
@@ -7394,7 +7395,7 @@ dependencies = [
[[package]]
name = "sc-network-transactions"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"futures",
@@ -7412,7 +7413,7 @@ dependencies = [
[[package]]
name = "sc-offchain"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"bytes",
"fnv",
@@ -7443,7 +7444,7 @@ dependencies = [
[[package]]
name = "sc-proposer-metrics"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"log",
"substrate-prometheus-endpoint",
@@ -7452,7 +7453,7 @@ dependencies = [
[[package]]
name = "sc-rpc"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"futures",
"jsonrpsee",
@@ -7482,7 +7483,7 @@ dependencies = [
[[package]]
name = "sc-rpc-api"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"jsonrpsee",
"parity-scale-codec",
@@ -7501,7 +7502,7 @@ dependencies = [
[[package]]
name = "sc-rpc-server"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"http 0.2.12",
"jsonrpsee",
@@ -7516,7 +7517,7 @@ dependencies = [
[[package]]
name = "sc-rpc-spec-v2"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"futures",
@@ -7542,7 +7543,7 @@ dependencies = [
[[package]]
name = "sc-service"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"directories",
@@ -7605,7 +7606,7 @@ dependencies = [
[[package]]
name = "sc-state-db"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"log",
"parity-scale-codec",
@@ -7616,7 +7617,7 @@ dependencies = [
[[package]]
name = "sc-sysinfo"
version = "6.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"futures",
"libc",
@@ -7635,7 +7636,7 @@ dependencies = [
[[package]]
name = "sc-telemetry"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"chrono",
"futures",
@@ -7654,7 +7655,7 @@ dependencies = [
[[package]]
name = "sc-tracing"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"anstyle",
"chrono",
@@ -7682,7 +7683,7 @@ dependencies = [
[[package]]
name = "sc-tracing-proc-macro"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"proc-macro-crate 1.3.1",
"proc-macro2",
@@ -7693,7 +7694,7 @@ dependencies = [
[[package]]
name = "sc-transaction-pool"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"futures",
@@ -7719,7 +7720,7 @@ dependencies = [
[[package]]
name = "sc-transaction-pool-api"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"futures",
@@ -7735,7 +7736,7 @@ dependencies = [
[[package]]
name = "sc-utils"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-channel",
"futures",
@@ -8936,7 +8937,7 @@ dependencies = [
[[package]]
name = "sp-api"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"hash-db",
"log",
@@ -8957,7 +8958,7 @@ dependencies = [
[[package]]
name = "sp-api-proc-macro"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"Inflector",
"blake2",
@@ -8971,7 +8972,7 @@ dependencies = [
[[package]]
name = "sp-application-crypto"
version = "23.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"scale-info",
@@ -8984,7 +8985,7 @@ dependencies = [
[[package]]
name = "sp-arithmetic"
version = "16.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"integer-sqrt",
"num-traits",
@@ -8998,7 +8999,7 @@ dependencies = [
[[package]]
name = "sp-authority-discovery"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"scale-info",
@@ -9010,7 +9011,7 @@ dependencies = [
[[package]]
name = "sp-block-builder"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"sp-api",
"sp-inherents",
@@ -9021,7 +9022,7 @@ dependencies = [
[[package]]
name = "sp-blockchain"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"futures",
"log",
@@ -9039,7 +9040,7 @@ dependencies = [
[[package]]
name = "sp-consensus"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"futures",
@@ -9053,7 +9054,7 @@ dependencies = [
[[package]]
name = "sp-consensus-babe"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"parity-scale-codec",
@@ -9072,7 +9073,7 @@ dependencies = [
[[package]]
name = "sp-consensus-grandpa"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"finality-grandpa",
"log",
@@ -9090,7 +9091,7 @@ dependencies = [
[[package]]
name = "sp-consensus-slots"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"scale-info",
@@ -9102,7 +9103,7 @@ dependencies = [
[[package]]
name = "sp-core"
version = "21.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"array-bytes",
"bitflags 1.3.2",
@@ -9145,7 +9146,7 @@ dependencies = [
[[package]]
name = "sp-core-hashing"
version = "9.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"blake2b_simd",
"byteorder",
@@ -9157,7 +9158,7 @@ dependencies = [
[[package]]
name = "sp-core-hashing-proc-macro"
version = "9.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"quote",
"sp-core-hashing",
@@ -9167,7 +9168,7 @@ dependencies = [
[[package]]
name = "sp-database"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"kvdb",
"parking_lot 0.12.3",
@@ -9176,7 +9177,7 @@ dependencies = [
[[package]]
name = "sp-debug-derive"
version = "8.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"proc-macro2",
"quote",
@@ -9186,7 +9187,7 @@ dependencies = [
[[package]]
name = "sp-externalities"
version = "0.19.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"environmental",
"parity-scale-codec",
@@ -9197,7 +9198,7 @@ dependencies = [
[[package]]
name = "sp-inherents"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"impl-trait-for-tuples",
@@ -9211,7 +9212,7 @@ dependencies = [
[[package]]
name = "sp-io"
version = "23.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"bytes",
"ed25519",
@@ -9233,7 +9234,7 @@ dependencies = [
[[package]]
name = "sp-keyring"
version = "24.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"lazy_static",
"sp-core",
@@ -9244,7 +9245,7 @@ dependencies = [
[[package]]
name = "sp-keystore"
version = "0.27.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"parking_lot 0.12.3",
@@ -9256,7 +9257,7 @@ dependencies = [
[[package]]
name = "sp-maybe-compressed-blob"
version = "4.1.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"thiserror",
"zstd 0.12.4",
@@ -9265,7 +9266,7 @@ dependencies = [
[[package]]
name = "sp-metadata-ir"
version = "0.1.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-metadata",
"parity-scale-codec",
@@ -9276,7 +9277,7 @@ dependencies = [
[[package]]
name = "sp-offchain"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"sp-api",
"sp-core",
@@ -9286,7 +9287,7 @@ dependencies = [
[[package]]
name = "sp-panic-handler"
version = "8.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"backtrace",
"lazy_static",
@@ -9296,7 +9297,7 @@ dependencies = [
[[package]]
name = "sp-rpc"
version = "6.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"rustc-hash 1.1.0",
"serde",
@@ -9306,7 +9307,7 @@ dependencies = [
[[package]]
name = "sp-runtime"
version = "24.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"either",
"hash256-std-hasher",
@@ -9328,7 +9329,7 @@ dependencies = [
[[package]]
name = "sp-runtime-interface"
version = "17.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"bytes",
"impl-trait-for-tuples",
@@ -9346,7 +9347,7 @@ dependencies = [
[[package]]
name = "sp-runtime-interface-proc-macro"
version = "11.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"Inflector",
"proc-macro-crate 1.3.1",
@@ -9358,7 +9359,7 @@ dependencies = [
[[package]]
name = "sp-session"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"scale-info",
@@ -9373,7 +9374,7 @@ dependencies = [
[[package]]
name = "sp-staking"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"impl-trait-for-tuples",
"parity-scale-codec",
@@ -9387,7 +9388,7 @@ dependencies = [
[[package]]
name = "sp-state-machine"
version = "0.28.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"hash-db",
"log",
@@ -9408,12 +9409,12 @@ dependencies = [
[[package]]
name = "sp-std"
version = "8.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
[[package]]
name = "sp-storage"
version = "13.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"impl-serde",
"parity-scale-codec",
@@ -9426,7 +9427,7 @@ dependencies = [
[[package]]
name = "sp-timestamp"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"async-trait",
"parity-scale-codec",
@@ -9439,7 +9440,7 @@ dependencies = [
[[package]]
name = "sp-tracing"
version = "10.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"sp-std",
@@ -9451,7 +9452,7 @@ dependencies = [
[[package]]
name = "sp-transaction-pool"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"sp-api",
"sp-runtime",
@@ -9460,7 +9461,7 @@ dependencies = [
[[package]]
name = "sp-trie"
version = "22.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"ahash",
"hash-db",
@@ -9483,7 +9484,7 @@ dependencies = [
[[package]]
name = "sp-version"
version = "22.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"impl-serde",
"parity-scale-codec",
@@ -9500,7 +9501,7 @@ dependencies = [
[[package]]
name = "sp-version-proc-macro"
version = "8.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"proc-macro2",
@@ -9511,7 +9512,7 @@ dependencies = [
[[package]]
name = "sp-wasm-interface"
version = "14.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"anyhow",
"impl-trait-for-tuples",
@@ -9524,7 +9525,7 @@ dependencies = [
[[package]]
name = "sp-weights"
version = "20.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"parity-scale-codec",
"scale-info",
@@ -9724,12 +9725,12 @@ dependencies = [
[[package]]
name = "substrate-build-script-utils"
version = "3.0.0"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
[[package]]
name = "substrate-frame-rpc-system"
version = "4.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"frame-system-rpc-runtime-api",
"futures",
@@ -9748,7 +9749,7 @@ dependencies = [
[[package]]
name = "substrate-prometheus-endpoint"
version = "0.10.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"hyper 0.14.30",
"log",
@@ -9760,7 +9761,7 @@ dependencies = [
[[package]]
name = "substrate-wasm-builder"
version = "5.0.0-dev"
source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46148aa8c7d737a"
source = "git+https://github.com/serai-dex/substrate#8587cd89bec74f789d4e23fdf776508a0ed5db6f"
dependencies = [
"anstyle",
"build-helper",

View File

@@ -25,7 +25,7 @@ pub trait Addendum: Send + Sync + Clone + PartialEq + Debug + WriteAddendum {}
impl<A: Send + Sync + Clone + PartialEq + Debug + WriteAddendum> Addendum for A {}
/// Algorithm trait usable by the FROST signing machine to produce signatures..
pub trait Algorithm<C: Curve>: Send + Sync + Clone {
pub trait Algorithm<C: Curve>: Send + Sync {
/// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible
/// transcript included in this crate.
type Transcript: Sync + Clone + Debug + Transcript;

View File

@@ -47,7 +47,7 @@ impl<T: Writable> Writable for Vec<T> {
}
// Pairing of an Algorithm with a ThresholdKeys instance.
#[derive(Clone, Zeroize)]
#[derive(Zeroize)]
struct Params<C: Curve, A: Algorithm<C>> {
// Skips the algorithm due to being too large a bound to feasibly enforce on users
#[zeroize(skip)]
@@ -193,7 +193,7 @@ impl<C: Curve> SignatureShare<C> {
/// Trait for the second machine of a two-round signing protocol.
pub trait SignMachine<S>: Send + Sync + Sized {
/// Params used to instantiate this machine which can be used to rebuild from a cache.
type Params: Clone;
type Params;
/// Keys used for signing operations.
type Keys;
/// Preprocess message for this machine.
@@ -397,7 +397,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
Ok((
AlgorithmSignatureMachine {
params: self.params.clone(),
params: self.params,
view,
B,
Rs,

View File

@@ -37,10 +37,10 @@ pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
}
/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm.
pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
pub fn algorithm_machines_without_clone<R: RngCore, C: Curve, A: Algorithm<C>>(
rng: &mut R,
algorithm: &A,
keys: &HashMap<Participant, ThresholdKeys<C>>,
machines: HashMap<Participant, AlgorithmMachine<C, A>>,
) -> HashMap<Participant, AlgorithmMachine<C, A>> {
let mut included = vec![];
while included.len() < usize::from(keys[&Participant::new(1).unwrap()].params().t()) {
@@ -54,18 +54,28 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
included.push(n);
}
keys
.iter()
.filter_map(|(i, keys)| {
if included.contains(i) {
Some((*i, AlgorithmMachine::new(algorithm.clone(), keys.clone())))
} else {
None
}
})
machines
.into_iter()
.filter_map(|(i, machine)| if included.contains(&i) { Some((i, machine)) } else { None })
.collect()
}
/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm.
pub fn algorithm_machines<R: RngCore, C: Curve, A: Clone + Algorithm<C>>(
rng: &mut R,
algorithm: &A,
keys: &HashMap<Participant, ThresholdKeys<C>>,
) -> HashMap<Participant, AlgorithmMachine<C, A>> {
algorithm_machines_without_clone(
rng,
keys,
keys
.values()
.map(|keys| (keys.params().i(), AlgorithmMachine::new(algorithm.clone(), keys.clone())))
.collect(),
)
}
// Run the preprocess step
pub(crate) fn preprocess<
R: RngCore + CryptoRng,
@@ -165,10 +175,10 @@ pub fn sign_without_caching<R: RngCore + CryptoRng, M: PreprocessMachine>(
/// Execute the signing protocol, randomly caching various machines to ensure they can cache
/// successfully.
pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
pub fn sign_without_clone<R: RngCore + CryptoRng, M: PreprocessMachine>(
rng: &mut R,
params: &<M::SignMachine as SignMachine<M::Signature>>::Params,
mut keys: HashMap<Participant, <M::SignMachine as SignMachine<M::Signature>>::Keys>,
mut params: HashMap<Participant, <M::SignMachine as SignMachine<M::Signature>>::Params>,
machines: HashMap<Participant, M>,
msg: &[u8],
) -> M::Signature {
@@ -183,7 +193,8 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
let cache = machines.remove(&i).unwrap().cache();
machines.insert(
i,
M::SignMachine::from_cache(params.clone(), keys.remove(&i).unwrap(), cache).0,
M::SignMachine::from_cache(params.remove(&i).unwrap(), keys.remove(&i).unwrap(), cache)
.0,
);
}
}
@@ -192,6 +203,22 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
)
}
/// Execute the signing protocol, randomly caching various machines to ensure they can cache
/// successfully.
pub fn sign<
R: RngCore + CryptoRng,
M: PreprocessMachine<SignMachine: SignMachine<M::Signature, Params: Clone>>,
>(
rng: &mut R,
params: &<M::SignMachine as SignMachine<M::Signature>>::Params,
keys: HashMap<Participant, <M::SignMachine as SignMachine<M::Signature>>::Keys>,
machines: HashMap<Participant, M>,
msg: &[u8],
) -> M::Signature {
let params = keys.keys().map(|i| (*i, params.clone())).collect();
sign_without_clone(rng, keys, params, machines, msg)
}
/// Test a basic Schnorr signature with the provided keys.
pub fn test_schnorr_with_keys<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
rng: &mut R,

View File

@@ -6,6 +6,9 @@ the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
Recommended usage of the library is with `overflow-checks = true`, even for
release builds.
### Wallet Functionality
monero-serai originally included wallet functionality. That has been moved to

View File

@@ -29,7 +29,11 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
let uv3 = u * v3;
let v7 = v3 * v3 * v;
let uv7 = u * v7;
uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
uv3 *
uv7.pow(
(-FieldElement::from(5u8)) *
FieldElement::from(8u8).invert().expect("eight was coprime with the prime 2^{255}-19"),
)
};
let x = X.square() * x;
@@ -45,9 +49,23 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
#[allow(non_snake_case)]
let mut Y = z - w;
Y *= Z.invert().unwrap();
/*
If sign, `z = -486662`, else, `z = -486662 * v`
`w = v + 1`
We need `z + w \ne 0`, which would require `z \cong -w \mod 2^{255}-19`. This requires:
- If `sign`, `v \mod 2^{255}-19 \ne 486661`.
- If `!sign`, `(v + 1) \mod 2^{255}-19 \ne (v * 486662) \mod 2^{255}-19` which is equivalent to
`(v * 486661) \mod 2^{255}-19 \ne 1`.
In summary, if `sign`, `v` must not `486661`, and if `!sign`, `v` must not be the
multiplicative inverse of `486661`. Since `v` is the output of a hash function, this should
have negligible probability. Additionally, since the definition of `sign` is dependent on `v`,
it may be truly impossible to reach.
*/
Y *= Z.invert().expect("if sign, v was 486661. if !sign, v was 486661^{-1}");
let mut bytes = Y.to_repr();
bytes[31] |= sign.unwrap_u8() << 7;
decompress_point(bytes).unwrap().mul_by_cofactor()
decompress_point(bytes).expect("point from hash-to-curve wasn't on-curve").mul_by_cofactor()
}

View File

@@ -28,7 +28,7 @@ fn keccak256(data: &[u8]) -> [u8; 32] {
#[allow(non_snake_case)]
pub static H: LazyLock<EdwardsPoint> = LazyLock::new(|| {
decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
.unwrap()
.expect("known on-curve point wasn't on-curve")
.mul_by_cofactor()
});
@@ -51,8 +51,6 @@ pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
pub const MAX_COMMITMENTS: usize = 16;
/// The amount of bits a value within a commitment may use.
pub const COMMITMENT_BITS: usize = 64;
/// The logarithm (over 2) of the amount of bits a value within a commitment may use.
pub const LOG_COMMITMENT_BITS: usize = 6; // 2 ** 6 == N
/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
@@ -80,11 +78,11 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
let i = 2 * i;
let mut even = preimage.clone();
write_varint(&i, &mut even).unwrap();
write_varint(&i, &mut even).expect("write failed but <Vec as io::Write> doesn't fail");
res.H.push(hash_to_point(keccak256(&even)));
let mut odd = preimage.clone();
write_varint(&(i + 1), &mut odd).unwrap();
write_varint(&(i + 1), &mut odd).expect("write failed but <Vec as io::Write> doesn't fail");
res.G.push(hash_to_point(keccak256(&odd)));
}
res

View File

@@ -18,10 +18,12 @@ use curve25519_dalek::{
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
mod sealed {
use core::fmt::Debug;
/// A trait for a number readable/writable as a VarInt.
///
/// This is sealed to prevent unintended implementations.
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
pub trait VarInt: TryInto<u64, Error: Debug> + TryFrom<u64, Error: Debug> + Copy {
const BITS: usize;
}
@@ -34,6 +36,10 @@ mod sealed {
impl VarInt for u64 {
const BITS: usize = 64;
}
// Don't compile for platforms where `usize` exceeds `u64`, preventing various possible runtime
// exceptions
const _NO_128_BIT_PLATFORMS: [(); (u64::BITS - usize::BITS) as usize] =
[(); (u64::BITS - usize::BITS) as usize];
impl VarInt for usize {
const BITS: usize = core::mem::size_of::<usize>() * 8;
}
@@ -43,8 +49,12 @@ mod sealed {
///
/// This function will panic if the VarInt exceeds u64::MAX.
pub fn varint_len<V: sealed::VarInt>(varint: V) -> usize {
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
let varint_u64: u64 = varint.try_into().expect("varint exceeded u64");
((usize::try_from(u64::BITS - varint_u64.leading_zeros())
.expect("64 > usize::MAX")
.saturating_sub(1)) /
7) +
1
}
/// Write a byte.
@@ -58,9 +68,10 @@ pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
///
/// This will panic if the VarInt exceeds u64::MAX.
pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
let mut varint: u64 = (*varint).try_into().expect("varint exceeded u64");
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK))
.expect("& eight_bit_mask left more than 8 bits set");
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
@@ -210,10 +221,28 @@ pub fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: us
f: F,
r: &mut R,
) -> io::Result<[T; N]> {
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
read_raw_vec(f, N, r).map(|vec| {
vec.try_into().expect(
"read vector of specific length yet couldn't transform to an array of the same length",
)
})
}
/// Read a length-prefixed variable-length list of elements.
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?, r)
///
/// An optional bound on the length of the result may be provided. If `None`, the returned `Vec`
/// will be of the length read off the reader, if successfully read. If `Some(_)`, an error will be
/// raised if the length read off the read is greater than the bound.
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
length_bound: Option<usize>,
r: &mut R,
) -> io::Result<Vec<T>> {
let declared_length: usize = read_varint(r)?;
if let Some(length_bound) = length_bound {
if declared_length > length_bound {
Err(io::Error::other("vector exceeds bound on length"))?;
}
}
read_raw_vec(f, declared_length, r)
}

View File

@@ -73,7 +73,11 @@ pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar {
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
// not generate/verify a proof we believe to be valid when it isn't
assert!(scalar != Scalar::ZERO, "ZERO HASH: {:?}", data.as_ref());
assert!(
scalar != Scalar::ZERO,
"keccak256(preimage) \\cong 0 \\mod l! Preimage: {:?}",
data.as_ref()
);
scalar
}
@@ -124,7 +128,7 @@ impl Commitment {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 8);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -162,7 +166,14 @@ impl Decoys {
/// `offsets` are the positions of each ring member within the Monero blockchain, offset from the
/// prior member's position (with the initial ring member offset from 0).
pub fn new(offsets: Vec<u64>, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option<Self> {
if (offsets.len() != ring.len()) || (usize::from(signer_index) >= ring.len()) {
if (offsets.len() > usize::from(u8::MAX)) ||
(offsets.len() != ring.len()) ||
(usize::from(signer_index) >= ring.len())
{
None?;
}
// Check these offsets form representable positions
if offsets.iter().copied().try_fold(0, u64::checked_add).is_none() {
None?;
}
Some(Decoys { offsets, signer_index, ring })
@@ -213,7 +224,7 @@ impl Decoys {
pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> {
write_vec(write_varint, &self.offsets, w)?;
w.write_all(&[self.signer_index])?;
write_vec(
write_raw_vec(
|pair, w| {
write_point(&pair[0], w)?;
write_point(&pair[1], w)
@@ -230,7 +241,7 @@ impl Decoys {
pub fn serialize(&self) -> Vec<u8> {
let mut res =
Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64));
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -239,10 +250,12 @@ impl Decoys {
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read(r: &mut impl io::Read) -> io::Result<Decoys> {
let offsets = read_vec(read_varint, None, r)?;
let len = offsets.len();
Decoys::new(
read_vec(read_varint, r)?,
offsets,
read_byte(r)?,
read_vec(|r| Ok([read_point(r)?, read_point(r)?]), r)?,
read_raw_vec(|r| Ok([read_point(r)?, read_point(r)?]), len, r)?,
)
.ok_or_else(|| io::Error::other("invalid Decoys"))
}

View File

@@ -8,7 +8,7 @@ fn recover_scalars() {
let stored = UnreducedScalar(hex::decode(stored).unwrap().try_into().unwrap());
let recovered =
Scalar::from_canonical_bytes(hex::decode(recovered).unwrap().try_into().unwrap()).unwrap();
assert_eq!(stored.recover_monero_slide_scalar(), recovered);
assert_eq!(stored.ref10_slide_scalar_vartime(), recovered);
};
// https://www.moneroinflation.com/static/data_py/report_scalars_df.pdf

View File

@@ -14,7 +14,8 @@ use monero_io::*;
static PRECOMPUTED_SCALARS: LazyLock<[Scalar; 8]> = LazyLock::new(|| {
let mut precomputed_scalars = [Scalar::ONE; 8];
for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
*scalar = Scalar::from(u8::try_from((i * 2) + 1).unwrap());
*scalar =
Scalar::from(u64::try_from((i * 2) + 1).expect("enumerating more than u64::MAX / 2 items"));
}
precomputed_scalars
});
@@ -54,12 +55,12 @@ impl UnreducedScalar {
// This matches Monero's `slide` function and intentionally gives incorrect outputs under
// certain conditions in order to match Monero.
//
// This function does not execute in constant time.
// This function does not execute in constant time and must only be used with public data.
fn non_adjacent_form(&self) -> [i8; 256] {
let bits = self.as_bits();
let mut naf = [0i8; 256];
for (b, bit) in bits.into_iter().enumerate() {
naf[b] = i8::try_from(bit).unwrap();
naf[b] = i8::try_from(bit).expect("bit didn't fit within an i8");
}
for i in 0 .. 256 {
@@ -107,15 +108,17 @@ impl UnreducedScalar {
naf
}
/// Recover the scalar that an array of bytes was incorrectly interpreted as by Monero's `slide`
/// function.
/// Recover the scalar that an array of bytes was incorrectly interpreted as by ref10's `slide`
/// function (as used by the reference Monero implementation in C++).
///
/// In Borromean range proofs, Monero was not checking that the scalars used were
/// reduced. This lead to the scalar stored being interpreted as a different scalar.
/// This function recovers that scalar.
/// For Borromean range proofs, Monero did not check the scalars used were reduced. This led to
/// some scalars serialized being interpreted as distinct scalars. This function recovers these
/// distinct scalars, as required to verify Borromean range proofs within the Monero protocol.
///
/// See <https://github.com/monero-project/monero/issues/8438> for more info.
pub fn recover_monero_slide_scalar(&self) -> Scalar {
//
/// This function does not execute in constant time and must only be used with public data.
pub fn ref10_slide_scalar_vartime(&self) -> Scalar {
if self.0[31] & 128 == 0 {
// Computing the w-NAF of a number can only give an output with 1 more bit than
// the number, so even if the number isn't reduced, the `slide` function will be
@@ -127,8 +130,13 @@ impl UnreducedScalar {
for &numb in self.non_adjacent_form().iter().rev() {
recovered += recovered;
match numb.cmp(&0) {
Ordering::Greater => recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).unwrap() / 2],
Ordering::Less => recovered -= PRECOMPUTED_SCALARS[usize::try_from(-numb).unwrap() / 2],
Ordering::Greater => {
recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).expect("positive i8 -> usize") / 2]
}
Ordering::Less => {
recovered -=
PRECOMPUTED_SCALARS[usize::try_from(-numb).expect("negated negative i8 -> usize") / 2]
}
Ordering::Equal => (),
}
}

View File

@@ -56,13 +56,13 @@ impl BorromeanSignatures {
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
&self.ee,
&keys_a[i],
&self.s0[i].recover_monero_slide_scalar(),
&self.s0[i].ref10_slide_scalar_vartime(),
);
#[allow(non_snake_case)]
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
&keccak256_to_scalar(LL.compress().as_bytes()),
&keys_b[i],
&self.s1[i].recover_monero_slide_scalar(),
&self.s1[i].ref10_slide_scalar_vartime(),
);
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
}

View File

@@ -16,8 +16,10 @@ fn generators(prefix: &'static str, path: &str) {
generators_string.extend(
format!(
"
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(),
",
curve25519_dalek::edwards::CompressedEdwardsY({:?})
.decompress()
.expect(\"generator from build script wasn't on-curve\"),
",
generator.compress().to_bytes()
)
.chars(),
@@ -33,10 +35,10 @@ fn generators(prefix: &'static str, path: &str) {
let mut H_str = String::new();
serialize(&mut H_str, &generators.H);
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.expect("failed to create file in $OUT_DIR")
.write_all(
format!(
"
@@ -52,15 +54,15 @@ fn generators(prefix: &'static str, path: &str) {
)
.as_bytes(),
)
.unwrap();
.expect("couldn't write generated source code to file on disk");
}
#[cfg(not(feature = "compile-time-generators"))]
fn generators(prefix: &'static str, path: &str) {
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.expect("failed to create file in $OUT_DIR")
.write_all(
format!(
r#"
@@ -71,7 +73,7 @@ fn generators(prefix: &'static str, path: &str) {
)
.as_bytes(),
)
.unwrap();
.expect("couldn't write generated source code to file on disk");
}
fn main() {

View File

@@ -23,6 +23,11 @@ pub(crate) struct InternalBatchVerifier {
impl InternalBatchVerifier {
#[must_use]
fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool {
/*
Technically, this following line can overflow, and joining these `Vec`s _may_ panic if
they're individually acceptable lengths yet their sum isn't. This is so negligible, due to
the amount of memory required, it's dismissed.
*/
let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len();
let mut scalars = Vec::with_capacity(capacity);
let mut points = Vec::with_capacity(capacity);

View File

@@ -6,7 +6,7 @@ use curve25519_dalek::{
edwards::EdwardsPoint,
};
pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS, LOG_COMMITMENT_BITS};
pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS};
pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
let mut buf_scalars = Vec::with_capacity(pairs.len());

View File

@@ -5,7 +5,6 @@
#![allow(non_snake_case)]
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
@@ -17,13 +16,13 @@ use curve25519_dalek::edwards::EdwardsPoint;
use monero_io::*;
pub use monero_generators::MAX_COMMITMENTS;
use monero_generators::COMMITMENT_BITS;
use monero_primitives::Commitment;
pub(crate) mod scalar_vector;
pub(crate) mod point_vector;
pub(crate) mod core;
use crate::core::LOG_COMMITMENT_BITS;
pub(crate) mod batch_verifier;
use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier};
@@ -44,6 +43,11 @@ use crate::plus::{
#[cfg(test)]
mod tests;
// The logarithm (over 2) of the amount of bits a value within a commitment may use.
const LOG_COMMITMENT_BITS: usize = COMMITMENT_BITS.ilog2() as usize;
// The maximum length of L/R `Vec`s.
const MAX_LR: usize = (MAX_COMMITMENTS.ilog2() as usize) + LOG_COMMITMENT_BITS;
/// An error from proving/verifying Bulletproofs(+).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
@@ -82,27 +86,30 @@ impl Bulletproof {
/// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone
/// accordingly doesn't properly represent the burden of the proof. Monero 'claws back' some of
/// the weight lost by using a proof smaller than it is fast to compensate for this.
///
/// If the amount of outputs specified exceeds the maximum amount of outputs, the result for the
/// maximum amount of outputs will be returned.
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
pub fn calculate_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
#[allow(non_snake_case)]
let mut LR_len = 0;
let mut n_padded_outputs = 1;
while n_padded_outputs < n_outputs {
while n_padded_outputs < n_outputs.min(MAX_COMMITMENTS) {
LR_len += 1;
n_padded_outputs = 1 << LR_len;
}
LR_len += LOG_COMMITMENT_BITS;
let mut bp_clawback = 0;
let mut clawback = 0;
if n_padded_outputs > 2 {
let fields = Bulletproof::bp_fields(plus);
let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2;
let size = (fields + (2 * LR_len)) * 32;
bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
clawback = ((base * n_padded_outputs) - size) * 4 / 5;
}
(bp_clawback, LR_len)
(clawback, LR_len)
}
/// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof.
@@ -119,9 +126,15 @@ impl Bulletproof {
let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
Ok(Bulletproof::Original(
OriginalStatement::new(&commitments)
.unwrap()
.prove(rng, OriginalWitness::new(outputs).unwrap())
.unwrap(),
.expect("failed to create statement despite checking amount of commitments")
.prove(
rng,
OriginalWitness::new(outputs)
.expect("failed to create witness despite checking amount of commitments"),
)
.expect(
"failed to prove Bulletproof::Original despite ensuring statement/witness consistency",
),
))
}
@@ -139,9 +152,15 @@ impl Bulletproof {
let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
Ok(Bulletproof::Plus(
PlusStatement::new(&commitments)
.unwrap()
.prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
.unwrap(),
.expect("failed to create statement despite checking amount of commitments")
.prove(
rng,
&Zeroizing::new(
PlusWitness::new(outputs)
.expect("failed to create witness despite checking amount of commitments"),
),
)
.expect("failed to prove Bulletproof::Plus despite ensuring statement/witness consistency"),
))
}
@@ -250,8 +269,8 @@ impl Bulletproof {
/// Serialize a Bulletproof(+) to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
let mut serialized = Vec::with_capacity(512);
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
@@ -265,8 +284,8 @@ impl Bulletproof {
tau_x: read_scalar(r)?,
mu: read_scalar(r)?,
ip: IpProof {
L: read_vec(read_point, r)?,
R: read_vec(read_point, r)?,
L: read_vec(read_point, Some(MAX_LR), r)?,
R: read_vec(read_point, Some(MAX_LR), r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
},
@@ -284,8 +303,8 @@ impl Bulletproof {
r_answer: read_scalar(r)?,
s_answer: read_scalar(r)?,
delta_answer: read_scalar(r)?,
L: read_vec(read_point, r)?.into_iter().collect(),
R: read_vec(read_point, r)?.into_iter().collect(),
L: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(),
R: read_vec(read_point, Some(MAX_LR), r)?.into_iter().collect(),
},
}))
}

View File

@@ -174,7 +174,11 @@ impl IpStatement {
R_vec.push(R * INV_EIGHT());
// Now that we've calculate L, R, transcript them to receive x (26-27)
transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap());
transcript = Self::transcript_L_R(
transcript,
*L_vec.last().expect("couldn't get last L_vec despite always being non-empty"),
*R_vec.last().expect("couldn't get last R_vec despite always being non-empty"),
);
let x = transcript;
let x_inv = x.invert();

View File

@@ -227,8 +227,11 @@ impl<'a> AggregateRangeStatement<'a> {
let x_ip = transcript;
let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
.prove(transcript, IpWitness::new(l, r).unwrap())
.unwrap();
.prove(
transcript,
IpWitness::new(l, r).expect("Bulletproofs::Original created an invalid IpWitness"),
)
.expect("Bulletproofs::Original failed to prove the inner-product");
let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip };
#[cfg(debug_assertions)]

View File

@@ -106,7 +106,9 @@ impl<'a> AggregateRangeStatement<'a> {
let mut d = ScalarVector::new(mn);
for j in 1 ..= V.len() {
z_pow.push(*z_pow.last().unwrap() * z_pow[0]);
z_pow.push(
*z_pow.last().expect("couldn't get last z_pow despite always being non-empty") * z_pow[0],
);
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
}
@@ -229,8 +231,15 @@ impl<'a> AggregateRangeStatement<'a> {
Some(AggregateRangeProof {
A,
wip: WipStatement::new(generators, A_hat, y)
.prove(rng, transcript, &Zeroizing::new(WipWitness::new(a_l, a_r, alpha).unwrap()))
.unwrap(),
.prove(
rng,
transcript,
&Zeroizing::new(
WipWitness::new(a_l, a_r, alpha)
.expect("Bulletproofs::Plus created an invalid WipWitness"),
),
)
.expect("Bulletproof::Plus failed to prove the weighted inner-product"),
})
}

View File

@@ -65,7 +65,10 @@ impl BpPlusGenerators {
pub(crate) fn reduce(&self, generators: usize) -> Self {
// Round to the nearest power of 2
let generators = padded_pow_of_2(generators);
assert!(generators <= self.g_bold.len());
assert!(
generators <= self.g_bold.len(),
"instantiated with less generators than application required"
);
BpPlusGenerators { g_bold: &self.g_bold[.. generators], h_bold: &self.h_bold[.. generators] }
}

View File

@@ -230,7 +230,9 @@ impl WipStatement {
let c_l = a1.clone().weighted_inner_product(&b2, &y);
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
let y_inv_n_hat = y_inv.pop().unwrap();
let y_inv_n_hat = y_inv
.pop()
.expect("couldn't pop y_inv despite y_inv being of same length as times iterated");
let mut L_terms = (a1.clone() * y_inv_n_hat)
.0
@@ -331,7 +333,9 @@ impl WipStatement {
let mut res = Vec::with_capacity(y.len());
res.push(inv_y);
while res.len() < y.len() {
res.push(inv_y * res.last().unwrap());
res.push(
inv_y * res.last().expect("couldn't get last inv_y despite inv_y always being non-empty"),
);
}
res
};

View File

@@ -89,8 +89,8 @@ impl ClsagContext {
#[allow(clippy::large_enum_variant)]
enum Mode {
Sign(usize, EdwardsPoint, EdwardsPoint),
Verify(Scalar),
Sign { signer_index: u8, A: EdwardsPoint, AH: EdwardsPoint },
Verify { c1: Scalar, D_serialized: EdwardsPoint },
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
@@ -100,18 +100,18 @@ fn core(
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32],
D: &EdwardsPoint,
msg_hash: &[u8; 32],
D_torsion_free: &EdwardsPoint,
s: &[Scalar],
A_c1: &Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();
let images_precomp = match A_c1 {
Mode::Sign(..) => None,
Mode::Verify(..) => Some(VartimeEdwardsPrecomputation::new([I, D])),
Mode::Sign { .. } => None,
Mode::Verify { .. } => Some(VartimeEdwardsPrecomputation::new([I, D_torsion_free])),
};
let D_INV_EIGHT = D * INV_EIGHT();
let D_inv_eight = D_torsion_free * INV_EIGHT();
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed
@@ -140,7 +140,14 @@ fn core(
}
to_hash.extend(I.compress().to_bytes());
to_hash.extend(D_INV_EIGHT.compress().to_bytes());
match A_c1 {
Mode::Sign { .. } => {
to_hash.extend(D_inv_eight.compress().to_bytes());
}
Mode::Verify { D_serialized, .. } => {
to_hash.extend(D_serialized.compress().to_bytes());
}
}
to_hash.extend(pseudo_out.compress().to_bytes());
// mu_P with agg_0
let mu_P = keccak256_to_scalar(&to_hash);
@@ -156,22 +163,23 @@ fn core(
// Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
// truncated just to add it back
to_hash.extend(pseudo_out.compress().to_bytes());
to_hash.extend(msg);
to_hash.extend(msg_hash);
// Configure the loop based on if we're signing or verifying
let start;
let end;
let mut c;
match A_c1 {
Mode::Sign(r, A, AH) => {
start = r + 1;
end = r + n;
Mode::Sign { signer_index, A, AH } => {
let signer_index = usize::from(*signer_index);
start = signer_index + 1;
end = signer_index + n;
to_hash.extend(A.compress().to_bytes());
to_hash.extend(AH.compress().to_bytes());
c = keccak256_to_scalar(&to_hash);
}
Mode::Verify(c1) => {
Mode::Verify { c1, .. } => {
start = 0;
end = n;
c = *c1;
@@ -186,10 +194,10 @@ fn core(
// (s_i * G) + (c_p * P_i) + (c_c * C_i)
let L = match A_c1 {
Mode::Sign(..) => {
Mode::Sign { .. } => {
EdwardsPoint::multiscalar_mul([s[i], c_p, c_c], [ED25519_BASEPOINT_POINT, P[i], C[i]])
}
Mode::Verify(..) => {
Mode::Verify { .. } => {
G_PRECOMP().vartime_mixed_multiscalar_mul([s[i]], [c_p, c_c], [P[i], C[i]])
}
};
@@ -198,10 +206,13 @@ fn core(
// (c_p * I) + (c_c * D) + (s_i * PH)
let R = match A_c1 {
Mode::Sign(..) => EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D, &PH]),
Mode::Verify(..) => {
images_precomp.as_ref().unwrap().vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH])
Mode::Sign { .. } => {
EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D_torsion_free, &PH])
}
Mode::Verify { .. } => images_precomp
.as_ref()
.expect("value populated when verifying wasn't populated")
.vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]),
};
to_hash.truncate(((2 * n) + 3) * 32);
@@ -216,7 +227,7 @@ fn core(
}
// This first tuple is needed to continue signing, the latter is the c to be tested/worked with
((D_INV_EIGHT, c * mu_P, c * mu_C), c1)
((D_inv_eight, c * mu_P, c * mu_C), c1)
}
/// The CLSAG signature, as used in Monero.
@@ -245,23 +256,30 @@ impl Clsag {
I: &EdwardsPoint,
input: &ClsagContext,
mask: Scalar,
msg: &[u8; 32],
msg_hash: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint,
) -> ClsagSignCore {
let r: usize = input.decoys.signer_index().into();
let signer_index = input.decoys.signer_index();
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
let mask_delta = input.commitment.mask - mask;
let H = hash_to_point(input.decoys.ring()[r][0].compress().0);
let H = hash_to_point(input.decoys.ring()[usize::from(signer_index)][0].compress().0);
let D = H * mask_delta;
let mut s = Vec::with_capacity(input.decoys.ring().len());
for _ in 0 .. input.decoys.ring().len() {
s.push(Scalar::random(rng));
}
let ((D, c_p, c_c), c1) =
core(input.decoys.ring(), I, &pseudo_out, msg, &D, &s, &Mode::Sign(r, A, AH));
let ((D, c_p, c_c), c1) = core(
input.decoys.ring(),
I,
&pseudo_out,
msg_hash,
&D,
&s,
&Mode::Sign { signer_index, A, AH },
);
ClsagSignCore {
incomplete_clsag: Clsag { D, s, c1 },
@@ -288,11 +306,15 @@ impl Clsag {
/// `inputs` is of the form (discrete logarithm of the key, context).
///
/// `sum_outputs` is for the sum of the output commitments' masks.
///
/// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which
/// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do
/// not use this if you don't know what you're doing.
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
mut inputs: Vec<(Zeroizing<Scalar>, ClsagContext)>,
sum_outputs: Scalar,
msg: [u8; 32],
msg_hash: [u8; 32],
) -> Result<Vec<(Clsag, EdwardsPoint)>, ClsagError> {
// Create the key images
let mut key_image_generators = vec![];
@@ -329,7 +351,7 @@ impl Clsag {
&key_images[i],
&inputs[i].1,
mask,
&msg,
&msg_hash,
nonce.deref() * ED25519_BASEPOINT_TABLE,
nonce.deref() * key_image_generators[i],
);
@@ -345,7 +367,7 @@ impl Clsag {
nonce.zeroize();
debug_assert!(clsag
.verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg)
.verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg_hash)
.is_ok());
res.push((clsag, pseudo_out));
@@ -355,12 +377,16 @@ impl Clsag {
}
/// Verify a CLSAG signature for the provided context.
///
/// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which
/// makes assumptions on what has already been transcripted and bound to within `msg_hash`. Do
/// not use this if you don't know what you're doing.
pub fn verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32],
msg_hash: &[u8; 32],
) -> Result<(), ClsagError> {
// Preliminary checks
// s, c1, and points must also be encoded canonically, which is checked at time of decode
@@ -374,12 +400,20 @@ impl Clsag {
Err(ClsagError::InvalidImage)?;
}
let D = self.D.mul_by_cofactor();
if D.is_identity() {
let D_torsion_free = self.D.mul_by_cofactor();
if D_torsion_free.is_identity() {
Err(ClsagError::InvalidD)?;
}
let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, &Mode::Verify(self.c1));
let (_, c1) = core(
ring,
I,
pseudo_out,
msg_hash,
&D_torsion_free,
&self.s,
&Mode::Verify { c1: self.c1, D_serialized: self.D },
);
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}

View File

@@ -56,13 +56,12 @@ impl ClsagContext {
/// A channel to send the mask to use for the pseudo-out (rerandomized commitment) with.
///
/// A mask must be sent along this channel before any preprocess addendums are handled. Breaking
/// this rule will cause a panic.
#[derive(Clone, Debug)]
/// A mask must be sent along this channel before any preprocess addendums are handled.
#[derive(Debug)]
pub struct ClsagMultisigMaskSender {
buf: Arc<Mutex<Option<Scalar>>>,
}
#[derive(Clone, Debug)]
#[derive(Debug)]
struct ClsagMultisigMaskReceiver {
buf: Arc<Mutex<Option<Scalar>>>,
}
@@ -74,12 +73,14 @@ impl ClsagMultisigMaskSender {
/// Send a mask to a CLSAG multisig instance.
pub fn send(self, mask: Scalar) {
// There is no risk this was prior set as this consumes `self`, which does not implement
// `Clone`
*self.buf.lock() = Some(mask);
}
}
impl ClsagMultisigMaskReceiver {
fn recv(self) -> Scalar {
self.buf.lock().unwrap()
fn recv(self) -> Option<Scalar> {
*self.buf.lock()
}
}
@@ -114,12 +115,12 @@ struct Interim {
/// FROST-inspired algorithm for producing a CLSAG signature.
///
/// Before this has its `process_addendum` called, a mask must be set. Else this will panic.
/// Before this has its `process_addendum` called, a mask must be set.
///
/// The message signed is expected to be a 32-byte value. Per Monero, it's the keccak256 hash of
/// the transaction data which is signed. This will panic if the message is not a 32-byte value.
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
#[derive(Debug)]
pub struct ClsagMultisig {
transcript: RecommendedTranscript,
@@ -132,7 +133,7 @@ pub struct ClsagMultisig {
mask_recv: Option<ClsagMultisigMaskReceiver>,
mask: Option<Scalar>,
msg: Option<[u8; 32]>,
msg_hash: Option<[u8; 32]>,
interim: Option<Interim>,
}
@@ -156,7 +157,7 @@ impl ClsagMultisig {
mask_recv: Some(mask_recv),
mask: None,
msg: None,
msg_hash: None,
interim: None,
},
mask_send,
@@ -218,7 +219,14 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// Fetch the mask from the Mutex
// We set it to a variable to ensure our view of it is consistent
// It was this or a mpsc channel... std doesn't have oneshot :/
self.mask = Some(self.mask_recv.take().unwrap().recv());
self.mask = Some(
self
.mask_recv
.take()
.expect("image was none multiple times, despite setting to Some on first iteration")
.recv()
.ok_or(FrostError::InternalError("CLSAG mask was not provided"))?,
);
// Transcript the mask
self.transcript.append_message(b"mask", self.mask.expect("mask wasn't set").to_bytes());
@@ -235,7 +243,8 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// Accumulate the interpolated share
let interpolated_key_image_share =
addendum.key_image_share * lagrange::<dfg::Scalar>(l, view.included());
*self.image.as_mut().unwrap() += interpolated_key_image_share;
*self.image.as_mut().expect("image populated on first iteration wasn't Some") +=
interpolated_key_image_share;
self
.key_image_shares
@@ -253,7 +262,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
view: &ThresholdView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: Vec<Zeroizing<dfg::Scalar>>,
msg: &[u8],
msg_hash: &[u8],
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
//
@@ -264,14 +273,15 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// opening of the commitment being re-randomized (and what it's re-randomized to)
let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
let msg_hash = msg_hash.try_into().expect("CLSAG message hash should be 32-bytes");
self.msg_hash = Some(msg_hash);
let sign_core = Clsag::sign_core(
&mut rng,
&self.image.expect("verifying a share despite never processing any addendums").0,
&self.context,
self.mask.expect("mask wasn't set"),
self.msg.as_ref().unwrap(),
&msg_hash,
nonce_sums[0][0].0,
nonce_sums[0][1].0,
);
@@ -293,7 +303,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
_: &[Vec<dfg::EdwardsPoint>],
sum: dfg::Scalar,
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let interim = self.interim.as_ref().expect("verify called before sign_share");
let mut clsag = interim.clsag.clone();
// We produced shares as `r - p x`, yet the signature is actually `r - p x - c x`
// Substract `c x` (saved as `c`) now
@@ -303,7 +313,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
self.context.decoys.ring(),
&self.image.expect("verifying a signature despite never processing any addendums").0,
&interim.pseudo_out,
self.msg.as_ref().unwrap(),
self.msg_hash.as_ref().expect("verify called before sign_share"),
)
.is_ok()
{
@@ -318,7 +328,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
nonces: &[Vec<dfg::EdwardsPoint>],
share: dfg::Scalar,
) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
let interim = self.interim.as_ref().unwrap();
let interim = self.interim.as_ref().expect("verify_share called before sign_share");
// For a share `r - p x`, the following two equalities should hold:
// - `(r - p x)G == R.0 - pV`, where `V = xG`

View File

@@ -19,7 +19,8 @@ use crate::ClsagMultisig;
#[cfg(feature = "multisig")]
use frost::{
Participant,
tests::{key_gen, algorithm_machines, sign},
sign::AlgorithmMachine,
tests::{key_gen, algorithm_machines_without_clone, sign_without_clone},
};
const RING_LEN: u64 = 11;
@@ -31,7 +32,7 @@ const RING_INDEX: u8 = 3;
#[test]
fn clsag() {
for real in 0 .. RING_LEN {
let msg = [1; 32];
let msg_hash = [1; 32];
let mut secrets = (Zeroizing::new(Scalar::ZERO), Scalar::ZERO);
let mut ring = vec![];
@@ -61,18 +62,18 @@ fn clsag() {
.unwrap(),
)],
Scalar::random(&mut OsRng),
msg,
msg_hash,
)
.unwrap()
.swap_remove(0);
let image =
hash_to_point((ED25519_BASEPOINT_TABLE * secrets.0.deref()).compress().0) * secrets.0.deref();
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
clsag.verify(&ring, &image, &pseudo_out, &msg_hash).unwrap();
// make sure verification fails if we throw a random `c1` at it.
clsag.c1 = Scalar::random(&mut OsRng);
assert!(clsag.verify(&ring, &image, &pseudo_out, &msg).is_err());
assert!(clsag.verify(&ring, &image, &pseudo_out, &msg_hash).is_err());
}
}
@@ -99,21 +100,32 @@ fn clsag_multisig() {
ring.push([dest, Commitment::new(mask, amount).calculate()]);
}
let (algorithm, mask_send) = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
ClsagContext::new(
Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(),
Commitment::new(randomness, AMOUNT),
)
.unwrap(),
);
mask_send.send(Scalar::random(&mut OsRng));
let mask = Scalar::random(&mut OsRng);
let params = || {
let (algorithm, mask_send) = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
ClsagContext::new(
Decoys::new((1 ..= RING_LEN).collect(), RING_INDEX, ring.clone()).unwrap(),
Commitment::new(randomness, AMOUNT),
)
.unwrap(),
);
mask_send.send(mask);
algorithm
};
sign(
sign_without_clone(
&mut OsRng,
&algorithm,
keys.clone(),
algorithm_machines(&mut OsRng, &algorithm, &keys),
keys.values().map(|keys| (keys.params().i(), params())).collect(),
algorithm_machines_without_clone(
&mut OsRng,
&keys,
keys
.values()
.map(|keys| (keys.params().i(), AlgorithmMachine::new(params(), keys.clone())))
.collect(),
),
&[1; 32],
);
}

View File

@@ -122,6 +122,10 @@ impl Mlsag {
}
/// Verify a MLSAG.
///
/// WARNING: This follows the Fiat-Shamir transcript format used by the Monero protocol, which
/// makes assumptions on what has already been transcripted and bound to within `msg`. Do not use
/// this if you don't know what you're doing.
pub fn verify(
&self,
msg: &[u8; 32],

View File

@@ -17,6 +17,7 @@ workspace = true
[dependencies]
hex = { version = "0.4", default-features = false, features = ["alloc"] }
zeroize = { version = "^1.5", default-features = false, features = ["alloc", "std"] }
digest_auth = { version = "0.3", default-features = false }
simple-request = { path = "../../../../common/request", version = "0.1", default-features = false, features = ["tls"] }
tokio = { version = "1", default-features = false }

View File

@@ -7,6 +7,7 @@ use std::{sync::Arc, io::Read, time::Duration};
use tokio::sync::Mutex;
use zeroize::Zeroizing;
use digest_auth::{WwwAuthenticateHeader, AuthContext};
use simple_request::{
hyper::{StatusCode, header::HeaderValue, Request},
@@ -25,8 +26,8 @@ enum Authentication {
// This ensures that if a nonce is requested, another caller doesn't make a request invalidating
// it
Authenticated {
username: String,
password: String,
username: Zeroizing<String>,
password: Zeroizing<String>,
#[allow(clippy::type_complexity)]
connection: Arc<Mutex<(Option<(WwwAuthenticateHeader, u64)>, Client)>>,
},
@@ -77,7 +78,7 @@ impl SimpleRequestRpc {
) -> Result<SimpleRequestRpc, RpcError> {
let authentication = if url.contains('@') {
// Parse out the username and password
let url_clone = url;
let url_clone = Zeroizing::new(url);
let split_url = url_clone.split('@').collect::<Vec<_>>();
if split_url.len() != 2 {
Err(RpcError::ConnectionError("invalid amount of login specifications".to_string()))?;
@@ -114,8 +115,8 @@ impl SimpleRequestRpc {
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
)?;
Authentication::Authenticated {
username: split_userpass[0].to_string(),
password: (*split_userpass.get(1).unwrap_or(&"")).to_string(),
username: Zeroizing::new(split_userpass[0].to_string()),
password: Zeroizing::new((*split_userpass.get(1).unwrap_or(&"")).to_string()),
connection: Arc::new(Mutex::new((challenge, client))),
}
} else {
@@ -135,35 +136,13 @@ impl SimpleRequestRpc {
};
async fn body_from_response(response: Response<'_>) -> Result<Vec<u8>, RpcError> {
/*
let length = usize::try_from(
response
.headers()
.get("content-length")
.ok_or(RpcError::InvalidNode("no content-length header"))?
.to_str()
.map_err(|_| RpcError::InvalidNode("non-ascii content-length value"))?
.parse::<u32>()
.map_err(|_| RpcError::InvalidNode("non-u32 content-length value"))?,
)
.unwrap();
// Only pre-allocate 1 MB so a malicious node which claims a content-length of 1 GB actually
// has to send 1 GB of data to cause a 1 GB allocation
let mut res = Vec::with_capacity(length.max(1024 * 1024));
let mut body = response.into_body();
while res.len() < length {
let Some(data) = body.data().await else { break };
res.extend(data.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?.as_ref());
}
*/
let mut res = Vec::with_capacity(128);
response
.body()
.await
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
.read_to_end(&mut res)
.unwrap();
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?;
Ok(res)
}
@@ -202,8 +181,8 @@ impl SimpleRequestRpc {
*cnonce += 1;
let mut context = AuthContext::new_post::<_, _, _, &[u8]>(
username,
password,
<_ as AsRef<str>>::as_ref(username),
<_ as AsRef<str>>::as_ref(password),
"/".to_string() + route,
None,
);
@@ -219,7 +198,12 @@ impl SimpleRequestRpc {
})?
.to_header_string(),
)
.unwrap(),
.map_err(|_| {
RpcError::InternalError(
"digest-auth challenge response wasn't a valid string for an HTTP header"
.to_string(),
)
})?,
);
}
@@ -269,7 +253,7 @@ impl SimpleRequestRpc {
))?
}
} else {
body_from_response(response.unwrap()).await?
body_from_response(response.expect("no response yet also no error?")).await?
}
}
});

View File

@@ -121,7 +121,7 @@ impl FeeRate {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(16);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -139,15 +139,22 @@ impl FeeRate {
///
/// This function may panic upon overflow.
pub fn calculate_fee_from_weight(&self, weight: usize) -> u64 {
let fee = self.per_weight * u64::try_from(weight).unwrap();
let fee =
self.per_weight * u64::try_from(weight).expect("couldn't convert weight (usize) to u64");
let fee = fee.div_ceil(self.mask) * self.mask;
debug_assert_eq!(weight, self.calculate_weight_from_fee(fee), "Miscalculated weight from fee");
debug_assert_eq!(
Some(weight),
self.calculate_weight_from_fee(fee),
"Miscalculated weight from fee"
);
fee
}
/// Calculate the weight from the fee.
pub fn calculate_weight_from_fee(&self, fee: u64) -> usize {
usize::try_from(fee / self.per_weight).unwrap()
///
/// Returns `None` if the weight would not fit within a `usize`.
pub fn calculate_weight_from_fee(&self, fee: u64) -> Option<usize> {
usize::try_from(fee / self.per_weight).ok()
}
}
@@ -272,8 +279,14 @@ pub trait Rpc: Sync + Clone {
let res = self
.post(
route,
if let Some(params) = params {
serde_json::to_string(&params).unwrap().into_bytes()
if let Some(params) = params.as_ref() {
serde_json::to_string(params)
.map_err(|e| {
RpcError::InternalError(format!(
"couldn't convert parameters ({params:?}) to JSON: {e:?}"
))
})?
.into_bytes()
} else {
vec![]
},
@@ -295,7 +308,10 @@ pub trait Rpc: Sync + Clone {
async move {
let mut req = json!({ "method": method });
if let Some(params) = params {
req.as_object_mut().unwrap().insert("params".into(), params);
req
.as_object_mut()
.expect("accessing object as object failed?")
.insert("params".into(), params);
}
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
}
@@ -385,6 +401,11 @@ pub trait Rpc: Sync + Clone {
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
))?;
}
if txs.txs.len() != this_count {
Err(RpcError::InvalidNode(
"not missing any transactions yet didn't return all transactions".to_string(),
))?;
}
all_txs.extend(txs.txs);
}
@@ -1130,7 +1151,13 @@ impl<R: Rpc> DecoyRpc for R {
)))?;
}
let expected_len = if zero_zero_case { 2 } else { (to - start_height) + 1 };
let expected_len = if zero_zero_case {
2
} else {
(to - start_height).checked_add(1).ok_or_else(|| {
RpcError::InternalError("expected length of distribution exceeded usize".to_string())
})?
};
// Yet this is actually a height
if expected_len != distribution.len() {
Err(RpcError::InvalidNode(format!(
@@ -1145,6 +1172,20 @@ impl<R: Rpc> DecoyRpc for R {
if zero_zero_case {
distribution.pop();
}
// Check the distribution monotonically increases
{
let mut monotonic = 0;
for d in &distribution {
if *d < monotonic {
Err(RpcError::InvalidNode(
"received output distribution didn't increase monotonically".to_string(),
))?;
}
monotonic = *d;
}
}
Ok(distribution)
}
}
@@ -1255,8 +1296,8 @@ impl<R: Rpc> DecoyRpc for R {
// https://github.com/monero-project/monero/blob
// /cc73fe71162d564ffda8e549b79a350bca53c454/src/cryptonote_core
// /blockchain.cpp#L3836
((out.height + DEFAULT_LOCK_WINDOW) <= height) &&
(Timelock::Block(height - 1 + ACCEPTED_TIMELOCK_DELTA) >=
out.height.checked_add(DEFAULT_LOCK_WINDOW).is_some_and(|locked| locked <= height) &&
(Timelock::Block(height.wrapping_add(ACCEPTED_TIMELOCK_DELTA - 1)) >=
txs[i].prefix().additional_timelock)
} else {
out.unlocked

View File

@@ -51,7 +51,7 @@ impl BlockHeader {
/// Serialize the BlockHeader to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
@@ -111,7 +111,7 @@ impl Block {
/// Serialize the Block to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
@@ -122,7 +122,13 @@ impl Block {
pub fn serialize_pow_hash(&self) -> Vec<u8> {
let mut blob = self.header.serialize();
blob.extend_from_slice(&merkle_root(self.miner_transaction.hash(), &self.transactions));
write_varint(&(1 + u64::try_from(self.transactions.len()).unwrap()), &mut blob).unwrap();
write_varint(
&(1 +
u64::try_from(self.transactions.len())
.expect("amount of transactions in block exceeded u64::MAX")),
&mut blob,
)
.expect("write failed but <Vec as io::Write> doesn't fail");
blob
}
@@ -132,7 +138,11 @@ impl Block {
    // Monero pre-appends a VarInt of the block-to-hash's length before getting the block hash,
// but doesn't do this when getting the proof of work hash :)
let mut hashing_blob = Vec::with_capacity(9 + hashable.len());
write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
write_varint(
&u64::try_from(hashable.len()).expect("length of block hash's preimage exceeded u64::MAX"),
&mut hashing_blob,
)
.expect("write failed but <Vec as io::Write> doesn't fail");
hashing_blob.append(&mut hashable);
let hash = keccak256(hashing_blob);

View File

@@ -28,7 +28,7 @@ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
let mut paired_hashes = Vec::with_capacity(overage);
while let Some(left) = rightmost.next() {
let right = rightmost.next().unwrap();
let right = rightmost.next().expect("rightmost is of even length");
paired_hashes.push(keccak256([left.as_ref(), &right].concat()));
}
drop(rightmost);

View File

@@ -326,7 +326,9 @@ impl RctPrunable {
/// Serialize the RctPrunable to a `Vec<u8>`.
pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized, rct_type).unwrap();
self
.write(&mut serialized, rct_type)
.expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
@@ -341,7 +343,13 @@ impl RctPrunable {
Ok(match rct_type {
RctType::AggregateMlsagBorromean => RctPrunable::AggregateMlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
mlsag: Mlsag::read(ring_length, inputs + 1, r)?,
mlsag: Mlsag::read(
ring_length,
inputs.checked_add(1).ok_or_else(|| {
io::Error::other("reading a MLSAG for more inputs than representable")
})?,
r,
)?,
},
RctType::MlsagBorromean => RctPrunable::MlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
@@ -441,7 +449,7 @@ impl RctProofs {
/// Serialize the RctProofs to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

View File

@@ -53,7 +53,7 @@ impl Input {
/// Serialize the Input to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -71,7 +71,7 @@ impl Input {
let amount = if amount == 0 { None } else { Some(amount) };
Input::ToKey {
amount,
key_offsets: read_vec(read_varint, r)?,
key_offsets: read_vec(read_varint, None, r)?,
key_image: read_torsion_free_point(r)?,
}
}
@@ -106,7 +106,7 @@ impl Output {
/// Write the Output to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(8 + 1 + 32);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -163,7 +163,7 @@ impl Timelock {
/// Serialize the Timelock to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -241,7 +241,7 @@ impl TransactionPrefix {
pub fn read<R: Read>(r: &mut R, version: u64) -> io::Result<TransactionPrefix> {
let additional_timelock = Timelock::read(r)?;
let inputs = read_vec(|r| Input::read(r), r)?;
let inputs = read_vec(|r| Input::read(r), None, r)?;
if inputs.is_empty() {
Err(io::Error::other("transaction had no inputs"))?;
}
@@ -250,17 +250,17 @@ impl TransactionPrefix {
let mut prefix = TransactionPrefix {
additional_timelock,
inputs,
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?,
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), None, r)?,
extra: vec![],
};
prefix.extra = read_vec(read_byte, r)?;
prefix.extra = read_vec(read_byte, None, r)?;
Ok(prefix)
}
fn hash(&self, version: u64) -> [u8; 32] {
let mut buf = vec![];
write_varint(&version, &mut buf).unwrap();
self.write(&mut buf).unwrap();
write_varint(&version, &mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
keccak256(buf)
}
}
@@ -451,7 +451,7 @@ impl<P: PotentiallyPruned> Transaction<P> {
/// Write the Transaction to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(2048);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -493,15 +493,16 @@ impl<P: PotentiallyPruned> Transaction<P> {
let mut buf = Vec::with_capacity(512);
// We don't use `self.write` as that may write the signatures (if this isn't pruned)
write_varint(&self.version(), &mut buf).unwrap();
prefix.write(&mut buf).unwrap();
write_varint(&self.version(), &mut buf)
.expect("write failed but <Vec as io::Write> doesn't fail");
prefix.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
// We explicitly write the signatures ourselves here
let PrunableHash::V1(signatures) = prunable else {
panic!("hashing v1 TX with non-v1 prunable data")
};
for signature in signatures {
signature.write(&mut buf).unwrap();
signature.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
}
keccak256(buf)
@@ -513,7 +514,10 @@ impl<P: PotentiallyPruned> Transaction<P> {
if let Some(proofs) = proofs {
let mut buf = Vec::with_capacity(512);
proofs.base().write(&mut buf, proofs.rct_type()).unwrap();
proofs
.base()
.write(&mut buf, proofs.rct_type())
.expect("write failed but <Vec as io::Write> doesn't fail");
hashes.extend(keccak256(&buf));
} else {
// Serialization of RctBase::Null
@@ -540,7 +544,10 @@ impl Transaction<NotPruned> {
Transaction::V2 { proofs, .. } => {
self.hash_with_prunable_hash(PrunableHash::V2(if let Some(proofs) = proofs {
let mut buf = Vec::with_capacity(1024);
proofs.prunable.write(&mut buf, proofs.rct_type()).unwrap();
proofs
.prunable
.write(&mut buf, proofs.rct_type())
.expect("write failed but <Vec as io::Write> doesn't fail");
keccak256(buf)
} else {
[0; 32]
@@ -563,7 +570,10 @@ impl Transaction<NotPruned> {
Transaction::V2 { proofs, .. } => self.hash_with_prunable_hash({
let Some(proofs) = proofs else { None? };
let mut buf = Vec::with_capacity(1024);
proofs.prunable.signature_write(&mut buf).unwrap();
proofs
.prunable
.signature_write(&mut buf)
.expect("write failed but <Vec as io::Write> doesn't fail");
PrunableHash::V2(keccak256(buf))
}),
})
@@ -599,7 +609,7 @@ impl Transaction<NotPruned> {
blob_size
} else {
blob_size +
Bulletproof::calculate_bp_clawback(
Bulletproof::calculate_clawback(
bp_plus,
match self {
Transaction::V1 { .. } => panic!("v1 transaction was BP(+)"),

View File

@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/wallet"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.80"
rust-version = "1.82"
[package.metadata.docs.rs]
all-features = true

View File

@@ -76,8 +76,10 @@ pub(crate) fn decode(data: &str) -> Option<Vec<u8>> {
break;
}
}
let used_bytes = used_bytes
.expect("chunk of bounded length exhaustively searched but couldn't find matching length");
// Only push on the used bytes
res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes.unwrap()) ..]);
res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes) ..]);
}
Some(res)
@@ -92,11 +94,10 @@ pub(crate) fn encode_check(mut data: Vec<u8>) -> String {
// Decode an arbitrary-length stream of data, with a checksum
pub(crate) fn decode_check(data: &str) -> Option<Vec<u8>> {
if data.len() < CHECKSUM_LEN {
let mut res = decode(data)?;
if res.len() < CHECKSUM_LEN {
None?;
}
let mut res = decode(data)?;
let checksum_pos = res.len() - CHECKSUM_LEN;
if keccak256(&res[.. checksum_pos])[.. CHECKSUM_LEN] != res[checksum_pos ..] {
None?;

View File

@@ -357,21 +357,21 @@ pub struct Address<const ADDRESS_BYTES: u128> {
impl<const ADDRESS_BYTES: u128> fmt::Debug for Address<ADDRESS_BYTES> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let hex = |bytes: &[u8]| -> String {
let hex = |bytes: &[u8]| -> Result<String, fmt::Error> {
let mut res = String::with_capacity(2 + (2 * bytes.len()));
res.push_str("0x");
for b in bytes {
write!(&mut res, "{b:02x}").unwrap();
write!(&mut res, "{b:02x}")?;
}
res
Ok(res)
};
fmt
.debug_struct("Address")
.field("network", &self.network)
.field("kind", &self.kind)
.field("spend", &hex(&self.spend.compress().to_bytes()))
.field("view", &hex(&self.view.compress().to_bytes()))
.field("spend", &hex(&self.spend.compress().to_bytes())?)
.field("view", &hex(&self.view.compress().to_bytes())?)
// This is not a real field yet is the most valuable thing to know when debugging
.field("(address)", &self.to_string())
.finish()
@@ -389,7 +389,8 @@ impl<const ADDRESS_BYTES: u128> fmt::Display for Address<ADDRESS_BYTES> {
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind {
let features_uint =
(u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress);
write_varint(&features_uint, &mut data).unwrap();
write_varint(&features_uint, &mut data)
.expect("write failed but <Vec as io::Write> doesn't fail");
}
if let Some(id) = self.kind.payment_id() {
data.extend(id);

View File

@@ -17,8 +17,8 @@ use crate::{
WalletOutput,
};
const RECENT_WINDOW: usize = 15;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
const RECENT_WINDOW: u64 = 15;
const BLOCKS_PER_YEAR: usize = (365 * 24 * 60 * 60) / BLOCK_TIME;
#[allow(clippy::cast_precision_loss)]
const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64;
@@ -27,7 +27,7 @@ async fn select_n(
rpc: &impl DecoyRpc,
height: usize,
real_output: u64,
ring_len: usize,
ring_len: u8,
fingerprintable_deterministic: bool,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
if height < DEFAULT_LOCK_WINDOW {
@@ -48,8 +48,9 @@ async fn select_n(
// This assumes that each miner TX had one output (as sane) and checks we have sufficient
// outputs even when excluding them (due to their own timelock requirements)
// Considering this a temporal error for very new chains, it's sufficiently sane to have
if highest_output_exclusive_bound.saturating_sub(u64::try_from(COINBASE_LOCK_WINDOW).unwrap()) <
u64::try_from(ring_len).unwrap()
if highest_output_exclusive_bound.saturating_sub(
u64::try_from(COINBASE_LOCK_WINDOW).expect("coinbase lock window exceeds 2^{64}"),
) < u64::from(ring_len)
{
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
@@ -67,7 +68,7 @@ async fn select_n(
let mut do_not_select = HashSet::new();
do_not_select.insert(real_output);
let decoy_count = ring_len - 1;
let decoy_count = usize::from(ring_len - 1);
let mut res = Vec::with_capacity(decoy_count);
let mut iters = 0;
@@ -87,8 +88,9 @@ async fn select_n(
// We check both that we aren't at the maximum amount of iterations and that the not-yet
// selected candidates exceed the amount of candidates necessary to trigger the next iteration
if (iters == MAX_ITERS) ||
((highest_output_exclusive_bound - u64::try_from(do_not_select.len()).unwrap()) <
u64::try_from(ring_len).unwrap())
((highest_output_exclusive_bound -
u64::try_from(do_not_select.len()).expect("amount of ignored decoys exceeds 2^{64}")) <
u64::from(ring_len))
{
Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?;
}
@@ -99,13 +101,18 @@ async fn select_n(
// Use a gamma distribution, as Monero does
      // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/wallet/wallet2.cpp#L142-L143
let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61)
.expect("constant Gamma distribution could no longer be created")
.sample(rng)
.exp();
#[allow(clippy::cast_precision_loss)]
if age > TIP_APPLICATION {
age -= TIP_APPLICATION;
} else {
// f64 does not have try_from available, which is why these are written with `as`
age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
age = (rng.next_u64() %
(RECENT_WINDOW * u64::try_from(BLOCK_TIME).expect("BLOCK_TIME exceeded u64::MAX")))
as f64;
}
#[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
@@ -114,7 +121,9 @@ async fn select_n(
// Find which block this points to
let i = distribution.partition_point(|s| *s < (highest_output_exclusive_bound - 1 - o));
let prev = i.saturating_sub(1);
let n = distribution[i] - distribution[prev];
let n = distribution[i].checked_sub(distribution[prev]).ok_or_else(|| {
RpcError::InternalError("RPC returned non-monotonic distribution".to_string())
})?;
if n != 0 {
// Select an output from within this block
let o = distribution[prev] + (rng.next_u64() % n);
@@ -135,7 +144,11 @@ async fn select_n(
candidates.push(real_output);
// Sort candidates so the real spends aren't the ones at the end
candidates.sort();
Some(candidates.binary_search(&real_output).unwrap())
Some(
candidates
.binary_search(&real_output)
.expect("selected a ring which didn't include the real spend"),
)
} else {
None
};
@@ -169,11 +182,15 @@ async fn select_n(
async fn select_decoys<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &impl DecoyRpc,
ring_len: usize,
ring_len: u8,
height: usize,
input: &WalletOutput,
fingerprintable_deterministic: bool,
) -> Result<Decoys, RpcError> {
if ring_len == 0 {
Err(RpcError::InternalError("requesting a ring of length 0".to_string()))?;
}
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
@@ -215,10 +232,13 @@ async fn select_decoys<R: RngCore + CryptoRng>(
Decoys::new(
offsets,
// Binary searches for the real spend since we don't know where it sorted to
u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)).unwrap(),
// TODO: Define our own collection whose `len` function returns `u8` to ensure this bound
// with types
u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain))
.expect("ring of size <= u8::MAX had an index exceeding u8::MAX"),
ring.into_iter().map(|output| output.1).collect(),
)
.unwrap(),
.expect("selected a syntactically-invalid set of Decoys"),
)
}
@@ -234,7 +254,7 @@ impl OutputWithDecoys {
pub async fn new(
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
rpc: &impl DecoyRpc,
ring_len: usize,
ring_len: u8,
height: usize,
output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
@@ -253,7 +273,7 @@ impl OutputWithDecoys {
pub async fn fingerprintable_deterministic_new(
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
rpc: &impl DecoyRpc,
ring_len: usize,
ring_len: u8,
height: usize,
output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
@@ -297,7 +317,7 @@ impl OutputWithDecoys {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(128);
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

View File

@@ -67,7 +67,7 @@ impl PaymentId {
/// Serialize the PaymentId to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1 + 8);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -100,7 +100,7 @@ pub enum ExtraField {
///
/// This is used within miner transactions who are merge-mining Monero to specify the foreign
/// block they mined.
MergeMining(usize, [u8; 32]),
MergeMining(u64, [u8; 32]),
/// The additional transaction keys.
///
/// These are the per-output commitments to the randomness used for deriving outputs.
@@ -132,7 +132,7 @@ impl ExtraField {
}
ExtraField::MergeMining(height, merkle) => {
w.write_all(&[3])?;
write_varint(&u64::try_from(*height).unwrap(), w)?;
write_varint(height, w)?;
w.write_all(merkle)?;
}
ExtraField::PublicKeys(keys) => {
@@ -150,7 +150,7 @@ impl ExtraField {
/// Serialize the ExtraField to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1 + 8);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -181,16 +181,10 @@ impl ExtraField {
size
}),
1 => ExtraField::PublicKey(read_point(r)?),
2 => ExtraField::Nonce({
let nonce = read_vec(read_byte, r)?;
if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
Err(io::Error::other("too long nonce"))?;
}
nonce
}),
2 => ExtraField::Nonce(read_vec(read_byte, Some(MAX_TX_EXTRA_NONCE_SIZE), r)?),
3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?),
4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, r)?),
4 => ExtraField::PublicKeys(read_vec(read_point, None, r)?),
0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, None, r)?),
_ => Err(io::Error::other("unknown extra field"))?,
})
}
@@ -286,7 +280,7 @@ impl Extra {
/// Serialize the Extra to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = vec![];
self.write(&mut buf).unwrap();
self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
buf
}

View File

@@ -61,7 +61,7 @@ impl SharedKeyDerivations {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
write_varint(height, &mut u).unwrap();
write_varint(height, &mut u).expect("write failed but <Vec as io::Write> doesn't fail");
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
@@ -83,7 +83,8 @@ impl SharedKeyDerivations {
// || o
{
let output_derivation: &mut Vec<u8> = output_derivation.as_mut();
write_varint(&o, output_derivation).unwrap();
write_varint(&o, output_derivation)
.expect("write failed but <Vec as io::Write> doesn't fail");
}
let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
@@ -145,7 +146,11 @@ impl SharedKeyDerivations {
let amount_scalar = Scalar::from_bytes_mod_order(*amount) - amount_shared_sec_scalar;
// d2b from rctTypes.cpp
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
let amount = u64::from_le_bytes(
amount_scalar.to_bytes()[.. 8]
.try_into()
.expect("32-byte array couldn't have an 8-byte slice taken"),
);
Commitment::new(mask, amount)
}

View File

@@ -18,7 +18,7 @@ use crate::{
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub(crate) struct AbsoluteId {
pub(crate) transaction: [u8; 32],
pub(crate) index_in_transaction: u32,
pub(crate) index_in_transaction: u64,
}
impl core::fmt::Debug for AbsoluteId {
@@ -46,7 +46,7 @@ impl AbsoluteId {
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u32(r)? })
Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u64(r)? })
}
}
@@ -128,11 +128,11 @@ impl OutputData {
self.commitment.write(w)
}
/*
/* Commented as it's unused, due to self being private
/// Serialize the OutputData to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 32 + 40);
self.write(&mut res).unwrap();
self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
*/
@@ -194,9 +194,17 @@ impl Metadata {
w.write_all(&[0])?;
}
w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
w.write_all(
&u64::try_from(self.arbitrary_data.len())
.expect("amount of arbitrary data chunks exceeded u64::MAX")
.to_le_bytes(),
)?;
for part in &self.arbitrary_data {
w.write_all(&[u8::try_from(part.len()).unwrap()])?;
// TODO: Define our own collection whose `len` function returns `u8` to ensure this bound
// with types
w.write_all(&[
u8::try_from(part.len()).expect("piece of arbitrary data exceeded max length of u8::MAX")
])?;
w.write_all(part)?;
}
Ok(())
@@ -224,7 +232,7 @@ impl Metadata {
payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
arbitrary_data: {
let mut data = vec![];
for _ in 0 .. read_u32(r)? {
for _ in 0 .. read_u64(r)? {
let len = read_byte(r)?;
data.push(read_raw_vec(read_byte, usize::from(len), r)?);
}
@@ -260,7 +268,7 @@ impl WalletOutput {
}
/// The index of the output within the transaction.
pub fn index_in_transaction(&self) -> u32 {
pub fn index_in_transaction(&self) -> u64 {
self.absolute_id.index_in_transaction
}
@@ -349,7 +357,7 @@ impl WalletOutput {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(128);
self.write(&mut serialized).unwrap();
self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

View File

@@ -228,13 +228,16 @@ impl InternalScanner {
// Decrypt the payment ID
let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh));
let o = u64::try_from(o).expect("couldn't convert output index (usize) to u64");
res.push(WalletOutput {
absolute_id: AbsoluteId {
transaction: tx_hash,
index_in_transaction: o.try_into().unwrap(),
},
absolute_id: AbsoluteId { transaction: tx_hash, index_in_transaction: o },
relative_id: RelativeId {
index_on_blockchain: output_index_for_first_ringct_output + u64::try_from(o).unwrap(),
index_on_blockchain: output_index_for_first_ringct_output.checked_add(o).ok_or(
ScanError::InvalidScannableBlock(
"transaction's output's index isn't representable as a u64",
),
)?,
},
data: OutputData { key: output_key, key_offset, commitment },
metadata: Metadata {
@@ -295,7 +298,8 @@ impl InternalScanner {
// Update the RingCT starting index for the next TX
if matches!(tx, Transaction::V2 { .. }) {
output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len()).unwrap()
output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len())
.expect("couldn't convert amount of outputs (usize) to u64")
}
}

View File

@@ -177,6 +177,17 @@ pub enum SendError {
/// The created transaction was too large.
#[cfg_attr(feature = "std", error("too large of a transaction"))]
TooLargeTransaction,
/// The transactions' amounts could not be represented within a `u64`.
#[cfg_attr(
feature = "std",
error("transaction amounts exceed u64::MAX (in {in_amount}, out {out_amount})")
)]
AmountsUnrepresentable {
/// The amount in (via inputs).
in_amount: u128,
/// The amount which would be out (between outputs and the fee).
out_amount: u128,
},
/// This transaction could not pay for itself.
#[cfg_attr(
feature = "std",
@@ -300,27 +311,39 @@ impl SignableTransaction {
}
// Make sure we have enough funds
let in_amount = self.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
let payments_amount = self
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(amount),
InternalPayment::Change(_) => None,
})
.sum::<u64>();
let (weight, necessary_fee) = self.weight_and_necessary_fee();
if in_amount < (payments_amount + necessary_fee) {
Err(SendError::NotEnoughFunds {
inputs: in_amount,
outputs: payments_amount,
necessary_fee: Some(necessary_fee),
})?;
let weight;
{
let in_amount: u128 =
self.inputs.iter().map(|input| u128::from(input.commitment().amount)).sum();
let payments_amount: u128 = self
.payments
.iter()
.filter_map(|payment| match payment {
InternalPayment::Payment(_, amount) => Some(u128::from(*amount)),
InternalPayment::Change(_) => None,
})
.sum();
let necessary_fee;
(weight, necessary_fee) = self.weight_and_necessary_fee();
let out_amount = payments_amount + u128::from(necessary_fee);
let in_out_amount = u64::try_from(in_amount)
.and_then(|in_amount| u64::try_from(out_amount).map(|out_amount| (in_amount, out_amount)));
let Ok((in_amount, out_amount)) = in_out_amount else {
Err(SendError::AmountsUnrepresentable { in_amount, out_amount })?
};
if in_amount < out_amount {
Err(SendError::NotEnoughFunds {
inputs: in_amount,
outputs: u64::try_from(payments_amount)
.expect("total out fit within u64 but not part of total out"),
necessary_fee: Some(necessary_fee),
})?;
}
}
// The limit is half the no-penalty block size
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/wallet/wallet2.cpp#L110766-L11085
// /src/wallet/wallet2.cpp#L11076-L11085
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
// /src/cryptonote_config.h#L61
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c454
@@ -446,7 +469,7 @@ impl SignableTransaction {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(256);
self.write(&mut buf).unwrap();
self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
buf
}
@@ -456,7 +479,7 @@ impl SignableTransaction {
/// defined serialization.
pub fn read<R: io::Read>(r: &mut R) -> io::Result<SignableTransaction> {
fn read_address<R: io::Read>(r: &mut R) -> io::Result<MoneroAddress> {
String::from_utf8(read_vec(read_byte, r)?)
String::from_utf8(read_vec(read_byte, None, r)?)
.ok()
.and_then(|str| MoneroAddress::from_str_with_unchecked_network(&str).ok())
.ok_or_else(|| io::Error::other("invalid address"))
@@ -484,9 +507,9 @@ impl SignableTransaction {
rct_type: RctType::try_from(read_byte(r)?)
.map_err(|()| io::Error::other("unsupported/invalid RctType"))?,
outgoing_view_key: Zeroizing::new(read_bytes(r)?),
inputs: read_vec(OutputWithDecoys::read, r)?,
payments: read_vec(read_payment, r)?,
data: read_vec(|r| read_vec(read_byte, r), r)?,
inputs: read_vec(OutputWithDecoys::read, None, r)?,
payments: read_vec(read_payment, None, r)?,
data: read_vec(|r| read_vec(read_byte, None, r), None, r)?,
fee_rate: FeeRate::read(r)?,
};
match res.validate() {
@@ -553,9 +576,13 @@ impl SignableTransaction {
let mut tx = tx.transaction_without_signatures();
// Sign the CLSAGs
let clsags_and_pseudo_outs =
Clsag::sign(rng, clsag_signs, mask_sum, tx.signature_hash().unwrap())
.map_err(SendError::ClsagError)?;
let clsags_and_pseudo_outs = Clsag::sign(
rng,
clsag_signs,
mask_sum,
tx.signature_hash().expect("signing a transaction which isn't signed?"),
)
.map_err(SendError::ClsagError)?;
// Fill in the CLSAGs/pseudo-outs
let inputs_len = tx.prefix().inputs.len();

View File

@@ -251,7 +251,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
}
let tx = tx.transaction_without_signatures();
let msg = tx.signature_hash().unwrap();
let msg = tx.signature_hash().expect("signing a transaction which isn't signed?");
// Iterate over each CLSAG calling sign
let mut shares = Vec::with_capacity(to_sign.len());

View File

@@ -73,7 +73,9 @@ impl SignableTransaction {
{
let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes();
let mut id_vec = Vec::with_capacity(1 + 8);
PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
PaymentId::Encrypted(id)
.write(&mut id_vec)
.expect("write failed but <Vec as io::Write> doesn't fail");
extra.push_nonce(id_vec);
} else {
/*
@@ -96,7 +98,9 @@ impl SignableTransaction {
.expect("multiple change outputs?");
let mut id_vec = Vec::with_capacity(1 + 8);
// The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask
PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap();
PaymentId::Encrypted(*payment_id_xor)
.write(&mut id_vec)
.expect("write failed but <Vec as io::Write> doesn't fail");
extra.push_nonce(id_vec);
}
}
@@ -109,7 +113,7 @@ impl SignableTransaction {
}
let mut serialized = Vec::with_capacity(32 * amount_of_keys);
extra.write(&mut serialized).unwrap();
extra.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
@@ -180,7 +184,8 @@ impl SignableTransaction {
push_scalar(&mut bp);
}
for _ in 0 .. 2 {
write_varint(&lr_len, &mut bp).unwrap();
write_varint(&lr_len, &mut bp)
.expect("write failed but <Vec as io::Write> doesn't fail");
for _ in 0 .. lr_len {
push_point(&mut bp);
}
@@ -204,7 +209,8 @@ impl SignableTransaction {
push_scalar(&mut bp);
}
for _ in 0 .. 2 {
write_varint(&lr_len, &mut bp).unwrap();
write_varint(&lr_len, &mut bp)
.expect("write failed but <Vec as io::Write> doesn't fail");
for _ in 0 .. lr_len {
push_point(&mut bp);
}
@@ -261,7 +267,8 @@ impl SignableTransaction {
break;
}
}
weight_and_fee.unwrap()
weight_and_fee
.expect("length of highest possible fee was greater than highest possible fee length")
}
}

View File

@@ -21,7 +21,9 @@ fn seeded_rng(
mut input_keys: Vec<EdwardsPoint>,
) -> ChaCha20Rng {
// Apply the DST
let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]);
let mut transcript = Zeroizing::new(vec![
u8::try_from(dst.len()).expect("internal RNG with constant DST had a too-long DST specified")
]);
transcript.extend(dst);
// Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript
@@ -116,12 +118,12 @@ impl SignableTransaction {
fn transaction_keys(&self) -> (Zeroizing<Scalar>, Vec<Zeroizing<Scalar>>) {
let mut tx_keys = TransactionKeys::new(&self.outgoing_view_key, self.input_keys());
let tx_key = tx_keys.next().unwrap();
let tx_key = tx_keys.next().expect("TransactionKeys (never-ending) was exhausted");
let mut additional_keys = vec![];
if self.should_use_additional_keys() {
for _ in 0 .. self.payments.len() {
additional_keys.push(tx_keys.next().unwrap());
additional_keys.push(tx_keys.next().expect("TransactionKeys (never-ending) was exhausted"));
}
}
(tx_key, additional_keys)

View File

@@ -21,7 +21,7 @@ use monero_wallet::{
mod builder;
pub use builder::SignableTransactionBuilder;
pub fn ring_len(rct_type: RctType) -> usize {
pub fn ring_len(rct_type: RctType) -> u8 {
match rct_type {
RctType::ClsagBulletproof => 11,
RctType::ClsagBulletproofPlus => 16,
@@ -118,7 +118,7 @@ pub fn check_weight_and_fee(tx: &Transaction, fee_rate: FeeRate) {
let fee = proofs.base.fee;
let weight = tx.weight();
let expected_weight = fee_rate.calculate_weight_from_fee(fee);
let expected_weight = fee_rate.calculate_weight_from_fee(fee).unwrap();
assert_eq!(weight, expected_weight);
let expected_fee = fee_rate.calculate_fee_from_weight(weight);

View File

@@ -1,14 +1,14 @@
# rust:1.80.0-slim-bookworm as of July 27th, 2024 (GMT)
FROM --platform=linux/amd64 rust@sha256:37e6f90f98b3afd15c2526d7abb257a1f4cb7d49808fe3729d9d62020b07b544 as deterministic
# rust:1.89.0-slim-bookworm as of August 1st, 2025 (GMT)
FROM --platform=linux/amd64 rust@sha256:703cfb0f80db8eb8a3452bf5151162472039c1b37fe4fb2957b495a6f0104ae7 AS deterministic
# Move to a Debian package snapshot
RUN rm -rf /etc/apt/sources.list.d/debian.sources && \
rm -rf /var/lib/apt/lists/* && \
echo "deb [arch=amd64] http://snapshot.debian.org/archive/debian/20240301T000000Z bookworm main" > /etc/apt/sources.list && \
echo "deb [arch=amd64] http://snapshot.debian.org/archive/debian/20250801T000000Z bookworm main" > /etc/apt/sources.list && \
apt update
# Install dependencies
RUN apt update && apt upgrade && apt install clang -y
RUN apt update -y && apt upgrade -y && apt install -y clang
# Add the wasm toolchain
RUN rustup target add wasm32-unknown-unknown

View File

@@ -92,7 +92,7 @@ fn os(os: Os, additional_root: &str, user: &str) -> String {
match os {
Os::Alpine => format!(
r#"
FROM alpine:latest as image
FROM alpine:latest AS image
COPY --from=mimalloc-alpine libmimalloc.so /usr/lib
ENV LD_PRELOAD=libmimalloc.so
@@ -117,7 +117,7 @@ WORKDIR /home/{user}
Os::Debian => format!(
r#"
FROM debian:bookworm-slim as image
FROM debian:bookworm-slim AS image
COPY --from=mimalloc-debian libmimalloc.so /usr/lib
RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload
@@ -146,7 +146,7 @@ fn build_serai_service(prelude: &str, release: bool, features: &str, package: &s
format!(
r#"
FROM rust:1.80-slim-bookworm as builder
FROM rust:1.89-slim-bookworm AS builder
COPY --from=mimalloc-debian libmimalloc.so /usr/lib
RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload

View File

@@ -2,7 +2,7 @@ use crate::Os;
pub fn mimalloc(os: Os) -> &'static str {
const ALPINE_MIMALLOC: &str = r#"
FROM alpine:latest as mimalloc-alpine
FROM alpine:latest AS mimalloc-alpine
RUN apk update && apk upgrade && apk --no-cache add gcc g++ libc-dev make cmake git
RUN git clone https://github.com/microsoft/mimalloc && \
@@ -16,7 +16,7 @@ RUN git clone https://github.com/microsoft/mimalloc && \
"#;
const DEBIAN_MIMALLOC: &str = r#"
FROM debian:bookworm-slim as mimalloc-debian
FROM debian:bookworm-slim AS mimalloc-debian
RUN apt update && apt upgrade -y && apt install -y gcc g++ make cmake git
RUN git clone https://github.com/microsoft/mimalloc && \

View File

@@ -5,16 +5,16 @@ use crate::{Network, Os, mimalloc, os, write_dockerfile};
pub fn bitcoin(orchestration_path: &Path, network: Network) {
#[rustfmt::skip]
const DOWNLOAD_BITCOIN: &str = r#"
FROM alpine:latest as bitcoin
FROM alpine:latest AS bitcoin
ENV BITCOIN_VERSION=27.1
RUN apk --no-cache add git gnupg
RUN apk --no-cache add wget git gnupg
# Download Bitcoin
RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz \
&& wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS \
&& wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS.asc
RUN wget -4 https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${BITCOIN_VERSION}-$(uname -m)-linux-gnu.tar.gz
RUN wget -4 https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS
RUN wget -4 https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/SHA256SUMS.asc
# Verify all sigs and check for a valid signature from laanwj -- 71A3
RUN git clone https://github.com/bitcoin-core/guix.sigs && \

View File

@@ -5,15 +5,15 @@ pub fn lighthouse(network: Network) -> (String, String, String) {
#[rustfmt::skip]
const DOWNLOAD_LIGHTHOUSE: &str = r#"
FROM alpine:latest as lighthouse
FROM alpine:latest AS lighthouse
ENV LIGHTHOUSE_VERSION=5.1.3
RUN apk --no-cache add git gnupg
RUN apk --no-cache add wget git gnupg
# Download lighthouse
RUN wget https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz
RUN wget https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc
RUN wget -4 https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz
RUN wget -4 https://github.com/sigp/lighthouse/releases/download/v${LIGHTHOUSE_VERSION}/lighthouse-v${LIGHTHOUSE_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc
# Verify the signature
gpg --keyserver keyserver.ubuntu.com --recv-keys 15E66D941F697E28F49381F426416DC3F30674B0

View File

@@ -20,13 +20,15 @@ pub fn nimbus(network: Network) -> (String, String, String) {
#[rustfmt::skip]
let download_nimbus = format!(r#"
FROM alpine:latest as nimbus
FROM alpine:latest AS nimbus
ENV NIMBUS_VERSION=24.3.0
ENV NIMBUS_COMMIT=dc19b082
RUN apk --no-cache add wget
# Download nimbus
RUN wget https://github.com/status-im/nimbus-eth2/releases/download/v${{NIMBUS_VERSION}}/nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz
RUN wget -4 https://github.com/status-im/nimbus-eth2/releases/download/v${{NIMBUS_VERSION}}/nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz
# Extract nimbus
RUN tar xvf nimbus-eth2_Linux_{platform}_${{NIMBUS_VERSION}}_${{NIMBUS_COMMIT}}.tar.gz

View File

@@ -5,15 +5,15 @@ pub fn reth(network: Network) -> (String, String, String) {
#[rustfmt::skip]
const DOWNLOAD_RETH: &str = r#"
FROM alpine:latest as reth
FROM alpine:latest AS reth
ENV RETH_VERSION=0.2.0-beta.6
RUN apk --no-cache add git gnupg
RUN apk --no-cache add wget git gnupg
# Download reth
RUN wget https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz
RUN wget https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc
RUN wget -4 https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz
RUN wget -4 https://github.com/paradigmxyz/reth/releases/download/v${RETH_VERSION}/reth-v${RETH_VERSION}-$(uname -m)-unknown-linux-gnu.tar.gz.asc
# Verify the signature
gpg --keyserver keyserver.ubuntu.com --recv-keys A3AE097C89093A124049DF1F5391A3C4100530B4

View File

@@ -22,12 +22,12 @@ fn monero_internal(
#[rustfmt::skip]
let download_monero = format!(r#"
FROM alpine:latest as monero
FROM alpine:latest AS monero
RUN apk --no-cache add gnupg
RUN apk --no-cache add wget gnupg
# Download Monero
RUN wget https://downloads.getmonero.org/cli/monero-linux-{arch}-v{MONERO_VERSION}.tar.bz2
RUN wget -4 https://downloads.getmonero.org/cli/monero-linux-{arch}-v{MONERO_VERSION}.tar.bz2
# Verify Binary -- fingerprint from https://github.com/monero-project/monero-site/issues/1949
ADD orchestration/{}/networks/monero/hashes-v{MONERO_VERSION}.txt .

View File

@@ -390,6 +390,8 @@ impl Monero {
MakeSignableTransactionResult::SignableTransaction(signable)
}
})),
// AmountsUnrepresentable is unreachable on Monero without 100% of the supply before tail
// emission or fundamental corruption
Err(e) => match e {
SendError::UnsupportedRctType => {
panic!("trying to use an RctType unsupported by monero-wallet")
@@ -398,6 +400,7 @@ impl Monero {
SendError::InvalidDecoyQuantity |
SendError::NoOutputs |
SendError::TooManyOutputs |
SendError::AmountsUnrepresentable { .. } |
SendError::NoChange |
SendError::TooMuchArbitraryData |
SendError::TooLargeTransaction |

View File

@@ -1,5 +1,5 @@
[toolchain]
channel = "1.80"
channel = "1.82"
targets = ["wasm32-unknown-unknown"]
profile = "minimal"
components = ["rust-src", "rustfmt", "clippy"]

View File

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/substrate/client"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["serai"]
edition = "2021"
rust-version = "1.74"
rust-version = "1.82"
[package.metadata.docs.rs]
all-features = true

View File

@@ -49,17 +49,24 @@ macro_rules! serai_test {
test.provide_container(composition);
test.run_async(|ops| async move {
// Sleep until the Substrate RPC starts
let serai_rpc = ops.handle(handle).host_port(9944).unwrap();
let serai_rpc = format!("http://{}:{}", serai_rpc.0, serai_rpc.1);
// Bound execution to 60 seconds
for _ in 0 .. 60 {
let mut ticks = 0;
let serai_rpc = loop {
// Bound execution to 60 seconds
if ticks > 60 {
panic!("Serai node didn't start within 60 seconds");
}
tokio::time::sleep(core::time::Duration::from_secs(1)).await;
ticks += 1;
let Some(serai_rpc) = ops.handle(handle).host_port(9944) else { continue };
let serai_rpc = format!("http://{}:{}", serai_rpc.0, serai_rpc.1);
let Ok(client) = Serai::new(serai_rpc.clone()).await else { continue };
if client.latest_finalized_block_hash().await.is_err() {
continue;
}
break;
}
break serai_rpc;
};
#[allow(clippy::redundant_closure_call)]
$test(Serai::new(serai_rpc).await.unwrap()).await;
}).await;

View File

@@ -1,5 +1,10 @@
use substrate_wasm_builder::WasmBuilder;
fn main() {
WasmBuilder::new().with_current_project().export_heap_base().import_memory().build()
WasmBuilder::new()
.with_current_project()
.disable_runtime_version_section_check()
.export_heap_base()
.import_memory()
.build()
}

View File

@@ -216,7 +216,6 @@ impl TryInto<Call> for RuntimeCall {
coins::Call::burn_with_instruction { instruction } => {
serai_abi::coins::Call::burn_with_instruction { instruction }
}
_ => Err(())?,
}),
RuntimeCall::LiquidityTokens(call) => Call::LiquidityTokens(match call {
coins::Call::transfer { to, balance } => {
@@ -270,7 +269,6 @@ impl TryInto<Call> for RuntimeCall {
send_to: send_to.into(),
}
}
_ => Err(())?,
}),
RuntimeCall::GenesisLiquidity(call) => Call::GenesisLiquidity(match call {
genesis_liquidity::Call::remove_coin_liquidity { balance } => {
@@ -279,7 +277,6 @@ impl TryInto<Call> for RuntimeCall {
genesis_liquidity::Call::oraclize_values { values, signature } => {
serai_abi::genesis_liquidity::Call::oraclize_values { values, signature }
}
_ => Err(())?,
}),
RuntimeCall::ValidatorSets(call) => Call::ValidatorSets(match call {
validator_sets::Call::set_keys { network, removed_participants, key_pair, signature } => {
@@ -315,13 +312,11 @@ impl TryInto<Call> for RuntimeCall {
validator_sets::Call::claim_deallocation { network, session } => {
serai_abi::validator_sets::Call::claim_deallocation { network, session }
}
_ => Err(())?,
}),
RuntimeCall::InInstructions(call) => Call::InInstructions(match call {
in_instructions::Call::execute_batch { batch } => {
serai_abi::in_instructions::Call::execute_batch { batch }
}
_ => Err(())?,
}),
RuntimeCall::Signals(call) => Call::Signals(match call {
signals::Call::register_retirement_signal { in_favor_of } => {
@@ -339,7 +334,6 @@ impl TryInto<Call> for RuntimeCall {
signals::Call::stand_against { signal_id, for_network } => {
serai_abi::signals::Call::stand_against { signal_id, for_network }
}
_ => Err(())?,
}),
RuntimeCall::Babe(call) => Call::Babe(match call {
babe::Call::report_equivocation { equivocation_proof, key_owner_proof } => {
@@ -377,7 +371,6 @@ impl TryInto<Call> for RuntimeCall {
}
_ => Err(())?,
}),
_ => Err(())?,
})
}
}